// QCamera3HWI.cpp revision d97e494db5f2842765083f1295a61bbad3e7cfcd
/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials provided
 *       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {

// Convenience accessor for a buffer pointer held by a QCamera3Mem object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability table, indexed by camera id.
// NOTE(review): assumed to be populated during camera enumeration, before
// any QCamera3HardwareInterface is constructed -- confirm against the HAL
// module entry points (not visible in this file chunk).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Last parameter buffer applied to the backend.
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes session open/close across all instances; only one camera
// session may be active at a time (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// Translation tables between framework (ANDROID_*) enums and backend
// (CAM_*) enums. Each table is an array of {framework value, HAL value}
// pairs consumed by the lookup helpers elsewhere in this file.

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO maps onto the backend's anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF_MODE_OFF is reported as the backend's fixed-focus mode.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode to flash mode: plain AE_MODE_ON means flash stays off; the
// red-eye variant is treated the same as plain auto flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

// Supported JPEG thumbnail sizes as flat (width, height) pairs; the
// trailing (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3_device_ops vtable handed to the framework; each entry is a
// static trampoline that recovers the QCamera3HardwareInterface instance
// from device->priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     :
 *              none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      m_pPowerModule(NULL)
{
    // Fill in the camera3_device_t shell the framework talks to; priv
    // carries 'this' so the static ops trampolines can recover the instance.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): dereferences gCamCapability[cameraId] unchecked --
    // assumes capabilities were loaded during enumeration; TODO confirm.
    gCamCapability[cameraId]->version = CAM_HAL_V3;

    // Request-completion signalling: processCaptureRequest() blocks on
    // mRequestCond until the result callback clears mPendingRequest.
    pthread_mutex_init(&mRequestLock, NULL);
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;

    pthread_mutex_init(&mMutex, NULL);
    pthread_mutex_init(&mCaptureResultLock, NULL);

    // Default request templates are built lazily; start empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power-hint module is optional; failure to load is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel)
channel->stop(); 210 } 211 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 212 it != mStreamInfo.end(); it++) { 213 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 214 if (channel) 215 delete channel; 216 free (*it); 217 } 218 219 if (mJpegSettings != NULL) { 220 free(mJpegSettings); 221 mJpegSettings = NULL; 222 } 223 224 /* Clean up all channels */ 225 if (mCameraInitialized) { 226 mMetadataChannel->stop(); 227 delete mMetadataChannel; 228 mMetadataChannel = NULL; 229 deinitParameters(); 230 } 231 232 if (mCameraOpened) 233 closeCamera(); 234 235 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 236 if (mDefaultMetadata[i]) 237 free_camera_metadata(mDefaultMetadata[i]); 238 239 pthread_mutex_destroy(&mRequestLock); 240 pthread_cond_destroy(&mRequestCond); 241 242 pthread_mutex_destroy(&mMutex); 243 pthread_mutex_destroy(&mCaptureResultLock); 244 ALOGV("%s: X", __func__); 245} 246 247/*=========================================================================== 248 * FUNCTION : openCamera 249 * 250 * DESCRIPTION: open camera 251 * 252 * PARAMETERS : 253 * @hw_device : double ptr for camera device struct 254 * 255 * RETURN : int32_t type of status 256 * NO_ERROR -- success 257 * none-zero failure code 258 *==========================================================================*/ 259int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 260{ 261 int rc = 0; 262 pthread_mutex_lock(&mCameraSessionLock); 263 if (mCameraSessionActive) { 264 ALOGE("%s: multiple simultaneous camera instance not supported", __func__); 265 pthread_mutex_unlock(&mCameraSessionLock); 266 return INVALID_OPERATION; 267 } 268 269 if (mCameraOpened) { 270 *hw_device = NULL; 271 return PERMISSION_DENIED; 272 } 273 274 rc = openCamera(); 275 if (rc == 0) { 276 *hw_device = &mCameraDevice.common; 277 mCameraSessionActive = 1; 278 } else 279 *hw_device = NULL; 280 281#ifdef HAS_MULTIMEDIA_HINTS 282 if (rc == 0) { 283 if (m_pPowerModule) { 284 
if (m_pPowerModule->powerHint) { 285 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, 286 (void *)"state=1"); 287 } 288 } 289 } 290#endif 291 pthread_mutex_unlock(&mCameraSessionLock); 292 return rc; 293} 294 295/*=========================================================================== 296 * FUNCTION : openCamera 297 * 298 * DESCRIPTION: open camera 299 * 300 * PARAMETERS : none 301 * 302 * RETURN : int32_t type of status 303 * NO_ERROR -- success 304 * none-zero failure code 305 *==========================================================================*/ 306int QCamera3HardwareInterface::openCamera() 307{ 308 if (mCameraHandle) { 309 ALOGE("Failure: Camera already opened"); 310 return ALREADY_EXISTS; 311 } 312 mCameraHandle = camera_open(mCameraId); 313 if (!mCameraHandle) { 314 ALOGE("camera_open failed."); 315 return UNKNOWN_ERROR; 316 } 317 318 mCameraOpened = true; 319 320 return NO_ERROR; 321} 322 323/*=========================================================================== 324 * FUNCTION : closeCamera 325 * 326 * DESCRIPTION: close camera 327 * 328 * PARAMETERS : none 329 * 330 * RETURN : int32_t type of status 331 * NO_ERROR -- success 332 * none-zero failure code 333 *==========================================================================*/ 334int QCamera3HardwareInterface::closeCamera() 335{ 336 int rc = NO_ERROR; 337 338 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 339 mCameraHandle = NULL; 340 mCameraOpened = false; 341 342#ifdef HAS_MULTIMEDIA_HINTS 343 if (rc == NO_ERROR) { 344 if (m_pPowerModule) { 345 if (m_pPowerModule->powerHint) { 346 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, 347 (void *)"state=0"); 348 } 349 } 350 } 351#endif 352 353 return rc; 354} 355 356/*=========================================================================== 357 * FUNCTION : initialize 358 * 359 * DESCRIPTION: Initialize frameworks callback functions 360 * 361 * PARAMETERS : 362 * @callback_ops 
 :               callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    // All capture results funnel through captureResultCb.
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): this NULL check only fires if 'new' is non-throwing in
    // this build (-fno-exceptions); with throwing new it is dead code.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // Marks the point after which the destructor must tear down the
    // metadata channel and parameters.
    mCameraInitialized = true;
    return 0;

    // Error unwind: release in reverse order of acquisition.
err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
415 * 416 * PARAMETERS : 417 * @stream_list : streams to be configured 418 * 419 * RETURN : 420 * 421 *==========================================================================*/ 422int QCamera3HardwareInterface::configureStreams( 423 camera3_stream_configuration_t *streamList) 424{ 425 int rc = 0; 426 pthread_mutex_lock(&mMutex); 427 428 // Sanity check stream_list 429 if (streamList == NULL) { 430 ALOGE("%s: NULL stream configuration", __func__); 431 pthread_mutex_unlock(&mMutex); 432 return BAD_VALUE; 433 } 434 435 if (streamList->streams == NULL) { 436 ALOGE("%s: NULL stream list", __func__); 437 pthread_mutex_unlock(&mMutex); 438 return BAD_VALUE; 439 } 440 441 if (streamList->num_streams < 1) { 442 ALOGE("%s: Bad number of streams requested: %d", __func__, 443 streamList->num_streams); 444 pthread_mutex_unlock(&mMutex); 445 return BAD_VALUE; 446 } 447 448 camera3_stream_t *inputStream = NULL; 449 /* first invalidate all the steams in the mStreamList 450 * if they appear again, they will be validated */ 451 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 452 it != mStreamInfo.end(); it++) { 453 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 454 channel->stop(); 455 (*it)->status = INVALID; 456 } 457 458 for (size_t i = 0; i < streamList->num_streams; i++) { 459 camera3_stream_t *newStream = streamList->streams[i]; 460 ALOGV("%s: newStream type = %d, stream format = %d", 461 __func__, newStream->stream_type, newStream->format); 462 //if the stream is in the mStreamList validate it 463 bool stream_exists = false; 464 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 465 it != mStreamInfo.end(); it++) { 466 if ((*it)->stream == newStream) { 467 QCamera3Channel *channel = 468 (QCamera3Channel*)(*it)->stream->priv; 469 stream_exists = true; 470 (*it)->status = RECONFIGURE; 471 /*delete the channel object associated with the stream because 472 we need to reconfigure*/ 473 delete channel; 474 (*it)->stream->priv = NULL; 475 } 
476 } 477 if (!stream_exists) { 478 //new stream 479 stream_info_t* stream_info; 480 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 481 stream_info->stream = newStream; 482 stream_info->status = VALID; 483 stream_info->registered = 0; 484 mStreamInfo.push_back(stream_info); 485 } 486 if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 487 if (inputStream != NULL) { 488 ALOGE("%s: Multiple input streams requested!", __func__); 489 pthread_mutex_unlock(&mMutex); 490 return BAD_VALUE; 491 } 492 inputStream = newStream; 493 } 494 } 495 mInputStream = inputStream; 496 497 /*clean up invalid streams*/ 498 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 499 it != mStreamInfo.end();) { 500 if(((*it)->status) == INVALID){ 501 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 502 delete channel; 503 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 504 free(*it); 505 it = mStreamInfo.erase(it); 506 } else { 507 it++; 508 } 509 } 510 511 //mMetadataChannel->stop(); 512 513 /* Allocate channel objects for the requested streams */ 514 for (size_t i = 0; i < streamList->num_streams; i++) { 515 camera3_stream_t *newStream = streamList->streams[i]; 516 if (newStream->priv == NULL) { 517 //New stream, construct channel 518 switch (newStream->stream_type) { 519 case CAMERA3_STREAM_INPUT: 520 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 521 break; 522 case CAMERA3_STREAM_BIDIRECTIONAL: 523 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 524 GRALLOC_USAGE_HW_CAMERA_WRITE; 525 break; 526 case CAMERA3_STREAM_OUTPUT: 527 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 528 break; 529 default: 530 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 531 break; 532 } 533 534 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 535 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 536 QCamera3Channel *channel; 537 switch (newStream->format) { 538 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 539 case 
HAL_PIXEL_FORMAT_YCbCr_420_888: 540 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 541 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 542 mCameraHandle->ops, captureResultCb, 543 &gCamCapability[mCameraId]->padding_info, this, newStream); 544 if (channel == NULL) { 545 ALOGE("%s: allocation of channel failed", __func__); 546 pthread_mutex_unlock(&mMutex); 547 return -ENOMEM; 548 } 549 550 newStream->priv = channel; 551 break; 552 case HAL_PIXEL_FORMAT_BLOB: 553 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 554 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 555 mCameraHandle->ops, captureResultCb, 556 &gCamCapability[mCameraId]->padding_info, this, newStream); 557 if (channel == NULL) { 558 ALOGE("%s: allocation of channel failed", __func__); 559 pthread_mutex_unlock(&mMutex); 560 return -ENOMEM; 561 } 562 newStream->priv = channel; 563 break; 564 565 //TODO: Add support for app consumed format? 566 default: 567 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 568 break; 569 } 570 } 571 } else { 572 // Channel already exists for this stream 573 // Do nothing for now 574 } 575 } 576 /*For the streams to be reconfigured we need to register the buffers 577 since the framework wont*/ 578 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 579 it != mStreamInfo.end(); it++) { 580 if ((*it)->status == RECONFIGURE) { 581 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 582 /*only register buffers for streams that have already been 583 registered*/ 584 if ((*it)->registered) { 585 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 586 (*it)->buffer_set.buffers); 587 if (rc != NO_ERROR) { 588 ALOGE("%s: Failed to register the buffers of old stream,\ 589 rc = %d", __func__, rc); 590 } 591 ALOGV("%s: channel %p has %d buffers", 592 __func__, channel, (*it)->buffer_set.num_buffers); 593 } 594 } 595 596 ssize_t index = 
                mPendingBuffersMap.indexOfKey((*it)->stream);
        // Reset the per-stream pending-buffer counter for every surviving
        // stream, adding an entry for streams seen for the first time.
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : BAD_VALUE on any malformed field, NO_ERROR otherwise
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    // An input buffer, if present, must come from the configured input stream.
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    // Reprocessing is not implemented yet, so any input buffer is rejected.
    if (request->input_buffer != NULL) {
        //TODO
        ALOGE("%s: Not supporting input buffer yet", __func__);
        return BAD_VALUE;
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
                static_cast<QCamera3Channel*>(b->stream->priv);
        if
           (channel == NULL) {
            // A stream with no channel was never (successfully) configured.
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        // Per the camera3 contract the HAL, not the framework, supplies
        // release fences on capture requests.
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @buffer_set : stream plus the gralloc buffers to register for it
 *
 * RETURN     : 0 on success, negative errno on failure
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 730 731 //set the buffer_set in the mStreamInfo array 732 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 733 it != mStreamInfo.end(); it++) { 734 if ((*it)->stream == stream) { 735 uint32_t numBuffers = buffer_set->num_buffers; 736 (*it)->buffer_set.stream = buffer_set->stream; 737 (*it)->buffer_set.num_buffers = numBuffers; 738 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 739 if ((*it)->buffer_set.buffers == NULL) { 740 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 741 pthread_mutex_unlock(&mMutex); 742 return -ENOMEM; 743 } 744 for (size_t j = 0; j < numBuffers; j++){ 745 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 746 } 747 (*it)->registered = 1; 748 } 749 } 750 751 if (stream->stream_type != CAMERA3_STREAM_OUTPUT) { 752 ALOGE("%s: not yet support non output type stream", __func__); 753 pthread_mutex_unlock(&mMutex); 754 return -EINVAL; 755 } 756 rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers); 757 if (rc < 0) { 758 ALOGE("%s: registerBUffers for stream %p failed", __func__, stream); 759 pthread_mutex_unlock(&mMutex); 760 return -ENODEV; 761 } 762 763 pthread_mutex_unlock(&mMutex); 764 return NO_ERROR; 765} 766 767/*=========================================================================== 768 * FUNCTION : processCaptureRequest 769 * 770 * DESCRIPTION: process a capture request from camera service 771 * 772 * PARAMETERS : 773 * @request : request from framework to process 774 * 775 * RETURN : 776 * 777 *==========================================================================*/ 778int QCamera3HardwareInterface::processCaptureRequest( 779 camera3_capture_request_t *request) 780{ 781 int rc = NO_ERROR; 782 int32_t request_id; 783 CameraMetadata meta; 784 785 pthread_mutex_lock(&mMutex); 786 787 rc = validateCaptureRequest(request); 788 if (rc != NO_ERROR) { 789 ALOGE("%s: incoming request is not valid", __func__); 790 
pthread_mutex_unlock(&mMutex); 791 return rc; 792 } 793 794 uint32_t frameNumber = request->frame_number; 795 796 rc = setFrameParameters(request->frame_number, request->settings); 797 if (rc < 0) { 798 ALOGE("%s: fail to set frame parameters", __func__); 799 pthread_mutex_unlock(&mMutex); 800 return rc; 801 } 802 803 meta = request->settings; 804 if (meta.exists(ANDROID_REQUEST_ID)) { 805 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0]; 806 mCurrentRequestId = request_id; 807 ALOGV("%s: Received request with id: %d",__func__, request_id); 808 } else if (mFirstRequest || mCurrentRequestId == -1){ 809 ALOGE("%s: Unable to find request id field, \ 810 & no previous id available", __func__); 811 return NAME_NOT_FOUND; 812 } else { 813 ALOGV("%s: Re-using old request id", __func__); 814 request_id = mCurrentRequestId; 815 } 816 817 818 ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__, 819 request->num_output_buffers); 820 // Acquire all request buffers first 821 for (size_t i = 0; i < request->num_output_buffers; i++) { 822 const camera3_stream_buffer_t& output = request->output_buffers[i]; 823 sp<Fence> acquireFence = new Fence(output.acquire_fence); 824 825 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 826 //Call function to store local copy of jpeg data for encode params. 
827 rc = getJpegSettings(request->settings); 828 if (rc < 0) { 829 ALOGE("%s: failed to get jpeg parameters", __func__); 830 pthread_mutex_unlock(&mMutex); 831 return rc; 832 } 833 } 834 835 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 836 if (rc != OK) { 837 ALOGE("%s: fence wait failed %d", __func__, rc); 838 pthread_mutex_unlock(&mMutex); 839 return rc; 840 } 841 } 842 843 /* Update pending request list and pending buffers map */ 844 pthread_mutex_lock(&mRequestLock); 845 PendingRequestInfo pendingRequest; 846 pendingRequest.frame_number = frameNumber; 847 pendingRequest.num_buffers = request->num_output_buffers; 848 pendingRequest.request_id = request_id; 849 850 for (size_t i = 0; i < request->num_output_buffers; i++) { 851 RequestedBufferInfo requestedBuf; 852 requestedBuf.stream = request->output_buffers[i].stream; 853 requestedBuf.buffer = NULL; 854 pendingRequest.buffers.push_back(requestedBuf); 855 856 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 857 } 858 mPendingRequestsList.push_back(pendingRequest); 859 pthread_mutex_unlock(&mRequestLock); 860 861 // Notify metadata channel we receive a request 862 mMetadataChannel->request(NULL, frameNumber); 863 864 // Call request on other streams 865 for (size_t i = 0; i < request->num_output_buffers; i++) { 866 const camera3_stream_buffer_t& output = request->output_buffers[i]; 867 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 868 869 if (channel == NULL) { 870 ALOGE("%s: invalid channel pointer for stream", __func__); 871 continue; 872 } 873 874 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 875 rc = channel->request(output.buffer, frameNumber, mJpegSettings); 876 } else { 877 ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__, 878 __LINE__, output.buffer, frameNumber); 879 rc = channel->request(output.buffer, frameNumber); 880 } 881 if (rc < 0) 882 ALOGE("%s: request failed", __func__); 883 } 884 885 mFirstRequest = false; 886 887 //Block on conditional 
    //Block on conditional variable
    // Serialize capture submission: wait here until captureResultCb clears
    // mPendingRequest, so only one request is in flight at a time.
    pthread_mutex_lock(&mRequestLock);
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mRequestLock);
    }
    pthread_mutex_unlock(&mRequestLock);

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION: query vendor tag operations; currently a no-op stub since
 *              this HAL defines no vendor tags.
 *
 * PARAMETERS :
 *   @ops : vendor tag query ops table to fill in (unused)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: dump HAL state to the given fd; not implemented yet.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump to (unused)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // Serialize all result bookkeeping with process_capture_request().
    pthread_mutex_lock(&mRequestLock);

    if (metadata_buf) {
        // ---- Metadata path: a metadata super-buffer arrived from the backend.
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        // NOTE: this local intentionally shadows the frame_number parameter;
        // on the metadata path the frame number comes from the metadata itself.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            // Recycle the metadata buffer straight back to the channel.
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by subtracting
            //a nominal 33ms frame duration per dropped frame from the reported
            //timestamp.
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                // Dropped frame: synthesize minimal metadata carrying only the
                // faked timestamp and the original request id.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count stream buffers that have already been returned by the
            // channels for this request (cached on the buffer path below).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    // NOTE(review): default operator new[] throws rather than
                    // returning NULL, so this check is likely dead code.
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Hand the cached buffer back and drop the per-stream
                        // outstanding-buffer count.
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Unblock process_capture_request() unless some stream still has
        // max_buffers outstanding (see the condvar wait at the end of
        // process_capture_request).
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // ---- Buffer path: a filled stream buffer arrived from a channel.
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
            i++;
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            // Metadata for this frame was already flushed: return the buffer
            // immediately in a buffer-only result.
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Request still pending: cache a copy of the buffer until the
            // matching metadata arrives (released with free() on the
            // metadata path above).
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mRequestLock);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION: Translate a backend metadata_buffer_t into framework
 *              camera_metadata, stamping in the given timestamp/request id.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                 (metadata_buffer_t *metadata, nsecs_t timestamp,
                                  int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Caller-provided timestamp/request id always go in first.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // VLAs sized by the detected face count; 4 rect coords and 6 landmark
    // coords (3 (x,y) points) per face.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    // The remainder is a mechanical copy: pull each backend metadata entry
    // out of the vendor buffer and publish it under the matching Android tag.
    uint8_t *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    // Region layout is [xmin, ymin, xmax, ymax, weight] (5 entries).
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    // ANDROID_SCALER_CROP_REGION is [left, top, width, height].
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    // Side effect: cache the ISO for later use (e.g. exif generation).
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Hand ownership of the metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}

/*===========================================================================
 * FUNCTION   : convertToRegions
 *
 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
 *              laid out as [xmin, ymin, xmax, ymax(, weight)]
 *
 * PARAMETERS :
 *   @rect   : cam_rect_t struct to convert
 *   @region : int32_t destination array
 *   @weight : if we are converting from cam_area_t, weight is valid
 *             else weight = -1 (and region[4] is left untouched)
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
    region[0] = rect.left;
    region[1] = rect.top;
    region[2] = rect.left + rect.width;
    region[3] = rect.top + rect.height;
    if (weight > -1) {
        region[4] = weight;
    }
}

/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert a framework region array
 *              [xmin, ymin, xmax, ymax, weight] into a cam_area_t
 *
 * PARAMETERS :
 *   @roi      : [out] cam_area_t to fill in
 *   @settings : request settings to read the region tag from
 *   @weight   : metadata tag holding the region entry
 *
 *==========================================================================*/
1379void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1380 const camera_metadata_t *settings, 1381 uint32_t tag){ 1382 CameraMetadata frame_settings; 1383 frame_settings = settings; 1384 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1385 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1386 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1387 int32_t y_max = frame_settings.find(tag).data.i32[3]; 1388 roi->weight = frame_settings.find(tag).data.i32[4]; 1389 roi->rect.left = x_min; 1390 roi->rect.top = y_min; 1391 roi->rect.width = x_max - x_min; 1392 roi->rect.height = y_max - y_min; 1393} 1394 1395/*=========================================================================== 1396 * FUNCTION : resetIfNeededROI 1397 * 1398 * DESCRIPTION: helper method to reset the roi if it is greater than scaler 1399 * crop region 1400 * 1401 * PARAMETERS : 1402 * @roi : cam_area_t struct to resize 1403 * @scalerCropRegion : cam_crop_region_t region to compare against 1404 * 1405 * 1406 *==========================================================================*/ 1407bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi, 1408 const cam_crop_region_t* scalerCropRegion) 1409{ 1410 int32_t roi_x_max = roi->rect.width + roi->rect.left; 1411 int32_t roi_y_max = roi->rect.height + roi->rect.top; 1412 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top; 1413 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left; 1414 if ((roi_x_max < scalerCropRegion->left) || 1415 (roi_y_max < scalerCropRegion->top) || 1416 (roi->rect.left > crop_x_max) || 1417 (roi->rect.top > crop_y_max)){ 1418 return false; 1419 } 1420 if (roi->rect.left < scalerCropRegion->left) { 1421 roi->rect.left = scalerCropRegion->left; 1422 } 1423 if (roi->rect.top < scalerCropRegion->top) { 1424 roi->rect.top = scalerCropRegion->top; 1425 } 1426 if (roi_x_max > crop_x_max) { 1427 roi_x_max = crop_x_max; 1428 } 1429 if (roi_y_max > crop_y_max) { 
1430 roi_y_max = crop_y_max; 1431 } 1432 roi->rect.width = roi_x_max - roi->rect.left; 1433 roi->rect.height = roi_y_max - roi->rect.top; 1434 return true; 1435} 1436 1437/*=========================================================================== 1438 * FUNCTION : convertLandmarks 1439 * 1440 * DESCRIPTION: helper method to extract the landmarks from face detection info 1441 * 1442 * PARAMETERS : 1443 * @face : cam_rect_t struct to convert 1444 * @landmarks : int32_t destination array 1445 * 1446 * 1447 *==========================================================================*/ 1448void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1449{ 1450 landmarks[0] = face.left_eye_center.x; 1451 landmarks[1] = face.left_eye_center.y; 1452 landmarks[2] = face.right_eye_center.y; 1453 landmarks[3] = face.right_eye_center.y; 1454 landmarks[4] = face.mouth_center.x; 1455 landmarks[5] = face.mouth_center.y; 1456} 1457 1458#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1459/*=========================================================================== 1460 * FUNCTION : initCapabilities 1461 * 1462 * DESCRIPTION: initialize camera capabilities in static data struct 1463 * 1464 * PARAMETERS : 1465 * @cameraId : camera Id 1466 * 1467 * RETURN : int32_t type of status 1468 * NO_ERROR -- success 1469 * none-zero failure code 1470 *==========================================================================*/ 1471int QCamera3HardwareInterface::initCapabilities(int cameraId) 1472{ 1473 int rc = 0; 1474 mm_camera_vtbl_t *cameraHandle = NULL; 1475 QCamera3HeapMemory *capabilityHeap = NULL; 1476 1477 cameraHandle = camera_open(cameraId); 1478 if (!cameraHandle) { 1479 ALOGE("%s: camera_open failed", __func__); 1480 rc = -1; 1481 goto open_failed; 1482 } 1483 1484 capabilityHeap = new QCamera3HeapMemory(); 1485 if (capabilityHeap == NULL) { 1486 ALOGE("%s: creation of capabilityHeap failed", __func__); 1487 goto 
heap_creation_failed;
    }
    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
    if(rc != OK) {
        ALOGE("%s: No memory for cappability", __func__);
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t));
    if(rc < 0) {
        ALOGE("%s: failed to map capability buffer", __func__);
        goto map_failed;
    }

    /* Query Capability */
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Persist the queried capabilities in the per-camera static table.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

    // Cleanup ladder: labels fall through in reverse order of setup, so
    // each failure point (and the success path) releases everything
    // acquired before it.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}

/*===========================================================================
 * FUNCTION   : initParameters
 *
 * DESCRIPTION: initialize camera parameters: allocate the shared parameter
 *              heap, map it to the backend, and point mParameters at it
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initParameters()
{
    int rc = 0;

    //Allocate Set Param Buffer
    mParamHeap = new QCamera3HeapMemory();
    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
    if(rc != OK) {
        rc = NO_MEMORY;
        ALOGE("Failed to allocate SETPARM Heap memory");
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    //Map memory for parameters buffer
    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF,
            mParamHeap->getFd(0),
            sizeof(parm_buffer_t));
    if(rc < 0) {
        ALOGE("%s:failed to map SETPARM buffer",__func__);
        rc = FAILED_TRANSACTION;
        // Undo the allocation so the object stays in a clean state.
        mParamHeap->deallocate();
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    // mParameters aliases the heap memory; it stays valid until
    // deinitParameters() tears the heap down.
    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
    return rc;
}

/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters: unmap from the backend,
 *              then free the heap (order matters — unmap before free)
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap just freed; clear the alias.
    mParameters = NULL;
}

/*===========================================================================
 * FUNCTION   : calcMaxJpegSize
 *
 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
 *              (largest picture area * 3/2 plus the jpeg blob header)
 *
 * PARAMETERS :
 *
 * RETURN     : max_jpeg_size
 *==========================================================================*/
int QCamera3HardwareInterface::calcMaxJpegSize()
{
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    // Scan the supported picture sizes for the largest pixel area.
    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1616 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1617 if (temp_width * temp_height > max_jpeg_size ) { 1618 max_jpeg_size = temp_width * temp_height; 1619 } 1620 } 1621 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1622 return max_jpeg_size; 1623} 1624 1625/*=========================================================================== 1626 * FUNCTION : initStaticMetadata 1627 * 1628 * DESCRIPTION: initialize the static metadata 1629 * 1630 * PARAMETERS : 1631 * @cameraId : camera Id 1632 * 1633 * RETURN : int32_t type of status 1634 * 0 -- success 1635 * non-zero failure code 1636 *==========================================================================*/ 1637int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1638{ 1639 int rc = 0; 1640 CameraMetadata staticInfo; 1641 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1642 /*HAL 3 only*/ 1643 /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1644 &gCamCapability[cameraId]->min_focus_distance, 1); */ 1645 1646 /*hard coded for now but this should come from sensor*/ 1647 float min_focus_distance; 1648 if(facingBack){ 1649 min_focus_distance = 10; 1650 } else { 1651 min_focus_distance = 0; 1652 } 1653 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1654 &min_focus_distance, 1); 1655 1656 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1657 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1658 1659 /*should be using focal lengths but sensor doesn't provide that info now*/ 1660 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1661 &gCamCapability[cameraId]->focal_length, 1662 1); 1663 1664 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1665 gCamCapability[cameraId]->apertures, 1666 gCamCapability[cameraId]->apertures_count); 1667 1668 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1669 
gCamCapability[cameraId]->filter_densities, 1670 gCamCapability[cameraId]->filter_densities_count); 1671 1672 1673 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1674 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1675 gCamCapability[cameraId]->optical_stab_modes_count); 1676 1677 staticInfo.update(ANDROID_LENS_POSITION, 1678 gCamCapability[cameraId]->lens_position, 1679 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1680 1681 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1682 gCamCapability[cameraId]->lens_shading_map_size.height}; 1683 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1684 lens_shading_map_size, 1685 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1686 1687 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1688 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1689 1690 int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1691 gCamCapability[cameraId]->geo_correction_map_size.height}; 1692 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1693 geo_correction_map_size, 1694 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1695 1696 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1697 gCamCapability[cameraId]->geo_correction_map, 1698 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1699 1700 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1701 gCamCapability[cameraId]->sensor_physical_size, 2); 1702 1703 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1704 gCamCapability[cameraId]->exposure_time_range, 2); 1705 1706 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1707 &gCamCapability[cameraId]->max_frame_duration, 1); 1708 1709 1710 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1711 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1712 1713 
int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1714 gCamCapability[cameraId]->pixel_array_size.height}; 1715 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1716 pixel_array_size, 2); 1717 1718 int32_t active_array_size[] = {0, 0, 1719 gCamCapability[cameraId]->active_array_size.width, 1720 gCamCapability[cameraId]->active_array_size.height}; 1721 1722 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1723 active_array_size, 4); 1724 1725 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1726 &gCamCapability[cameraId]->white_level, 1); 1727 1728 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1729 gCamCapability[cameraId]->black_level_pattern, 4); 1730 1731 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1732 &gCamCapability[cameraId]->flash_charge_duration, 1); 1733 1734 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1735 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1736 1737 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1738 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1739 /*hardcode 0 for now*/ 1740 int32_t max_face_count = 0; 1741 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1742 &max_face_count, 1); 1743 1744 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1745 &gCamCapability[cameraId]->histogram_size, 1); 1746 1747 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1748 &gCamCapability[cameraId]->max_histogram_count, 1); 1749 1750 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1751 gCamCapability[cameraId]->sharpness_map_size.height}; 1752 1753 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1754 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1755 1756 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1757 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1758 1759 1760 
staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1761 &gCamCapability[cameraId]->raw_min_duration, 1762 1); 1763 1764 int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888, 1765 HAL_PIXEL_FORMAT_BLOB}; 1766 int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t); 1767 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1768 scalar_formats, 1769 scalar_formats_count); 1770 1771 int32_t available_processed_sizes[CAM_FORMAT_MAX * 2]; 1772 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1773 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1774 available_processed_sizes); 1775 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1776 available_processed_sizes, 1777 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1778 1779 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 1780 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1781 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1782 available_fps_ranges); 1783 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1784 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1785 1786 camera_metadata_rational exposureCompensationStep = { 1787 gCamCapability[cameraId]->exp_compensation_step.numerator, 1788 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1789 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1790 &exposureCompensationStep, 1); 1791 1792 /*TO DO*/ 1793 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1794 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1795 availableVstabModes, sizeof(availableVstabModes)); 1796 1797 /*HAL 1 and HAL 3 common*/ 1798 float maxZoom = 4; 1799 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1800 &maxZoom, 1); 1801 1802 int32_t max3aRegions = 1; 1803 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1804 &max3aRegions, 1); 1805 1806 uint8_t availableFaceDetectModes[] = { 1807 
ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1808 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1809 availableFaceDetectModes, 1810 sizeof(availableFaceDetectModes)); 1811 1812 int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1813 gCamCapability[cameraId]->raw_dim.height}; 1814 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1815 raw_size, 1816 sizeof(raw_size)/sizeof(uint32_t)); 1817 1818 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1819 gCamCapability[cameraId]->exposure_compensation_max}; 1820 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1821 exposureCompensationRange, 1822 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1823 1824 uint8_t lensFacing = (facingBack) ? 1825 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1826 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1827 1828 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 1829 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1830 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1831 available_jpeg_sizes); 1832 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1833 available_jpeg_sizes, 1834 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1835 1836 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1837 available_thumbnail_sizes, 1838 sizeof(available_thumbnail_sizes)/sizeof(int32_t)); 1839 1840 int32_t max_jpeg_size = 0; 1841 int temp_width, temp_height; 1842 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1843 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1844 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1845 if (temp_width * temp_height > max_jpeg_size ) { 1846 max_jpeg_size = temp_width * temp_height; 1847 } 1848 } 1849 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1850 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1851 &max_jpeg_size, 1); 1852 1853 uint8_t 
avail_effects[CAM_EFFECT_MODE_MAX]; 1854 int32_t size = 0; 1855 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1856 int val = lookupFwkName(EFFECT_MODES_MAP, 1857 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1858 gCamCapability[cameraId]->supported_effects[i]); 1859 if (val != NAME_NOT_FOUND) { 1860 avail_effects[size] = (uint8_t)val; 1861 size++; 1862 } 1863 } 1864 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1865 avail_effects, 1866 size); 1867 1868 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1869 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 1870 int32_t supported_scene_modes_cnt = 0; 1871 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1872 int val = lookupFwkName(SCENE_MODES_MAP, 1873 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1874 gCamCapability[cameraId]->supported_scene_modes[i]); 1875 if (val != NAME_NOT_FOUND) { 1876 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 1877 supported_indexes[supported_scene_modes_cnt] = i; 1878 supported_scene_modes_cnt++; 1879 } 1880 } 1881 1882 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1883 avail_scene_modes, 1884 supported_scene_modes_cnt); 1885 1886 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 1887 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 1888 supported_scene_modes_cnt, 1889 scene_mode_overrides, 1890 supported_indexes, 1891 cameraId); 1892 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 1893 scene_mode_overrides, 1894 supported_scene_modes_cnt*3); 1895 1896 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1897 size = 0; 1898 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1899 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1900 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1901 gCamCapability[cameraId]->supported_antibandings[i]); 1902 if (val != NAME_NOT_FOUND) { 1903 avail_antibanding_modes[size] = 
(uint8_t)val; 1904 size++; 1905 } 1906 1907 } 1908 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1909 avail_antibanding_modes, 1910 size); 1911 1912 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1913 size = 0; 1914 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1915 int val = lookupFwkName(FOCUS_MODES_MAP, 1916 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1917 gCamCapability[cameraId]->supported_focus_modes[i]); 1918 if (val != NAME_NOT_FOUND) { 1919 avail_af_modes[size] = (uint8_t)val; 1920 size++; 1921 } 1922 } 1923 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1924 avail_af_modes, 1925 size); 1926 1927 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1928 size = 0; 1929 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1930 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1931 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1932 gCamCapability[cameraId]->supported_white_balances[i]); 1933 if (val != NAME_NOT_FOUND) { 1934 avail_awb_modes[size] = (uint8_t)val; 1935 size++; 1936 } 1937 } 1938 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1939 avail_awb_modes, 1940 size); 1941 1942 uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1943 size = 0; 1944 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1945 int val = lookupFwkName(FLASH_MODES_MAP, 1946 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1947 gCamCapability[cameraId]->supported_flash_modes[i]); 1948 if (val != NAME_NOT_FOUND) { 1949 avail_flash_modes[size] = (uint8_t)val; 1950 size++; 1951 } 1952 } 1953 static uint8_t flashAvailable = 0; 1954 if (size > 1) { 1955 //flash is supported 1956 flashAvailable = 1; 1957 } 1958 staticInfo.update(ANDROID_FLASH_MODE, 1959 avail_flash_modes, 1960 size); 1961 1962 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1963 &flashAvailable, 1); 1964 1965 uint8_t avail_ae_modes[5]; 1966 size = 0; 1967 for (int i = 0; i < 
gCamCapability[cameraId]->supported_ae_modes_cnt; i++) { 1968 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i]; 1969 size++; 1970 } 1971 if (flashAvailable) { 1972 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; 1973 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; 1974 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; 1975 } 1976 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1977 avail_ae_modes, 1978 size); 1979 size = 0; 1980 int32_t avail_sensitivities[CAM_ISO_MODE_MAX]; 1981 for (int i = 0; i < gCamCapability[cameraId]->supported_iso_modes_cnt; i++) { 1982 int32_t sensitivity = getSensorSensitivity(gCamCapability[cameraId]->supported_iso_modes[i]); 1983 if (sensitivity != -1) { 1984 avail_sensitivities[size] = sensitivity; 1985 size++; 1986 } 1987 } 1988 staticInfo.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES, 1989 avail_sensitivities, 1990 size); 1991 1992 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 1993 &gCamCapability[cameraId]->max_analog_sensitivity, 1994 sizeof(int32_t) ); 1995 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, 1996 &gCamCapability[cameraId]->processed_min_duration, 1997 sizeof(int32_t)); 1998 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1999 &gCamCapability[cameraId]->jpeg_min_duration, 2000 sizeof(int32_t)); 2001 2002 gStaticMetadata[cameraId] = staticInfo.release(); 2003 return rc; 2004} 2005 2006/*=========================================================================== 2007 * FUNCTION : makeTable 2008 * 2009 * DESCRIPTION: make a table of sizes 2010 * 2011 * PARAMETERS : 2012 * 2013 * 2014 *==========================================================================*/ 2015void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 2016 int32_t* sizeTable) 2017{ 2018 int j = 0; 2019 for (int i = 0; i < size; i++) { 2020 sizeTable[j] = dimTable[i].width; 2021 sizeTable[j+1] = 
dimTable[i].height; 2022 j+=2; 2023 } 2024} 2025 2026/*=========================================================================== 2027 * FUNCTION : makeFPSTable 2028 * 2029 * DESCRIPTION: make a table of fps ranges 2030 * 2031 * PARAMETERS : 2032 * 2033 *==========================================================================*/ 2034void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 2035 int32_t* fpsRangesTable) 2036{ 2037 int j = 0; 2038 for (int i = 0; i < size; i++) { 2039 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 2040 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 2041 j+=2; 2042 } 2043} 2044 2045/*=========================================================================== 2046 * FUNCTION : makeOverridesList 2047 * 2048 * DESCRIPTION: make a list of scene mode overrides 2049 * 2050 * PARAMETERS : 2051 * 2052 * 2053 *==========================================================================*/ 2054void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable, 2055 uint8_t size, uint8_t* overridesList, 2056 uint8_t* supported_indexes, 2057 int camera_id) 2058{ 2059 /*daemon will give a list of overrides for all scene modes. 
2060 However we should send the fwk only the overrides for the scene modes 2061 supported by the framework*/ 2062 int j = 0, index = 0, supt = 0; 2063 uint8_t focus_override; 2064 for (int i = 0; i < size; i++) { 2065 supt = 0; 2066 index = supported_indexes[i]; 2067 overridesList[j] = (uint8_t)overridesTable[index].ae_mode; 2068 overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP, 2069 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 2070 overridesTable[index].awb_mode); 2071 focus_override = (uint8_t)overridesTable[index].af_mode; 2072 for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) { 2073 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) { 2074 supt = 1; 2075 break; 2076 } 2077 } 2078 if (supt) { 2079 overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP, 2080 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 2081 focus_override); 2082 } else { 2083 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF; 2084 } 2085 j+=3; 2086 } 2087} 2088 2089/*=========================================================================== 2090 * FUNCTION : getPreviewHalPixelFormat 2091 * 2092 * DESCRIPTION: convert the format to type recognized by framework 2093 * 2094 * PARAMETERS : format : the format from backend 2095 * 2096 ** RETURN : format recognized by framework 2097 * 2098 *==========================================================================*/ 2099int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2100{ 2101 int32_t halPixelFormat; 2102 2103 switch (format) { 2104 case CAM_FORMAT_YUV_420_NV12: 2105 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 2106 break; 2107 case CAM_FORMAT_YUV_420_NV21: 2108 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2109 break; 2110 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2111 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2112 break; 2113 case CAM_FORMAT_YUV_420_YV12: 2114 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2115 break; 
2116 case CAM_FORMAT_YUV_422_NV16: 2117 case CAM_FORMAT_YUV_422_NV61: 2118 default: 2119 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2120 break; 2121 } 2122 return halPixelFormat; 2123} 2124 2125/*=========================================================================== 2126 * FUNCTION : getSensorSensitivity 2127 * 2128 * DESCRIPTION: convert iso_mode to an integer value 2129 * 2130 * PARAMETERS : iso_mode : the iso_mode supported by sensor 2131 * 2132 ** RETURN : sensitivity supported by sensor 2133 * 2134 *==========================================================================*/ 2135int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode) 2136{ 2137 int32_t sensitivity; 2138 2139 switch (iso_mode) { 2140 case CAM_ISO_MODE_100: 2141 sensitivity = 100; 2142 break; 2143 case CAM_ISO_MODE_200: 2144 sensitivity = 200; 2145 break; 2146 case CAM_ISO_MODE_400: 2147 sensitivity = 400; 2148 break; 2149 case CAM_ISO_MODE_800: 2150 sensitivity = 800; 2151 break; 2152 case CAM_ISO_MODE_1600: 2153 sensitivity = 1600; 2154 break; 2155 default: 2156 sensitivity = -1; 2157 break; 2158 } 2159 return sensitivity; 2160} 2161 2162 2163/*=========================================================================== 2164 * FUNCTION : AddSetParmEntryToBatch 2165 * 2166 * DESCRIPTION: add set parameter entry into batch 2167 * 2168 * PARAMETERS : 2169 * @p_table : ptr to parameter buffer 2170 * @paramType : parameter type 2171 * @paramLength : length of parameter value 2172 * @paramValue : ptr to parameter value 2173 * 2174 * RETURN : int32_t type of status 2175 * NO_ERROR -- success 2176 * none-zero failure code 2177 *==========================================================================*/ 2178int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2179 cam_intf_parm_type_t paramType, 2180 uint32_t paramLength, 2181 void *paramValue) 2182{ 2183 int position = paramType; 2184 int current, next; 2185 2186 
/************************************************************************* 2187 * Code to take care of linking next flags * 2188 *************************************************************************/ 2189 current = GET_FIRST_PARAM_ID(p_table); 2190 if (position == current){ 2191 //DO NOTHING 2192 } else if (position < current){ 2193 SET_NEXT_PARAM_ID(position, p_table, current); 2194 SET_FIRST_PARAM_ID(p_table, position); 2195 } else { 2196 /* Search for the position in the linked list where we need to slot in*/ 2197 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2198 current = GET_NEXT_PARAM_ID(current, p_table); 2199 2200 /*If node already exists no need to alter linking*/ 2201 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2202 next = GET_NEXT_PARAM_ID(current, p_table); 2203 SET_NEXT_PARAM_ID(current, p_table, position); 2204 SET_NEXT_PARAM_ID(position, p_table, next); 2205 } 2206 } 2207 2208 /************************************************************************* 2209 * Copy contents into entry * 2210 *************************************************************************/ 2211 2212 if (paramLength > sizeof(parm_type_t)) { 2213 ALOGE("%s:Size of input larger than max entry size",__func__); 2214 return BAD_VALUE; 2215 } 2216 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2217 return NO_ERROR; 2218} 2219 2220/*=========================================================================== 2221 * FUNCTION : lookupFwkName 2222 * 2223 * DESCRIPTION: In case the enum is not same in fwk and backend 2224 * make sure the parameter is correctly propogated 2225 * 2226 * PARAMETERS : 2227 * @arr : map between the two enums 2228 * @len : len of the map 2229 * @hal_name : name of the hal_parm to map 2230 * 2231 * RETURN : int type of status 2232 * fwk_name -- success 2233 * none-zero failure code 2234 *==========================================================================*/ 2235int8_t QCamera3HardwareInterface::lookupFwkName(const 
QCameraMap arr[], 2236 int len, int hal_name) 2237{ 2238 2239 for (int i = 0; i < len; i++) { 2240 if (arr[i].hal_name == hal_name) 2241 return arr[i].fwk_name; 2242 } 2243 2244 /* Not able to find matching framework type is not necessarily 2245 * an error case. This happens when mm-camera supports more attributes 2246 * than the frameworks do */ 2247 ALOGD("%s: Cannot find matching framework type", __func__); 2248 return NAME_NOT_FOUND; 2249} 2250 2251/*=========================================================================== 2252 * FUNCTION : lookupHalName 2253 * 2254 * DESCRIPTION: In case the enum is not same in fwk and backend 2255 * make sure the parameter is correctly propogated 2256 * 2257 * PARAMETERS : 2258 * @arr : map between the two enums 2259 * @len : len of the map 2260 * @fwk_name : name of the hal_parm to map 2261 * 2262 * RETURN : int32_t type of status 2263 * hal_name -- success 2264 * none-zero failure code 2265 *==========================================================================*/ 2266int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2267 int len, int fwk_name) 2268{ 2269 for (int i = 0; i < len; i++) { 2270 if (arr[i].fwk_name == fwk_name) 2271 return arr[i].hal_name; 2272 } 2273 ALOGE("%s: Cannot find matching hal type", __func__); 2274 return NAME_NOT_FOUND; 2275} 2276 2277/*=========================================================================== 2278 * FUNCTION : getCapabilities 2279 * 2280 * DESCRIPTION: query camera capabilities 2281 * 2282 * PARAMETERS : 2283 * @cameraId : camera Id 2284 * @info : camera info struct to be filled in with camera capabilities 2285 * 2286 * RETURN : int32_t type of status 2287 * NO_ERROR -- success 2288 * none-zero failure code 2289 *==========================================================================*/ 2290int QCamera3HardwareInterface::getCamInfo(int cameraId, 2291 struct camera_info *info) 2292{ 2293 int rc = 0; 2294 2295 if (NULL == gCamCapability[cameraId]) { 2296 
rc = initCapabilities(cameraId); 2297 if (rc < 0) { 2298 //pthread_mutex_unlock(&g_camlock); 2299 return rc; 2300 } 2301 } 2302 2303 if (NULL == gStaticMetadata[cameraId]) { 2304 rc = initStaticMetadata(cameraId); 2305 if (rc < 0) { 2306 return rc; 2307 } 2308 } 2309 2310 switch(gCamCapability[cameraId]->position) { 2311 case CAM_POSITION_BACK: 2312 info->facing = CAMERA_FACING_BACK; 2313 break; 2314 2315 case CAM_POSITION_FRONT: 2316 info->facing = CAMERA_FACING_FRONT; 2317 break; 2318 2319 default: 2320 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2321 rc = -1; 2322 break; 2323 } 2324 2325 2326 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2327 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2328 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2329 2330 return rc; 2331} 2332 2333/*=========================================================================== 2334 * FUNCTION : translateMetadata 2335 * 2336 * DESCRIPTION: translate the metadata into camera_metadata_t 2337 * 2338 * PARAMETERS : type of the request 2339 * 2340 * 2341 * RETURN : success: camera_metadata_t* 2342 * failure: NULL 2343 * 2344 *==========================================================================*/ 2345camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2346{ 2347 pthread_mutex_lock(&mMutex); 2348 2349 if (mDefaultMetadata[type] != NULL) { 2350 pthread_mutex_unlock(&mMutex); 2351 return mDefaultMetadata[type]; 2352 } 2353 //first time we are handling this request 2354 //fill up the metadata structure using the wrapper class 2355 CameraMetadata settings; 2356 //translate from cam_capability_t to camera_metadata_tag_t 2357 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2358 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2359 2360 /*control*/ 2361 2362 uint8_t controlIntent = 0; 2363 switch (type) { 2364 case CAMERA3_TEMPLATE_PREVIEW: 2365 controlIntent = 
ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2366 break; 2367 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2368 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2369 break; 2370 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2371 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2372 break; 2373 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2374 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2375 break; 2376 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2377 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2378 break; 2379 default: 2380 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2381 break; 2382 } 2383 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2384 2385 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2386 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2387 2388 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2389 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2390 2391 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2392 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2393 2394 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2395 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2396 2397 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2398 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2399 2400 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2401 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2402 2403 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2404 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2405 2406 static uint8_t focusMode; 2407 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2408 ALOGE("%s: Setting focus mode to auto", __func__); 2409 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2410 } else { 2411 ALOGE("%s: Setting focus mode to off", __func__); 2412 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2413 } 2414 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2415 2416 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2417 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2418 2419 /*flash*/ 2420 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2421 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2422 2423 2424 /* lens */ 2425 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2426 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2427 2428 if (gCamCapability[mCameraId]->filter_densities_count) { 2429 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2430 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2431 gCamCapability[mCameraId]->filter_densities_count); 2432 } 2433 2434 float default_focal_length = gCamCapability[mCameraId]->focal_length; 2435 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2436 2437 mDefaultMetadata[type] = settings.release(); 2438 2439 pthread_mutex_unlock(&mMutex); 2440 return mDefaultMetadata[type]; 2441} 2442 2443/*=========================================================================== 2444 * FUNCTION : setFrameParameters 2445 * 2446 * DESCRIPTION: set parameters per frame as requested in the metadata from 2447 * framework 2448 * 2449 * PARAMETERS : 2450 * @settings : frame settings information from framework 2451 * 2452 * 2453 * RETURN : success: NO_ERROR 2454 * failure: 2455 *==========================================================================*/ 2456int QCamera3HardwareInterface::setFrameParameters(int frame_id, 
2457 const camera_metadata_t *settings) 2458{ 2459 /*translate from camera_metadata_t type to parm_type_t*/ 2460 int rc = 0; 2461 if (settings == NULL && mFirstRequest) { 2462 /*settings cannot be null for the first request*/ 2463 return BAD_VALUE; 2464 } 2465 2466 int32_t hal_version = CAM_HAL_V3; 2467 2468 memset(mParameters, 0, sizeof(parm_buffer_t)); 2469 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2470 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2471 sizeof(hal_version), &hal_version); 2472 2473 /*we need to update the frame number in the parameters*/ 2474 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2475 sizeof(frame_id), &frame_id); 2476 if (rc < 0) { 2477 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2478 return BAD_VALUE; 2479 } 2480 2481 if(settings != NULL){ 2482 rc = translateMetadataToParameters(settings); 2483 } 2484 /*set the parameters to backend*/ 2485 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2486 return rc; 2487} 2488 2489/*=========================================================================== 2490 * FUNCTION : translateMetadataToParameters 2491 * 2492 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2493 * 2494 * 2495 * PARAMETERS : 2496 * @settings : frame settings information from framework 2497 * 2498 * 2499 * RETURN : success: NO_ERROR 2500 * failure: 2501 *==========================================================================*/ 2502int QCamera3HardwareInterface::translateMetadataToParameters 2503 (const camera_metadata_t *settings) 2504{ 2505 int rc = 0; 2506 CameraMetadata frame_settings; 2507 frame_settings = settings; 2508 2509 2510 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2511 int32_t antibandingMode = 2512 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2513 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2514 
sizeof(antibandingMode), &antibandingMode); 2515 } 2516 2517 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 2518 int32_t expCompensation = frame_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 2519 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2520 sizeof(expCompensation), &expCompensation); 2521 } 2522 2523 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2524 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2525 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2526 sizeof(aeLock), &aeLock); 2527 } 2528 2529 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2530 cam_fps_range_t fps_range; 2531 fps_range.min_fps = 2532 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2533 fps_range.max_fps = 2534 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1]; 2535 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2536 sizeof(fps_range), &fps_range); 2537 } 2538 2539 float focalDistance = -1.0; 2540 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2541 focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2542 rc = AddSetParmEntryToBatch(mParameters, 2543 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2544 sizeof(focalDistance), &focalDistance); 2545 } 2546 2547 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2548 uint8_t fwk_focusMode = 2549 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2550 uint8_t focusMode; 2551 if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) { 2552 focusMode = CAM_FOCUS_MODE_INFINITY; 2553 } else{ 2554 focusMode = lookupHalName(FOCUS_MODES_MAP, 2555 sizeof(FOCUS_MODES_MAP), 2556 fwk_focusMode); 2557 } 2558 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2559 sizeof(focusMode), &focusMode); 2560 } 2561 2562 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2563 uint8_t awbLock = 
2564 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2565 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2566 sizeof(awbLock), &awbLock); 2567 } 2568 2569 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2570 uint8_t fwk_whiteLevel = 2571 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2572 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2573 sizeof(WHITE_BALANCE_MODES_MAP), 2574 fwk_whiteLevel); 2575 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2576 sizeof(whiteLevel), &whiteLevel); 2577 } 2578 2579 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2580 uint8_t fwk_effectMode = 2581 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2582 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2583 sizeof(EFFECT_MODES_MAP), 2584 fwk_effectMode); 2585 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2586 sizeof(effectMode), &effectMode); 2587 } 2588 2589 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2590 uint8_t fwk_aeMode = 2591 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2592 uint8_t aeMode; 2593 int32_t redeye; 2594 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 2595 aeMode = CAM_AE_MODE_OFF; 2596 } else { 2597 aeMode = CAM_AE_MODE_ON; 2598 } 2599 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 2600 redeye = 1; 2601 } else { 2602 redeye = 0; 2603 } 2604 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 2605 sizeof(AE_FLASH_MODE_MAP), 2606 fwk_aeMode); 2607 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2608 sizeof(aeMode), &aeMode); 2609 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2610 sizeof(flashMode), &flashMode); 2611 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 2612 sizeof(redeye), &redeye); 2613 } 2614 2615 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2616 int32_t metaFrameNumber = 2617 
frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2618 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2619 sizeof(metaFrameNumber), &metaFrameNumber); 2620 } 2621 2622 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2623 uint8_t colorCorrectMode = 2624 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2625 rc = 2626 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2627 sizeof(colorCorrectMode), &colorCorrectMode); 2628 } 2629 cam_trigger_t aecTrigger; 2630 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 2631 aecTrigger.trigger_id = -1; 2632 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 2633 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 2634 aecTrigger.trigger = 2635 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2636 aecTrigger.trigger_id = 2637 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 2638 } 2639 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2640 sizeof(aecTrigger), &aecTrigger); 2641 2642 /*af_trigger must come with a trigger id*/ 2643 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 2644 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 2645 cam_trigger_t af_trigger; 2646 af_trigger.trigger = 2647 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2648 af_trigger.trigger_id = 2649 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 2650 rc = AddSetParmEntryToBatch(mParameters, 2651 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 2652 } 2653 2654 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2655 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2656 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2657 sizeof(metaMode), &metaMode); 2658 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 2659 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0]; 2660 
            // Map the framework scene mode onto the HAL "bestshot" mode.
            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
                                  sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                  fwk_sceneMode);
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                    sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
            // Manual control: disable bestshot (0 == CAMERA_BESTSHOT_OFF).
            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                    sizeof(sceneMode), &sceneMode);
        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
            // Plain auto mode: likewise no bestshot/scene override.
            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
                    sizeof(sceneMode), &sceneMode);
        }
    }

    // --- Per-tag translation: each block below copies one framework tag,
    // when present in the request, into the HAL parameter batch. ---

    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        // NOTE(review): value is read from data.u8 but stored/sent as a
        // 4-byte int32_t — confirm the size CAM_INTF_META_DEMOSAIC expects.
        int32_t demosaic =
            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
                sizeof(demosaic), &demosaic);
    }

    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE,
                sizeof(edgeMode), &edgeMode);
    }

    if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
        int32_t edgeStrength =
            frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
    }

    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        uint8_t flashMode =
            frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode);
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower =
            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
                sizeof(flashPower), &flashPower);
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime =
            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
            CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
        uint8_t geometricMode =
            frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
                sizeof(geometricMode), &geometricMode);
    }

    if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
        uint8_t geometricStrength =
            frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_GEOMETRIC_STRENGTH,
                sizeof(geometricStrength), &geometricStrength);
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode =
            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
                sizeof(hotPixelMode), &hotPixelMode);
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture =
            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
                sizeof(lensAperture), &lensAperture);
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity =
            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
                sizeof(filterDensity), &filterDensity);
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength =
            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_FOCAL_LENGTH,
                sizeof(focalLength), &focalLength);
    }

    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_LENS_OPT_STAB_MODE,
                sizeof(optStabMode), &optStabMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode =
            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_MODE,
                sizeof(noiseRedMode), &noiseRedMode);
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
        uint8_t noiseRedStrength =
            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
                sizeof(noiseRedStrength), &noiseRedStrength);
    }

    // The crop region is remembered (scalerCropSet) because the 3A region
    // blocks below validate their ROIs against it.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height =
            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SCALER_CROP_REGION,
                sizeof(scalerCropRegion), &scalerCropRegion);
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sizeof(sensorExpTime), &sensorExpTime);
    }

    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_FRAME_DURATION,
                sizeof(sensorFrameDuration), &sensorFrameDuration);
    }

    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity =
            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_SENSOR_SENSITIVITY,
                sizeof(sensorSensitivity), &sensorSensitivity);
    }

    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        // NOTE(review): read from data.u8 but declared int32_t, so 4 bytes
        // are sent — inconsistent with the other u8 mode params; confirm
        // the size CAM_INTF_META_SHADING_MODE expects.
        int32_t shadingMode =
            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
                sizeof(shadingMode), &shadingMode);
    }

    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
        uint8_t shadingStrength =
            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
                sizeof(shadingStrength), &shadingStrength);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t facedetectMode =
            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_FACEDETECT_MODE,
                sizeof(facedetectMode), &facedetectMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_HISTOGRAM_MODE,
                sizeof(histogramMode), &histogramMode);
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sizeof(sharpnessMapMode), &sharpnessMapMode);
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters,
                CAM_INTF_META_TONEMAP_MODE,
                sizeof(tonemapMode), &tonemapMode);
    }

    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent =
            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                sizeof(captureIntent), &captureIntent);
    }

    // 3A regions: each ROI is converted from framework coordinates and,
    // when a crop region was set above, validated/reset against it; the
    // parameter is only batched when the ROI survives that check.
    // NOTE(review): convertFromRegions() is passed the raw 'settings'
    // buffer rather than 'frame_settings' — presumably intentional; verify.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
                    sizeof(roi), &roi);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset) {
            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
                    sizeof(roi), &roi);
        }
    }
    return rc;
2918} 2919 2920/*=========================================================================== 2921 * FUNCTION : getJpegSettings 2922 * 2923 * DESCRIPTION: save the jpeg settings in the HAL 2924 * 2925 * 2926 * PARAMETERS : 2927 * @settings : frame settings information from framework 2928 * 2929 * 2930 * RETURN : success: NO_ERROR 2931 * failure: 2932 *==========================================================================*/ 2933int QCamera3HardwareInterface::getJpegSettings 2934 (const camera_metadata_t *settings) 2935{ 2936 if (mJpegSettings) { 2937 if (mJpegSettings->gps_timestamp) { 2938 free(mJpegSettings->gps_timestamp); 2939 mJpegSettings->gps_timestamp = NULL; 2940 } 2941 if (mJpegSettings->gps_coordinates) { 2942 for (int i = 0; i < 3; i++) { 2943 free(mJpegSettings->gps_coordinates[i]); 2944 mJpegSettings->gps_coordinates[i] = NULL; 2945 } 2946 } 2947 free(mJpegSettings); 2948 mJpegSettings = NULL; 2949 } 2950 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2951 CameraMetadata jpeg_settings; 2952 jpeg_settings = settings; 2953 2954 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2955 mJpegSettings->jpeg_orientation = 2956 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2957 } else { 2958 mJpegSettings->jpeg_orientation = 0; 2959 } 2960 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2961 mJpegSettings->jpeg_quality = 2962 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2963 } else { 2964 mJpegSettings->jpeg_quality = 85; 2965 } 2966 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2967 mJpegSettings->thumbnail_size.width = 2968 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2969 mJpegSettings->thumbnail_size.height = 2970 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2971 } else { 2972 mJpegSettings->thumbnail_size.width = 0; 2973 mJpegSettings->thumbnail_size.height = 0; 2974 } 2975 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2976 for (int i = 0; i < 3; i++) { 
2977 mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*)); 2978 *(mJpegSettings->gps_coordinates[i]) = 2979 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2980 } 2981 } else{ 2982 for (int i = 0; i < 3; i++) { 2983 mJpegSettings->gps_coordinates[i] = NULL; 2984 } 2985 } 2986 2987 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2988 mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*)); 2989 *(mJpegSettings->gps_timestamp) = 2990 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2991 } else { 2992 mJpegSettings->gps_timestamp = NULL; 2993 } 2994 2995 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2996 int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count; 2997 for (int i = 0; i < len; i++) { 2998 mJpegSettings->gps_processing_method[i] = 2999 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i]; 3000 } 3001 if (mJpegSettings->gps_processing_method[len-1] != '\0') { 3002 mJpegSettings->gps_processing_method[len] = '\0'; 3003 } 3004 } else { 3005 mJpegSettings->gps_processing_method[0] = '\0'; 3006 } 3007 3008 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 3009 mJpegSettings->sensor_sensitivity = 3010 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 3011 } else { 3012 mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed; 3013 } 3014 3015 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 3016 mJpegSettings->lens_focal_length = 3017 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 3018 } 3019 if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 3020 mJpegSettings->exposure_compensation = 3021 jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 3022 } 3023 mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step; 3024 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 3025 return 0; 3026} 3027 
3028/*=========================================================================== 3029 * FUNCTION : captureResultCb 3030 * 3031 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 3032 * 3033 * PARAMETERS : 3034 * @frame : frame information from mm-camera-interface 3035 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 3036 * @userdata: userdata 3037 * 3038 * RETURN : NONE 3039 *==========================================================================*/ 3040void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 3041 camera3_stream_buffer_t *buffer, 3042 uint32_t frame_number, void *userdata) 3043{ 3044 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 3045 if (hw == NULL) { 3046 ALOGE("%s: Invalid hw %p", __func__, hw); 3047 return; 3048 } 3049 3050 hw->captureResultCb(metadata, buffer, frame_number); 3051 return; 3052} 3053 3054/*=========================================================================== 3055 * FUNCTION : initialize 3056 * 3057 * DESCRIPTION: Pass framework callback pointers to HAL 3058 * 3059 * PARAMETERS : 3060 * 3061 * 3062 * RETURN : Success : 0 3063 * Failure: -ENODEV 3064 *==========================================================================*/ 3065 3066int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 3067 const camera3_callback_ops_t *callback_ops) 3068{ 3069 ALOGV("%s: E", __func__); 3070 QCamera3HardwareInterface *hw = 3071 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3072 if (!hw) { 3073 ALOGE("%s: NULL camera device", __func__); 3074 return -ENODEV; 3075 } 3076 3077 int rc = hw->initialize(callback_ops); 3078 ALOGV("%s: X", __func__); 3079 return rc; 3080} 3081 3082/*=========================================================================== 3083 * FUNCTION : configure_streams 3084 * 3085 * DESCRIPTION: 3086 * 3087 * PARAMETERS : 3088 * 3089 * 3090 * RETURN : Success: 0 3091 * 
Failure: -EINVAL (if stream configuration is invalid) 3092 * -ENODEV (fatal error) 3093 *==========================================================================*/ 3094 3095int QCamera3HardwareInterface::configure_streams( 3096 const struct camera3_device *device, 3097 camera3_stream_configuration_t *stream_list) 3098{ 3099 ALOGV("%s: E", __func__); 3100 QCamera3HardwareInterface *hw = 3101 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3102 if (!hw) { 3103 ALOGE("%s: NULL camera device", __func__); 3104 return -ENODEV; 3105 } 3106 int rc = hw->configureStreams(stream_list); 3107 ALOGV("%s: X", __func__); 3108 return rc; 3109} 3110 3111/*=========================================================================== 3112 * FUNCTION : register_stream_buffers 3113 * 3114 * DESCRIPTION: Register stream buffers with the device 3115 * 3116 * PARAMETERS : 3117 * 3118 * RETURN : 3119 *==========================================================================*/ 3120int QCamera3HardwareInterface::register_stream_buffers( 3121 const struct camera3_device *device, 3122 const camera3_stream_buffer_set_t *buffer_set) 3123{ 3124 ALOGV("%s: E", __func__); 3125 QCamera3HardwareInterface *hw = 3126 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3127 if (!hw) { 3128 ALOGE("%s: NULL camera device", __func__); 3129 return -ENODEV; 3130 } 3131 int rc = hw->registerStreamBuffers(buffer_set); 3132 ALOGV("%s: X", __func__); 3133 return rc; 3134} 3135 3136/*=========================================================================== 3137 * FUNCTION : construct_default_request_settings 3138 * 3139 * DESCRIPTION: Configure a settings buffer to meet the required use case 3140 * 3141 * PARAMETERS : 3142 * 3143 * 3144 * RETURN : Success: Return valid metadata 3145 * Failure: Return NULL 3146 *==========================================================================*/ 3147const camera_metadata_t* QCamera3HardwareInterface:: 3148 construct_default_request_settings(const 
struct camera3_device *device, 3149 int type) 3150{ 3151 3152 ALOGV("%s: E", __func__); 3153 camera_metadata_t* fwk_metadata = NULL; 3154 QCamera3HardwareInterface *hw = 3155 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3156 if (!hw) { 3157 ALOGE("%s: NULL camera device", __func__); 3158 return NULL; 3159 } 3160 3161 fwk_metadata = hw->translateCapabilityToMetadata(type); 3162 3163 ALOGV("%s: X", __func__); 3164 return fwk_metadata; 3165} 3166 3167/*=========================================================================== 3168 * FUNCTION : process_capture_request 3169 * 3170 * DESCRIPTION: 3171 * 3172 * PARAMETERS : 3173 * 3174 * 3175 * RETURN : 3176 *==========================================================================*/ 3177int QCamera3HardwareInterface::process_capture_request( 3178 const struct camera3_device *device, 3179 camera3_capture_request_t *request) 3180{ 3181 ALOGV("%s: E", __func__); 3182 QCamera3HardwareInterface *hw = 3183 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3184 if (!hw) { 3185 ALOGE("%s: NULL camera device", __func__); 3186 return -EINVAL; 3187 } 3188 3189 int rc = hw->processCaptureRequest(request); 3190 ALOGV("%s: X", __func__); 3191 return rc; 3192} 3193 3194/*=========================================================================== 3195 * FUNCTION : get_metadata_vendor_tag_ops 3196 * 3197 * DESCRIPTION: 3198 * 3199 * PARAMETERS : 3200 * 3201 * 3202 * RETURN : 3203 *==========================================================================*/ 3204 3205void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 3206 const struct camera3_device *device, 3207 vendor_tag_query_ops_t* ops) 3208{ 3209 ALOGV("%s: E", __func__); 3210 QCamera3HardwareInterface *hw = 3211 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3212 if (!hw) { 3213 ALOGE("%s: NULL camera device", __func__); 3214 return; 3215 } 3216 3217 hw->getMetadataVendorTagOps(ops); 3218 ALOGV("%s: X", __func__); 3219 return; 
3220} 3221 3222/*=========================================================================== 3223 * FUNCTION : dump 3224 * 3225 * DESCRIPTION: 3226 * 3227 * PARAMETERS : 3228 * 3229 * 3230 * RETURN : 3231 *==========================================================================*/ 3232 3233void QCamera3HardwareInterface::dump( 3234 const struct camera3_device *device, int fd) 3235{ 3236 ALOGV("%s: E", __func__); 3237 QCamera3HardwareInterface *hw = 3238 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3239 if (!hw) { 3240 ALOGE("%s: NULL camera device", __func__); 3241 return; 3242 } 3243 3244 hw->dump(fd); 3245 ALOGV("%s: X", __func__); 3246 return; 3247} 3248 3249/*=========================================================================== 3250 * FUNCTION : close_camera_device 3251 * 3252 * DESCRIPTION: 3253 * 3254 * PARAMETERS : 3255 * 3256 * 3257 * RETURN : 3258 *==========================================================================*/ 3259int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3260{ 3261 ALOGV("%s: E", __func__); 3262 int ret = NO_ERROR; 3263 QCamera3HardwareInterface *hw = 3264 reinterpret_cast<QCamera3HardwareInterface *>( 3265 reinterpret_cast<camera3_device_t *>(device)->priv); 3266 if (!hw) { 3267 ALOGE("NULL camera device"); 3268 return BAD_VALUE; 3269 } 3270 delete hw; 3271 3272 pthread_mutex_lock(&mCameraSessionLock); 3273 mCameraSessionActive = 0; 3274 pthread_mutex_unlock(&mCameraSessionLock); 3275 ALOGV("%s: X", __func__); 3276 return ret; 3277} 3278 3279}; //end namespace qcamera 3280