// QCamera3HWI.cpp — revision bce5a07247b615d829c84fbcf4da8c785f53b575
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 */

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {

#define MAX(a, b) ((a) > (b) ? (a) : (b))

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability tables; indexed by camera id, filled in by the
// capability query path (not visible in this file chunk).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes camera session open/close; mCameraSessionActive enforces the
// single-simultaneous-session policy (see openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// Translation tables mapping android.control.* framework enum values to the
// corresponding mm-camera (CAM_*) enum values.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO has no direct CAM_ equivalent; ANTISHAKE is the mapping.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF_MODE_OFF maps to FIXED focus on this hardware.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash mode: plain ON (no flash) maps to FLASH_MODE_OFF, and
// REDEYE is treated the same as AUTO flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,  CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

// Flat (width, height) pairs; the trailing 0,0 entry terminates the list.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3 HAL entry points, dispatched to the static trampolines below.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:
QCamera3HardwareInterface::get_metadata_vendor_tag_ops, 148 dump: QCamera3HardwareInterface::dump, 149 flush: QCamera3HardwareInterface::flush, 150 reserved: {0}, 151}; 152 153 154/*=========================================================================== 155 * FUNCTION : QCamera3HardwareInterface 156 * 157 * DESCRIPTION: constructor of QCamera3HardwareInterface 158 * 159 * PARAMETERS : 160 * @cameraId : camera ID 161 * 162 * RETURN : none 163 *==========================================================================*/ 164QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 165 : mCameraId(cameraId), 166 mCameraHandle(NULL), 167 mCameraOpened(false), 168 mCameraInitialized(false), 169 mCallbackOps(NULL), 170 mInputStream(NULL), 171 mMetadataChannel(NULL), 172 mPictureChannel(NULL), 173 mFirstRequest(false), 174 mParamHeap(NULL), 175 mParameters(NULL), 176 mJpegSettings(NULL), 177 mIsZslMode(false), 178 mMinProcessedFrameDuration(0), 179 mMinJpegFrameDuration(0), 180 mMinRawFrameDuration(0), 181 m_pPowerModule(NULL) 182{ 183 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 184 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 185 mCameraDevice.common.close = close_camera_device; 186 mCameraDevice.ops = &mCameraOps; 187 mCameraDevice.priv = this; 188 gCamCapability[cameraId]->version = CAM_HAL_V3; 189 // TODO: hardcode for now until mctl add support for min_num_pp_bufs 190 //TBD - To see if this hardcoding is needed. 
Check by printing if this is filled by mctl to 3 191 gCamCapability[cameraId]->min_num_pp_bufs = 3; 192 193 pthread_cond_init(&mRequestCond, NULL); 194 mPendingRequest = 0; 195 mCurrentRequestId = -1; 196 pthread_mutex_init(&mMutex, NULL); 197 198 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 199 mDefaultMetadata[i] = NULL; 200 201#ifdef HAS_MULTIMEDIA_HINTS 202 if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) { 203 ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID); 204 } 205#endif 206} 207 208/*=========================================================================== 209 * FUNCTION : ~QCamera3HardwareInterface 210 * 211 * DESCRIPTION: destructor of QCamera3HardwareInterface 212 * 213 * PARAMETERS : none 214 * 215 * RETURN : none 216 *==========================================================================*/ 217QCamera3HardwareInterface::~QCamera3HardwareInterface() 218{ 219 ALOGV("%s: E", __func__); 220 /* We need to stop all streams before deleting any stream */ 221 /*flush the metadata list*/ 222 if (!mStoredMetadataList.empty()) { 223 for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin(); 224 m != mStoredMetadataList.end(); m++) { 225 mMetadataChannel->bufDone(m->meta_buf); 226 free(m->meta_buf); 227 m = mStoredMetadataList.erase(m); 228 } 229 } 230 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 231 it != mStreamInfo.end(); it++) { 232 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 233 if (channel) 234 channel->stop(); 235 } 236 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 237 it != mStreamInfo.end(); it++) { 238 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 239 if (channel) 240 delete channel; 241 free (*it); 242 } 243 244 mPictureChannel = NULL; 245 246 if (mJpegSettings != NULL) { 247 free(mJpegSettings); 248 mJpegSettings = NULL; 249 } 250 251 /* Clean up all channels */ 252 if (mCameraInitialized) { 
253 mMetadataChannel->stop(); 254 delete mMetadataChannel; 255 mMetadataChannel = NULL; 256 deinitParameters(); 257 } 258 259 if (mCameraOpened) 260 closeCamera(); 261 262 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 263 if (mDefaultMetadata[i]) 264 free_camera_metadata(mDefaultMetadata[i]); 265 266 pthread_cond_destroy(&mRequestCond); 267 268 pthread_mutex_destroy(&mMutex); 269 ALOGV("%s: X", __func__); 270} 271 272/*=========================================================================== 273 * FUNCTION : openCamera 274 * 275 * DESCRIPTION: open camera 276 * 277 * PARAMETERS : 278 * @hw_device : double ptr for camera device struct 279 * 280 * RETURN : int32_t type of status 281 * NO_ERROR -- success 282 * none-zero failure code 283 *==========================================================================*/ 284int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 285{ 286 int rc = 0; 287 pthread_mutex_lock(&mCameraSessionLock); 288 if (mCameraSessionActive) { 289 ALOGE("%s: multiple simultaneous camera instance not supported", __func__); 290 pthread_mutex_unlock(&mCameraSessionLock); 291 return INVALID_OPERATION; 292 } 293 294 if (mCameraOpened) { 295 *hw_device = NULL; 296 return PERMISSION_DENIED; 297 } 298 299 rc = openCamera(); 300 if (rc == 0) { 301 *hw_device = &mCameraDevice.common; 302 mCameraSessionActive = 1; 303 } else 304 *hw_device = NULL; 305 306#ifdef HAS_MULTIMEDIA_HINTS 307 if (rc == 0) { 308 if (m_pPowerModule) { 309 if (m_pPowerModule->powerHint) { 310 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, 311 (void *)"state=1"); 312 } 313 } 314 } 315#endif 316 pthread_mutex_unlock(&mCameraSessionLock); 317 return rc; 318} 319 320/*=========================================================================== 321 * FUNCTION : openCamera 322 * 323 * DESCRIPTION: open camera 324 * 325 * PARAMETERS : none 326 * 327 * RETURN : int32_t type of status 328 * NO_ERROR -- success 329 * none-zero failure code 330 
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    // Refuse double-open; the session-level check lives in the public
    // openCamera(hw_device) overload.
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    // NOTE(review): assumes mCameraHandle is non-NULL; callers in this file
    // only invoke closeCamera() after a successful openCamera().
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    // Drop the encode power hint taken in openCamera(hw_device).
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // NOTE(review): mCameraInitialized is set after the mutex is released;
    // confirm no concurrent reader depends on it being set under mMutex.
    mCameraInitialized = true;
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    mIsZslMode = false;

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        return BAD_VALUE;
    }
    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
        channel->stop();
        (*it)->status = INVALID;
    }
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        mMetadataChannel->stop();
    }

    pthread_mutex_lock(&mMutex);

    camera3_stream_t *inputStream = NULL;
    camera3_stream_t *jpegStream = NULL;
    cam_stream_size_info_t stream_config_info;

    // Pass 1: match incoming streams against existing bookkeeping, noting
    // input and BLOB (jpeg) streams along the way.
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
                __func__, newStream->stream_type, newStream->format,
                newStream->width, newStream->height);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                delete channel;
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc return is not checked here.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }
    mInputStream = inputStream;

    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }
    // Metadata channel is rebuilt from scratch on every configure.
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    // NOTE(review): plain new never returns NULL (it throws); this check is
    // effectively dead unless a nothrow allocator is in play.
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        stream_config_info.stream_sizes[i].width = newStream->width;
        stream_config_info.stream_sizes[i].height = newStream->height;
        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
            //for zsl stream the size is jpeg size
            stream_config_info.stream_sizes[i].width = jpegStream->width;
            stream_config_info.stream_sizes[i].height = jpegStream->height;
            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
        } else {
            //for non zsl streams find out the format
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
            {
                // Encoder-bound opaque streams become VIDEO, else PREVIEW.
                if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
                } else {
                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
                }
            }
            break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
            break;
            case HAL_PIXEL_FORMAT_BLOB:
                stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
            break;
            default:
                stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
            break;
            }
        }
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Set gralloc usage bits the camera needs on these buffers.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage =
                        (GRALLOC_USAGE_SW_READ_RARELY |
                        GRALLOC_USAGE_SW_WRITE_RARELY |
                        GRALLOC_USAGE_HW_CAMERA_WRITE);
                else
                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                case HAL_PIXEL_FORMAT_YCbCr_420_888:
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
                        jpegStream) {
                        // ZSL path: internal stream runs at jpeg resolution.
                        uint32_t width = jpegStream->width;
                        uint32_t height = jpegStream->height;
                        mIsZslMode = true;
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            width, height);
                    } else
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3Channel*)mPictureChannel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGV("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the pending-buffer count for every surviving stream.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    /*flush the metadata list*/
    if (!mStoredMetadataList.empty()) {
        // NOTE(review): `m = erase(m)` already advances the iterator, so the
        // additional m++ in the loop header skips every other entry — verify
        // whether this loop is intended to drain the whole list.
        for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
              m != mStoredMetadataList.end(); m++) {
            mMetadataChannel->bufDone(m->meta_buf);
            free(m->meta_buf);
            m = mStoredMetadataList.erase(m);
        }
    }
    int32_t hal_version = CAM_HAL_V3;
    stream_config_info.num_streams = streamList->num_streams;

    //settings/parameters don't carry over for new configureStreams
    memset(mParameters, 0, sizeof(parm_buffer_t));

    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
                sizeof(hal_version), &hal_version);

    AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
                sizeof(stream_config_info), &stream_config_info);

    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);

    mFirstRequest = true;

    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request  : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    /* Sanity check the request */
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }
    // Validate the input buffer (same four checks as the output loop below).
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : deriveMinFrameDuration
 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
 *              on currently configured streams.
 *
 * PARAMETERS : NONE
 *
 * RETURN     : NONE
 *
 *==========================================================================*/
void QCamera3HardwareInterface::deriveMinFrameDuration()
{
    int32_t maxJpegDimension, maxProcessedDimension;

    maxJpegDimension = 0;
    maxProcessedDimension = 0;

    // Figure out maximum jpeg, processed, and raw dimensions
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {

        // Input stream doesn't have valid stream_type
        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
            continue;

        // Dimension here means pixel count (width * height).
        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            if (dimension > maxJpegDimension)
                maxJpegDimension = dimension;
        } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
            if (dimension > maxProcessedDimension)
                maxProcessedDimension = dimension;
        }
    }

    //Assume all jpeg dimensions are in processed dimensions.
    if (maxJpegDimension > maxProcessedDimension)
        maxProcessedDimension = maxJpegDimension;

    //Find minimum durations for processed, jpeg, and raw
    mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
    // Look up the capability entry whose picture size matches the largest
    // configured dimension; both processed and jpeg minimums come from the
    // jpeg_min_duration table. Durations stay 0 if no entry matches.
    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
        if (maxProcessedDimension ==
                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
            mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
            mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
            break;
        }
    }
}

/*===========================================================================
 * FUNCTION   : getMinFrameDuration
 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
 *
 *==========================================================================*/
int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
{
    bool hasJpegStream = false;
    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
        const camera3_stream_t *stream = request->output_buffers[i].stream;
        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
            hasJpegStream = true;
    }

    // Jpeg minimum only constrains requests that actually include a BLOB
    // output; otherwise raw/processed minimums bound the duration.
    if (!hasJpegStream)
        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
    else
        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    // NOTE(review): channel is not null-checked before use below; a stream
    // that never went through configureStreams would crash here.
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            // Keep a private copy of the handle array; freed with delete[]
            // when the stream is invalidated in configureStreams.
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request  : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;
    MetadataBufferInfo reproc_meta;
    int queueMetadata = 0;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;

    //
For first capture request, send capture intent, and 1002 // stream on all streams 1003 if (mFirstRequest) { 1004 1005 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 1006 int32_t hal_version = CAM_HAL_V3; 1007 uint8_t captureIntent = 1008 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 1009 1010 memset(mParameters, 0, sizeof(parm_buffer_t)); 1011 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 1012 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 1013 sizeof(hal_version), &hal_version); 1014 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 1015 sizeof(captureIntent), &captureIntent); 1016 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, 1017 mParameters); 1018 } 1019 1020 mMetadataChannel->start(); 1021 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 1022 it != mStreamInfo.end(); it++) { 1023 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 1024 channel->start(); 1025 } 1026 } 1027 1028 uint32_t frameNumber = request->frame_number; 1029 uint32_t streamTypeMask = 0; 1030 1031 if (meta.exists(ANDROID_REQUEST_ID)) { 1032 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0]; 1033 mCurrentRequestId = request_id; 1034 ALOGV("%s: Received request with id: %d",__func__, request_id); 1035 } else if (mFirstRequest || mCurrentRequestId == -1){ 1036 ALOGE("%s: Unable to find request id field, \ 1037 & no previous id available", __func__); 1038 return NAME_NOT_FOUND; 1039 } else { 1040 ALOGV("%s: Re-using old request id", __func__); 1041 request_id = mCurrentRequestId; 1042 } 1043 1044 ALOGE("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d", 1045 __func__, __LINE__, 1046 request->num_output_buffers, 1047 request->input_buffer, 1048 frameNumber); 1049 // Acquire all request buffers first 1050 int blob_request = 0; 1051 for (size_t i = 0; i < request->num_output_buffers; i++) { 1052 const camera3_stream_buffer_t& output = request->output_buffers[i]; 1053 QCamera3Channel 
*channel = (QCamera3Channel *)output.stream->priv; 1054 sp<Fence> acquireFence = new Fence(output.acquire_fence); 1055 1056 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 1057 //Call function to store local copy of jpeg data for encode params. 1058 blob_request = 1; 1059 rc = getJpegSettings(request->settings); 1060 if (rc < 0) { 1061 ALOGE("%s: failed to get jpeg parameters", __func__); 1062 pthread_mutex_unlock(&mMutex); 1063 return rc; 1064 } 1065 } 1066 1067 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 1068 if (rc != OK) { 1069 ALOGE("%s: fence wait failed %d", __func__, rc); 1070 pthread_mutex_unlock(&mMutex); 1071 return rc; 1072 } 1073 streamTypeMask |= channel->getStreamTypeMask(); 1074 } 1075 1076 rc = setFrameParameters(request, streamTypeMask); 1077 if (rc < 0) { 1078 ALOGE("%s: fail to set frame parameters", __func__); 1079 pthread_mutex_unlock(&mMutex); 1080 return rc; 1081 } 1082 1083 /* Update pending request list and pending buffers map */ 1084 PendingRequestInfo pendingRequest; 1085 pendingRequest.frame_number = frameNumber; 1086 pendingRequest.num_buffers = request->num_output_buffers; 1087 pendingRequest.request_id = request_id; 1088 pendingRequest.blob_request = blob_request; 1089 pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 
1 : 0; 1090 1091 for (size_t i = 0; i < request->num_output_buffers; i++) { 1092 RequestedBufferInfo requestedBuf; 1093 requestedBuf.stream = request->output_buffers[i].stream; 1094 requestedBuf.buffer = NULL; 1095 pendingRequest.buffers.push_back(requestedBuf); 1096 1097 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 1098 } 1099 mPendingRequestsList.push_back(pendingRequest); 1100 1101 // Notify metadata channel we receive a request 1102 mMetadataChannel->request(NULL, frameNumber); 1103 1104 // Call request on other streams 1105 for (size_t i = 0; i < request->num_output_buffers; i++) { 1106 const camera3_stream_buffer_t& output = request->output_buffers[i]; 1107 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 1108 mm_camera_buf_def_t *pInputBuffer = NULL; 1109 1110 if (channel == NULL) { 1111 ALOGE("%s: invalid channel pointer for stream", __func__); 1112 continue; 1113 } 1114 1115 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 1116 QCamera3RegularChannel* inputChannel = NULL; 1117 if(request->input_buffer != NULL){ 1118 //Try to get the internal format 1119 inputChannel = (QCamera3RegularChannel*) 1120 request->input_buffer->stream->priv; 1121 if(inputChannel == NULL ){ 1122 ALOGE("%s: failed to get input channel handle", __func__); 1123 } else { 1124 pInputBuffer = 1125 inputChannel->getInternalFormatBuffer( 1126 request->input_buffer->buffer); 1127 ALOGD("%s: Input buffer dump",__func__); 1128 ALOGD("Stream id: %d", pInputBuffer->stream_id); 1129 ALOGD("streamtype:%d", pInputBuffer->stream_type); 1130 ALOGD("frame len:%d", pInputBuffer->frame_len); 1131 ALOGD("Handle:%p", request->input_buffer->buffer); 1132 //TODO: need to get corresponding metadata and send it to pproc 1133 for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin(); 1134 m != mStoredMetadataList.end(); m++) { 1135 if (m->zsl_buf_hdl == request->input_buffer->buffer) { 1136 reproc_meta.meta_buf = m->meta_buf; 1137 queueMetadata = 1; 1138 break; 
1139 } 1140 } 1141 } 1142 } 1143 rc = channel->request(output.buffer, frameNumber, mJpegSettings, 1144 pInputBuffer,(QCamera3Channel*)inputChannel); 1145 if (queueMetadata) { 1146 mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false); 1147 } 1148 } else { 1149 ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__, 1150 __LINE__, output.buffer, frameNumber); 1151 if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 1152 for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin(); 1153 m != mStoredMetadataList.end(); m++) { 1154 for (uint32_t j = 0; j < request->num_output_buffers; j++) { 1155 if (m->zsl_buf_hdl == request->output_buffers[j].buffer) { 1156 mMetadataChannel->bufDone(m->meta_buf); 1157 free(m->meta_buf); 1158 m = mStoredMetadataList.erase(m); 1159 break; 1160 } 1161 } 1162 } 1163 } 1164 rc = channel->request(output.buffer, frameNumber); 1165 } 1166 if (rc < 0) 1167 ALOGE("%s: request failed", __func__); 1168 } 1169 1170 mFirstRequest = false; 1171 1172 //Block on conditional variable 1173 mPendingRequest = 1; 1174 while (mPendingRequest == 1) { 1175 pthread_cond_wait(&mRequestCond, &mMutex); 1176 } 1177 1178 pthread_mutex_unlock(&mMutex); 1179 return rc; 1180} 1181 1182/*=========================================================================== 1183 * FUNCTION : getMetadataVendorTagOps 1184 * 1185 * DESCRIPTION: 1186 * 1187 * PARAMETERS : 1188 * 1189 * 1190 * RETURN : 1191 *==========================================================================*/ 1192void QCamera3HardwareInterface::getMetadataVendorTagOps( 1193 vendor_tag_query_ops_t* /*ops*/) 1194{ 1195 /* Enable locks when we eventually add Vendor Tags */ 1196 /* 1197 pthread_mutex_lock(&mMutex); 1198 1199 pthread_mutex_unlock(&mMutex); 1200 */ 1201 return; 1202} 1203 1204/*=========================================================================== 1205 * FUNCTION : dump 1206 * 1207 * DESCRIPTION: 1208 * 1209 * PARAMETERS : 
 *   @fd : file descriptor to write the dump into (currently unused)
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : flush
 *
 * DESCRIPTION: camera3 flush() entry point; intended to drop all in-flight
 *              requests as quickly as possible. Currently a no-op stub that
 *              reports success.
 *
 * PARAMETERS : none
 *
 * RETURN     : 0 -- success (nothing to flush yet)
 *==========================================================================*/
int QCamera3HardwareInterface::flush()
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return 0;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
1256 * 1257 * RETURN : NONE 1258 *==========================================================================*/ 1259void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf, 1260 camera3_stream_buffer_t *buffer, uint32_t frame_number) 1261{ 1262 pthread_mutex_lock(&mMutex); 1263 1264 if (metadata_buf) { 1265 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer; 1266 int32_t frame_number_valid = *(int32_t *) 1267 POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata); 1268 uint32_t pending_requests = *(uint32_t *)POINTER_OF( 1269 CAM_INTF_META_PENDING_REQUESTS, metadata); 1270 uint32_t frame_number = *(uint32_t *) 1271 POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata); 1272 const struct timeval *tv = (const struct timeval *) 1273 POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata); 1274 nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC + 1275 tv->tv_usec * NSEC_PER_USEC; 1276 1277 if (!frame_number_valid) { 1278 ALOGV("%s: Not a valid frame number, used as SOF only", __func__); 1279 mMetadataChannel->bufDone(metadata_buf); 1280 goto done_metadata; 1281 } 1282 ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__, 1283 frame_number, capture_time); 1284 1285 // Go through the pending requests info and send shutter/results to frameworks 1286 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1287 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) { 1288 camera3_capture_result_t result; 1289 camera3_notify_msg_t notify_msg; 1290 ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number); 1291 1292 // Flush out all entries with less or equal frame numbers. 1293 1294 //TODO: Make sure shutter timestamp really reflects shutter timestamp. 1295 //Right now it's the same as metadata timestamp 1296 1297 //TODO: When there is metadata drop, how do we derive the timestamp of 1298 //dropped frames? 
For now, we fake the dropped timestamp by substracting 1299 //from the reported timestamp 1300 nsecs_t current_capture_time = capture_time - 1301 (frame_number - i->frame_number) * NSEC_PER_33MSEC; 1302 1303 // Send shutter notify to frameworks 1304 notify_msg.type = CAMERA3_MSG_SHUTTER; 1305 notify_msg.message.shutter.frame_number = i->frame_number; 1306 notify_msg.message.shutter.timestamp = current_capture_time; 1307 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 1308 ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__, 1309 i->frame_number, capture_time); 1310 1311 // Send empty metadata with already filled buffers for dropped metadata 1312 // and send valid metadata with already filled buffers for current metadata 1313 if (i->frame_number < frame_number) { 1314 CameraMetadata dummyMetadata; 1315 dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP, 1316 ¤t_capture_time, 1); 1317 dummyMetadata.update(ANDROID_REQUEST_ID, 1318 &(i->request_id), 1); 1319 result.result = dummyMetadata.release(); 1320 } else { 1321 result.result = translateCbMetadataToResultMetadata(metadata, 1322 current_capture_time, i->request_id); 1323 if (mIsZslMode) { 1324 int found_metadata = 0; 1325 //for ZSL case store the metadata buffer and corresp. ZSL handle ptr 1326 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1327 j != i->buffers.end(); j++) { 1328 if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 1329 //check if corresp. 
zsl already exists in the stored metadata list 1330 for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin(); 1331 m != mStoredMetadataList.begin(); m++) { 1332 if (m->frame_number == frame_number) { 1333 m->meta_buf = metadata_buf; 1334 found_metadata = 1; 1335 break; 1336 } 1337 } 1338 if (!found_metadata) { 1339 MetadataBufferInfo store_meta_info; 1340 store_meta_info.meta_buf = metadata_buf; 1341 store_meta_info.frame_number = frame_number; 1342 mStoredMetadataList.push_back(store_meta_info); 1343 found_metadata = 1; 1344 } 1345 } 1346 } 1347 if (!found_metadata) { 1348 if (!i->input_buffer_present && i->blob_request) { 1349 //livesnapshot or fallback non-zsl snapshot case 1350 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1351 j != i->buffers.end(); j++){ 1352 if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT && 1353 j->stream->format == HAL_PIXEL_FORMAT_BLOB) { 1354 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true); 1355 break; 1356 } 1357 } 1358 } else { 1359 //return the metadata immediately 1360 mMetadataChannel->bufDone(metadata_buf); 1361 free(metadata_buf); 1362 } 1363 } 1364 } else if (!mIsZslMode && i->blob_request) { 1365 //If it is a blob request then send the metadata to the picture channel 1366 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true); 1367 } else { 1368 // Return metadata buffer 1369 mMetadataChannel->bufDone(metadata_buf); 1370 free(metadata_buf); 1371 } 1372 1373 } 1374 if (!result.result) { 1375 ALOGE("%s: metadata is NULL", __func__); 1376 } 1377 result.frame_number = i->frame_number; 1378 result.num_output_buffers = 0; 1379 result.output_buffers = NULL; 1380 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1381 j != i->buffers.end(); j++) { 1382 if (j->buffer) { 1383 result.num_output_buffers++; 1384 } 1385 } 1386 1387 if (result.num_output_buffers > 0) { 1388 camera3_stream_buffer_t *result_buffers = 1389 new 
camera3_stream_buffer_t[result.num_output_buffers]; 1390 if (!result_buffers) { 1391 ALOGE("%s: Fatal error: out of memory", __func__); 1392 } 1393 size_t result_buffers_idx = 0; 1394 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1395 j != i->buffers.end(); j++) { 1396 if (j->buffer) { 1397 result_buffers[result_buffers_idx++] = *(j->buffer); 1398 free(j->buffer); 1399 j->buffer = NULL; 1400 mPendingBuffersMap.editValueFor(j->stream)--; 1401 } 1402 } 1403 result.output_buffers = result_buffers; 1404 1405 mCallbackOps->process_capture_result(mCallbackOps, &result); 1406 ALOGV("%s: meta frame_number = %d, capture_time = %lld", 1407 __func__, result.frame_number, current_capture_time); 1408 free_camera_metadata((camera_metadata_t *)result.result); 1409 delete[] result_buffers; 1410 } else { 1411 mCallbackOps->process_capture_result(mCallbackOps, &result); 1412 ALOGE("%s: meta frame_number = %d, capture_time = %lld", 1413 __func__, result.frame_number, current_capture_time); 1414 free_camera_metadata((camera_metadata_t *)result.result); 1415 } 1416 // erase the element from the list 1417 i = mPendingRequestsList.erase(i); 1418 } 1419 1420 1421done_metadata: 1422 bool max_buffers_dequeued = false; 1423 for (size_t i = 0; i < mPendingBuffersMap.size(); i++) { 1424 const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i); 1425 uint32_t queued_buffers = mPendingBuffersMap.valueAt(i); 1426 if (queued_buffers == stream->max_buffers) { 1427 max_buffers_dequeued = true; 1428 break; 1429 } 1430 } 1431 if (!max_buffers_dequeued && !pending_requests) { 1432 // Unblock process_capture_request 1433 mPendingRequest = 0; 1434 pthread_cond_signal(&mRequestCond); 1435 } 1436 } else { 1437 // If the frame number doesn't exist in the pending request list, 1438 // directly send the buffer to the frameworks, and update pending buffers map 1439 // Otherwise, book-keep the buffer. 
1440 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1441 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){ 1442 i++; 1443 } 1444 if (i == mPendingRequestsList.end()) { 1445 // Verify all pending requests frame_numbers are greater 1446 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); 1447 j != mPendingRequestsList.end(); j++) { 1448 if (j->frame_number < frame_number) { 1449 ALOGE("%s: Error: pending frame number %d is smaller than %d", 1450 __func__, j->frame_number, frame_number); 1451 } 1452 } 1453 camera3_capture_result_t result; 1454 result.result = NULL; 1455 result.frame_number = frame_number; 1456 result.num_output_buffers = 1; 1457 result.output_buffers = buffer; 1458 ALOGV("%s: result frame_number = %d, buffer = %p", 1459 __func__, frame_number, buffer); 1460 mPendingBuffersMap.editValueFor(buffer->stream)--; 1461 if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 1462 int found = 0; 1463 for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin(); 1464 k != mStoredMetadataList.end(); k++) { 1465 if (k->frame_number == frame_number) { 1466 k->zsl_buf_hdl = buffer->buffer; 1467 found = 1; 1468 break; 1469 } 1470 } 1471 if (!found) { 1472 MetadataBufferInfo meta_info; 1473 meta_info.frame_number = frame_number; 1474 meta_info.zsl_buf_hdl = buffer->buffer; 1475 mStoredMetadataList.push_back(meta_info); 1476 } 1477 } 1478 mCallbackOps->process_capture_result(mCallbackOps, &result); 1479 } else { 1480 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1481 j != i->buffers.end(); j++) { 1482 if (j->stream == buffer->stream) { 1483 if (j->buffer != NULL) { 1484 ALOGE("%s: Error: buffer is already set", __func__); 1485 } else { 1486 j->buffer = (camera3_stream_buffer_t *)malloc( 1487 sizeof(camera3_stream_buffer_t)); 1488 *(j->buffer) = *buffer; 1489 ALOGV("%s: cache buffer %p at result frame_number %d", 1490 __func__, buffer, frame_number); 1491 } 
1492 } 1493 } 1494 } 1495 } 1496 pthread_mutex_unlock(&mMutex); 1497 return; 1498} 1499 1500/*=========================================================================== 1501 * FUNCTION : translateCbMetadataToResultMetadata 1502 * 1503 * DESCRIPTION: 1504 * 1505 * PARAMETERS : 1506 * @metadata : metadata information from callback 1507 * 1508 * RETURN : camera_metadata_t* 1509 * metadata in a format specified by fwk 1510 *==========================================================================*/ 1511camera_metadata_t* 1512QCamera3HardwareInterface::translateCbMetadataToResultMetadata 1513 (metadata_buffer_t *metadata, nsecs_t timestamp, 1514 int32_t request_id) 1515{ 1516 CameraMetadata camMetadata; 1517 camera_metadata_t* resultMetadata; 1518 1519 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 1520 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1); 1521 1522 /*CAM_INTF_META_HISTOGRAM - TODO*/ 1523 /*cam_hist_stats_t *histogram = 1524 (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM, 1525 metadata);*/ 1526 1527 /*face detection*/ 1528 cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *) 1529 POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata); 1530 uint8_t numFaces = faceDetectionInfo->num_faces_detected; 1531 int32_t faceIds[numFaces]; 1532 uint8_t faceScores[numFaces]; 1533 int32_t faceRectangles[numFaces * 4]; 1534 int32_t faceLandmarks[numFaces * 6]; 1535 int j = 0, k = 0; 1536 for (int i = 0; i < numFaces; i++) { 1537 faceIds[i] = faceDetectionInfo->faces[i].face_id; 1538 faceScores[i] = faceDetectionInfo->faces[i].score; 1539 convertToRegions(faceDetectionInfo->faces[i].face_boundary, 1540 faceRectangles+j, -1); 1541 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 1542 j+= 4; 1543 k+= 6; 1544 } 1545 if (numFaces > 0) { 1546 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 1547 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 1548 
camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, 1549 faceRectangles, numFaces*4); 1550 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1551 faceLandmarks, numFaces*6); 1552 } 1553 1554 uint8_t *color_correct_mode = 1555 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1556 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1557 1558 int32_t *ae_precapture_id = 1559 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1560 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1561 1562 /*aec regions*/ 1563 cam_area_t *hAeRegions = 1564 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1565 int32_t aeRegions[5]; 1566 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1567 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1568 1569 uint8_t *ae_state = 1570 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1571 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1572 1573 uint8_t *focusMode = 1574 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1575 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1576 1577 /*af regions*/ 1578 cam_area_t *hAfRegions = 1579 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1580 int32_t afRegions[5]; 1581 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1582 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1583 1584 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1585 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1586 1587 int32_t *afTriggerId = 1588 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1589 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1590 1591 uint8_t *whiteBalance = 1592 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1593 camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1); 1594 1595 /*awb regions*/ 1596 cam_area_t *hAwbRegions 
= 1597 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1598 int32_t awbRegions[5]; 1599 convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1600 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1601 1602 uint8_t *whiteBalanceState = 1603 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1604 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1605 1606 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata); 1607 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1608 1609 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata); 1610 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1611 1612 uint8_t *flashPower = 1613 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1614 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1615 1616 int64_t *flashFiringTime = 1617 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1618 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1619 1620 /*int32_t *ledMode = 1621 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1622 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1623 1624 uint8_t *flashState = 1625 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1626 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1627 1628 uint8_t *hotPixelMode = 1629 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1630 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1631 1632 float *lensAperture = 1633 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1634 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1635 1636 float *filterDensity = 1637 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1638 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1639 1640 float *focalLength = 1641 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1642 
camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1643 1644 float *focusDistance = 1645 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1646 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1647 1648 float *focusRange = 1649 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1650 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1651 1652 uint8_t *opticalStab = 1653 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1654 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1655 1656 /*int32_t *focusState = 1657 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1658 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1659 1660 uint8_t *noiseRedMode = 1661 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1662 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1663 1664 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1665 1666 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1667 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1668 int32_t scalerCropRegion[4]; 1669 scalerCropRegion[0] = hScalerCropRegion->left; 1670 scalerCropRegion[1] = hScalerCropRegion->top; 1671 scalerCropRegion[2] = hScalerCropRegion->width; 1672 scalerCropRegion[3] = hScalerCropRegion->height; 1673 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4); 1674 1675 int64_t *sensorExpTime = 1676 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1677 mMetadataResponse.exposure_time = *sensorExpTime; 1678 ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime); 1679 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1680 1681 int64_t *sensorFameDuration = 1682 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1683 ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration); 1684 
camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1685 1686 int32_t *sensorSensitivity = 1687 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1688 ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity); 1689 mMetadataResponse.iso_speed = *sensorSensitivity; 1690 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1691 1692 uint8_t *shadingMode = 1693 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1694 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1695 1696 uint8_t *faceDetectMode = 1697 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1698 uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP, 1699 sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), 1700 *faceDetectMode); 1701 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1); 1702 1703 uint8_t *histogramMode = 1704 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1705 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1706 1707 uint8_t *sharpnessMapMode = 1708 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1709 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1710 sharpnessMapMode, 1); 1711 1712 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1713 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1714 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1715 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1716 (int32_t*)sharpnessMap->sharpness, 1717 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1718 1719 cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *) 1720 POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata); 1721 int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height; 1722 int map_width = gCamCapability[mCameraId]->lens_shading_map_size.width; 1723 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP, 1724 
(float*)lensShadingMap->lens_shading, 1725 4*map_width*map_height); 1726 1727 //Populate CAM_INTF_META_TONEMAP_CURVES 1728 /* ch0 = G, ch 1 = B, ch 2 = R*/ 1729 cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *) 1730 POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata); 1731 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN, 1732 (float*)tonemap->curves[0].tonemap_points, 1733 tonemap->tonemap_points_cnt * 2); 1734 1735 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE, 1736 (float*)tonemap->curves[1].tonemap_points, 1737 tonemap->tonemap_points_cnt * 2); 1738 1739 camMetadata.update(ANDROID_TONEMAP_CURVE_RED, 1740 (float*)tonemap->curves[2].tonemap_points, 1741 tonemap->tonemap_points_cnt * 2); 1742 1743 cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*) 1744 POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata); 1745 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4); 1746 1747 cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*) 1748 POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata); 1749 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM, 1750 (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3); 1751 1752 cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*) 1753 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata); 1754 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, 1755 predColorCorrectionGains->gains, 4); 1756 1757 cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*) 1758 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata); 1759 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM, 1760 (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3); 1761 1762 uint8_t *blackLevelLock = (uint8_t*) 1763 POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata); 1764 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, 
blackLevelLock, 1); 1765 1766 uint8_t *sceneFlicker = (uint8_t*) 1767 POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata); 1768 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1); 1769 1770 1771 resultMetadata = camMetadata.release(); 1772 return resultMetadata; 1773} 1774 1775/*=========================================================================== 1776 * FUNCTION : convertToRegions 1777 * 1778 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1779 * 1780 * PARAMETERS : 1781 * @rect : cam_rect_t struct to convert 1782 * @region : int32_t destination array 1783 * @weight : if we are converting from cam_area_t, weight is valid 1784 * else weight = -1 1785 * 1786 *==========================================================================*/ 1787void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){ 1788 region[0] = rect.left; 1789 region[1] = rect.top; 1790 region[2] = rect.left + rect.width; 1791 region[3] = rect.top + rect.height; 1792 if (weight > -1) { 1793 region[4] = weight; 1794 } 1795} 1796 1797/*=========================================================================== 1798 * FUNCTION : convertFromRegions 1799 * 1800 * DESCRIPTION: helper method to convert from array to cam_rect_t 1801 * 1802 * PARAMETERS : 1803 * @rect : cam_rect_t struct to convert 1804 * @region : int32_t destination array 1805 * @weight : if we are converting from cam_area_t, weight is valid 1806 * else weight = -1 1807 * 1808 *==========================================================================*/ 1809void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1810 const camera_metadata_t *settings, 1811 uint32_t tag){ 1812 CameraMetadata frame_settings; 1813 frame_settings = settings; 1814 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1815 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1816 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1817 int32_t y_max = 
frame_settings.find(tag).data.i32[3]; 1818 roi->weight = frame_settings.find(tag).data.i32[4]; 1819 roi->rect.left = x_min; 1820 roi->rect.top = y_min; 1821 roi->rect.width = x_max - x_min; 1822 roi->rect.height = y_max - y_min; 1823} 1824 1825/*=========================================================================== 1826 * FUNCTION : resetIfNeededROI 1827 * 1828 * DESCRIPTION: helper method to reset the roi if it is greater than scaler 1829 * crop region 1830 * 1831 * PARAMETERS : 1832 * @roi : cam_area_t struct to resize 1833 * @scalerCropRegion : cam_crop_region_t region to compare against 1834 * 1835 * 1836 *==========================================================================*/ 1837bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi, 1838 const cam_crop_region_t* scalerCropRegion) 1839{ 1840 int32_t roi_x_max = roi->rect.width + roi->rect.left; 1841 int32_t roi_y_max = roi->rect.height + roi->rect.top; 1842 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top; 1843 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left; 1844 if ((roi_x_max < scalerCropRegion->left) || 1845 (roi_y_max < scalerCropRegion->top) || 1846 (roi->rect.left > crop_x_max) || 1847 (roi->rect.top > crop_y_max)){ 1848 return false; 1849 } 1850 if (roi->rect.left < scalerCropRegion->left) { 1851 roi->rect.left = scalerCropRegion->left; 1852 } 1853 if (roi->rect.top < scalerCropRegion->top) { 1854 roi->rect.top = scalerCropRegion->top; 1855 } 1856 if (roi_x_max > crop_x_max) { 1857 roi_x_max = crop_x_max; 1858 } 1859 if (roi_y_max > crop_y_max) { 1860 roi_y_max = crop_y_max; 1861 } 1862 roi->rect.width = roi_x_max - roi->rect.left; 1863 roi->rect.height = roi_y_max - roi->rect.top; 1864 return true; 1865} 1866 1867/*=========================================================================== 1868 * FUNCTION : convertLandmarks 1869 * 1870 * DESCRIPTION: helper method to extract the landmarks from face detection info 1871 * 1872 * PARAMETERS 
: 1873 * @face : cam_rect_t struct to convert 1874 * @landmarks : int32_t destination array 1875 * 1876 * 1877 *==========================================================================*/ 1878void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1879{ 1880 landmarks[0] = face.left_eye_center.x; 1881 landmarks[1] = face.left_eye_center.y; 1882 landmarks[2] = face.right_eye_center.y; 1883 landmarks[3] = face.right_eye_center.y; 1884 landmarks[4] = face.mouth_center.x; 1885 landmarks[5] = face.mouth_center.y; 1886} 1887 1888#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1889/*=========================================================================== 1890 * FUNCTION : initCapabilities 1891 * 1892 * DESCRIPTION: initialize camera capabilities in static data struct 1893 * 1894 * PARAMETERS : 1895 * @cameraId : camera Id 1896 * 1897 * RETURN : int32_t type of status 1898 * NO_ERROR -- success 1899 * none-zero failure code 1900 *==========================================================================*/ 1901int QCamera3HardwareInterface::initCapabilities(int cameraId) 1902{ 1903 int rc = 0; 1904 mm_camera_vtbl_t *cameraHandle = NULL; 1905 QCamera3HeapMemory *capabilityHeap = NULL; 1906 1907 cameraHandle = camera_open(cameraId); 1908 if (!cameraHandle) { 1909 ALOGE("%s: camera_open failed", __func__); 1910 rc = -1; 1911 goto open_failed; 1912 } 1913 1914 capabilityHeap = new QCamera3HeapMemory(); 1915 if (capabilityHeap == NULL) { 1916 ALOGE("%s: creation of capabilityHeap failed", __func__); 1917 goto heap_creation_failed; 1918 } 1919 /* Allocate memory for capability buffer */ 1920 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1921 if(rc != OK) { 1922 ALOGE("%s: No memory for cappability", __func__); 1923 goto allocate_failed; 1924 } 1925 1926 /* Map memory for capability buffer */ 1927 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1928 rc = 
cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t));
    if(rc < 0) {
        ALOGE("%s: failed to map capability buffer", __func__);
        goto map_failed;
    }

    /* Query Capability */
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Cache the daemon-filled capability struct in the process-global table.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        // NOTE(review): rc is >= 0 here, so this out-of-memory path returns
        // "success" while gCamCapability[cameraId] stays NULL -- confirm
        // callers tolerate that, or set an error code before the goto.
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

/* Cleanup labels: unwind in reverse order of acquisition. The success path
 * intentionally falls through them too -- the capability heap is transient
 * and the camera handle is only needed for the query. */
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}

/*===========================================================================
 * FUNCTION   : initParameters
 *
 * DESCRIPTION: initialize camera parameters
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initParameters()
{
    int rc = 0;

    //Allocate Set Param Buffer
    mParamHeap = new QCamera3HeapMemory();
    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
    if(rc != OK) {
        rc = NO_MEMORY;
        ALOGE("Failed to allocate SETPARM Heap memory");
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    //Map memory for parameters buffer
    rc
= mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF,
            mParamHeap->getFd(0),
            sizeof(parm_buffer_t));
    if(rc < 0) {
        ALOGE("%s:failed to map SETPARM buffer",__func__);
        rc = FAILED_TRANSACTION;
        // Mapping failed: release the heap allocated above so it isn't leaked.
        mParamHeap->deallocate();
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    // mParameters aliases the heap buffer that is now mapped to the daemon.
    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
    return rc;
}

/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Reverse of initParameters(): unmap from the daemon, then free the heap.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // The buffer mParameters pointed into is gone; clear the alias.
    mParameters = NULL;
}

/*===========================================================================
 * FUNCTION   : calcMaxJpegSize
 *
 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
 *
 * PARAMETERS :
 *
 * RETURN     : max_jpeg_size
 *==========================================================================*/
int QCamera3HardwareInterface::calcMaxJpegSize()
{
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    // Find the largest supported picture size by pixel count.
    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    // 1.5 bytes per pixel worst case, plus room for the trailing JPEG blob
    // transport header appended by the HAL.
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    return max_jpeg_size;
}
/*===========================================================================
 * FUNCTION   : initStaticMetadata
 *
 * DESCRIPTION: initialize the static metadata
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;

    /* android.info: hardware level */
    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
        &supportedHardwareLevel, 1);

    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1); */

    /*hard coded for now but this should come from sensor*/
    float min_focus_distance;
    if(facingBack){
        min_focus_distance = 10;
    } else {
        min_focus_distance = 0;
    }
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    /*should be using focal lengths but sensor doesn't provide that info now*/
    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                    &gCamCapability[cameraId]->focal_length,
                    1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                    gCamCapability[cameraId]->apertures,
                    gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                gCamCapability[cameraId]->filter_densities,
                gCamCapability[cameraId]->filter_densities_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
                gCamCapability[cameraId]->optical_stab_modes_count);

    staticInfo.update(ANDROID_LENS_POSITION,
                gCamCapability[cameraId]->lens_position,
                sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));

    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
                                       gCamCapability[cameraId]->lens_shading_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
            lens_shading_map_size,
            sizeof(lens_shading_map_size)/sizeof(int32_t));

    int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
                                         gCamCapability[cameraId]->geo_correction_map_size.height};
    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
            geo_correction_map_size,
            sizeof(geo_correction_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
            gCamCapability[cameraId]->geo_correction_map,
            sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));

    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            gCamCapability[cameraId]->sensor_physical_size, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
            gCamCapability[cameraId]->exposure_time_range, 2);

    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            &gCamCapability[cameraId]->max_frame_duration, 1);


    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);

    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
                                  gCamCapability[cameraId]->pixel_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                    pixel_array_size, 2);

    // Active array reported as a full rect [x, y, width, height] anchored at 0,0.
    int32_t active_array_size[] = {0, 0,
                                   gCamCapability[cameraId]->active_array_size.width,
                                   gCamCapability[cameraId]->active_array_size.height};
    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
                    active_array_size, 4);

    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
            &gCamCapability[cameraId]->white_level, 1);

    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
            gCamCapability[cameraId]->black_level_pattern, 4);

    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
                    &gCamCapability[cameraId]->flash_charge_duration, 1);

    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
                    &gCamCapability[cameraId]->max_tone_map_curve_points, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
                    (int*)&gCamCapability[cameraId]->max_num_roi, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
                    &gCamCapability[cameraId]->histogram_size, 1);

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            &gCamCapability[cameraId]->max_histogram_count, 1);

    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
                                    gCamCapability[cameraId]->sharpness_map_size.height};

    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));

    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            &gCamCapability[cameraId]->max_sharpness_map_value, 1);


    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
                    &gCamCapability[cameraId]->raw_min_duration,
                    1);

    int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
                                HAL_PIXEL_FORMAT_BLOB};
    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
                scalar_formats,
                scalar_formats_count);

    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
              available_processed_sizes);
    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
                 available_fps_ranges);
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );

    camera_metadata_rational exposureCompensationStep = {
            gCamCapability[cameraId]->exp_compensation_step.numerator,
            gCamCapability[cameraId]->exp_compensation_step.denominator};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
                      &exposureCompensationStep, 1);

    /*TO DO*/
    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
                      availableVstabModes, sizeof(availableVstabModes));

    /*HAL 1 and HAL 3 common*/
    float maxZoom = 4;
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            &maxZoom, 1);

    int32_t max3aRegions = 1;
    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
            &max3aRegions, 1);

    uint8_t availableFaceDetectModes[] = {
            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
                      availableFaceDetectModes,
                      sizeof(availableFaceDetectModes));

    int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width,
                          gCamCapability[cameraId]->raw_dim.height};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                      raw_size,
                      sizeof(raw_size)/sizeof(uint32_t));

    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
                                           gCamCapability[cameraId]->exposure_compensation_max};
    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
            exposureCompensationRange,
            sizeof(exposureCompensationRange)/sizeof(int32_t));

    uint8_t lensFacing = (facingBack) ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);

    // JPEG sizes mirror the processed sizes table built above.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                available_processed_sizes,
                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));

    // NOTE(review): available_thumbnail_sizes is defined elsewhere in this
    // file (not visible here) -- confirm its element count matches.
    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
                      available_thumbnail_sizes,
                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));

    // Same computation as calcMaxJpegSize(), but for an explicit cameraId.
    int32_t max_jpeg_size = 0;
    int temp_width, temp_height;
    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
        if (temp_width * temp_height > max_jpeg_size ) {
            max_jpeg_size = temp_width * temp_height;
        }
    }
    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate backend effect enums to framework values, skipping any the
    // framework does not know about.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    int32_t size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP,
                                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_effects[i]);
        if (val != NAME_NOT_FOUND) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    // Remember the backend index of each accepted scene mode so the
    // overrides list below can be built from the same subset.
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    int32_t supported_scene_modes_cnt = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
        int val = lookupFwkName(SCENE_MODES_MAP,
                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_scene_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
            supported_indexes[supported_scene_modes_cnt] = i;
            supported_scene_modes_cnt++;
        }
    }

    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override entries (AE, AWB, AF) per supported scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);
    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
                      scene_mode_overrides,
                      supported_scene_modes_cnt*3);

    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP,
                                sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_antibandings[i]);
        if (val != NAME_NOT_FOUND) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP,
                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (val != NAME_NOT_FOUND) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
        int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                   gCamCapability[cameraId]->supported_white_balances[i]);
        if (val != NAME_NOT_FOUND) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);

    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];

    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels,
            gCamCapability[cameraId]->supported_flash_firing_level_cnt);


    uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // NOTE(review): avail_ae_modes holds 5 entries; this assumes
    // supported_ae_modes_cnt <= 2 when flash is available (3 flash modes
    // are appended) -- confirm the capability struct guarantees that.
    uint8_t avail_ae_modes[5];
    size = 0;
    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
        size++;
    }
    if (flashAvailable) {
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes,
                      size);

    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
                      &gCamCapability[cameraId]->jpeg_min_duration[0],
                      gCamCapability[cameraId]->picture_sizes_tbl_cnt);

    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // {raw, processed (non-stalling), jpeg (stalling)} stream count limits.
    int32_t max_output_streams[3] = {1, 3, 1};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
                      max_output_streams,
                      3);

    // Publish into the process-global table; ownership transfers to it.
    gStaticMetadata[cameraId] = staticInfo.release();
    return rc;
}

/*===========================================================================
 * FUNCTION   : makeTable
 *
 * DESCRIPTION: make a table of sizes
 *
 * PARAMETERS :
 *   @dimTable  : source array of cam_dimension_t
 *   @size      : number of entries in dimTable
 *   @sizeTable : destination int32_t array (must hold size*2 entries)
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
                                          int32_t* sizeTable)
{
    int j = 0;
    // Flatten each dimension into consecutive (width, height) pairs.
    for (int i = 0; i < size; i++) {
        sizeTable[j] = dimTable[i].width;
        sizeTable[j+1] = dimTable[i].height;
        j+=2;
    }
}

/*===========================================================================
 * FUNCTION   : makeFPSTable
 *
 * DESCRIPTION: make a table of fps ranges
 *
 * PARAMETERS :
 *   @fpsTable       : source array of cam_fps_range_t
 *   @size           : number of entries in fpsTable
 *   @fpsRangesTable : destination int32_t array (must hold size*2 entries)
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
                                             int32_t* fpsRangesTable)
{
    int j = 0;
    // Flatten each range into consecutive (min, max) int32 pairs,
    // truncating the backend's float fps values.
    for (int i = 0; i < size; i++) {
        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
        j+=2;
    }
}

/*===========================================================================
 * FUNCTION   : makeOverridesList
 *
 * DESCRIPTION: make a list of scene mode overrides
 *
 * PARAMETERS :
 *   @overridesTable    : daemon-provided overrides, indexed by backend scene mode
 *   @size              : number of framework-supported scene modes
 *   @overridesList     : destination array of (AE, AWB, AF) triples
 *   @supported_indexes : backend index of each framework-supported scene mode
 *   @camera_id         : camera whose capabilities to consult
 *
 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
                                                  uint8_t size, uint8_t* overridesList,
                                                  uint8_t* supported_indexes,
                                                  int camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    int j = 0, index = 0, supt = 0;
    uint8_t focus_override;
    for (int i = 0; i < size; i++) {
        supt = 0;
        // Map this framework scene mode back to its backend table slot.
        index = supported_indexes[i];
        // AE override: auto-flash when the unit has a flash, plain ON otherwise.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                 overridesTable[index].awb_mode);
        focus_override = (uint8_t)overridesTable[index].af_mode;
        // Only use the daemon's AF override if this sensor supports it.
        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = 1;
              break;
           }
        }
        if (supt) {
            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
                                      sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                      focus_override);
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}

/*===========================================================================
 * FUNCTION   : getPreviewHalPixelFormat
 *
 * DESCRIPTION: convert the format to type recognized by framework
 *
 * PARAMETERS : format : the format from backend
 *
 ** RETURN    : format recognized by framework
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
{
    int32_t halPixelFormat;

    switch (format) {
    case CAM_FORMAT_YUV_420_NV12:
        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
        break;
    case CAM_FORMAT_YUV_420_NV21:
        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
        break;
    case CAM_FORMAT_YUV_420_NV21_ADRENO:
        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
        break;
    case CAM_FORMAT_YUV_420_YV12:
        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
        break;
    case CAM_FORMAT_YUV_422_NV16:
    case CAM_FORMAT_YUV_422_NV61:
    default:
        // 422 and unknown formats fall back to NV21.
        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
        break;
    }
    return halPixelFormat;
}
/*===========================================================================
 * FUNCTION   : getSensorSensitivity
 *
 * DESCRIPTION: convert iso_mode to an integer value
 *
 * PARAMETERS : iso_mode : the iso_mode supported by sensor
 *
 ** RETURN    : sensitivity supported by sensor (-1 for unrecognized modes)
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
{
    int32_t sensitivity;

    switch (iso_mode) {
    case CAM_ISO_MODE_100:
        sensitivity = 100;
        break;
    case CAM_ISO_MODE_200:
        sensitivity = 200;
        break;
    case CAM_ISO_MODE_400:
        sensitivity = 400;
        break;
    case CAM_ISO_MODE_800:
        sensitivity = 800;
        break;
    case CAM_ISO_MODE_1600:
        sensitivity = 1600;
        break;
    default:
        sensitivity = -1;
        break;
    }
    return sensitivity;
}


/*===========================================================================
 * FUNCTION   : AddSetParmEntryToBatch
 *
 * DESCRIPTION: add set parameter entry into batch
 *
 * PARAMETERS :
 *   @p_table     : ptr to parameter buffer
 *   @paramType   : parameter type
 *   @paramLength : length of parameter value
 *   @paramValue  : ptr to parameter value
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // Entries are kept as a singly linked list sorted by param id; the id
    // doubles as the slot position in the table.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current =
GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
    } else if (position < current){
        // New smallest id: becomes the list head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : lookupFwkName
 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propogated
 *
 * PARAMETERS :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int type of status
 *              fwk_name  -- success
 *              none-zero failure code
 *==========================================================================*/
int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
                                                int len, int hal_name)
{

    // Linear scan is fine: the maps are small static tables.
    for (int i = 0; i < len; i++) {
        if (arr[i].hal_name == hal_name)
            return arr[i].fwk_name;
    }

    /* Not able to find matching framework type
is not necessarily
     * an error case. This happens when mm-camera supports more attributes
     * than the frameworks do */
    ALOGD("%s: Cannot find matching framework type", __func__);
    return NAME_NOT_FOUND;
}

/*===========================================================================
 * FUNCTION   : lookupHalName
 *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propogated
 *
 * PARAMETERS :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the hal_parm to map
 *
 * RETURN     : int32_t type of status
 *              hal_name  -- success
 *              none-zero failure code
 *==========================================================================*/
int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
                                                int len, int fwk_name)
{
    for (int i = 0; i < len; i++) {
       if (arr[i].fwk_name == fwk_name)
           return arr[i].hal_name;
    }
    // Unlike lookupFwkName, a miss here is a real error: the framework
    // requested a value the backend has no mapping for.
    ALOGE("%s: Cannot find matching hal type", __func__);
    return NAME_NOT_FOUND;
}

/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(int cameraId,
                                    struct camera_info *info)
{
    int rc = 0;

    // Lazily populate the process-global capability and static-metadata
    // caches on first query for this camera.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            //pthread_mutex_unlock(&g_camlock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if
(rc < 0) { 2728 return rc; 2729 } 2730 } 2731 2732 switch(gCamCapability[cameraId]->position) { 2733 case CAM_POSITION_BACK: 2734 info->facing = CAMERA_FACING_BACK; 2735 break; 2736 2737 case CAM_POSITION_FRONT: 2738 info->facing = CAMERA_FACING_FRONT; 2739 break; 2740 2741 default: 2742 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2743 rc = -1; 2744 break; 2745 } 2746 2747 2748 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2749 info->device_version = CAMERA_DEVICE_API_VERSION_3_0; 2750 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2751 2752 return rc; 2753} 2754 2755/*=========================================================================== 2756 * FUNCTION : translateMetadata 2757 * 2758 * DESCRIPTION: translate the metadata into camera_metadata_t 2759 * 2760 * PARAMETERS : type of the request 2761 * 2762 * 2763 * RETURN : success: camera_metadata_t* 2764 * failure: NULL 2765 * 2766 *==========================================================================*/ 2767camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2768{ 2769 pthread_mutex_lock(&mMutex); 2770 2771 if (mDefaultMetadata[type] != NULL) { 2772 pthread_mutex_unlock(&mMutex); 2773 return mDefaultMetadata[type]; 2774 } 2775 //first time we are handling this request 2776 //fill up the metadata structure using the wrapper class 2777 CameraMetadata settings; 2778 //translate from cam_capability_t to camera_metadata_tag_t 2779 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2780 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2781 2782 /*control*/ 2783 2784 uint8_t controlIntent = 0; 2785 switch (type) { 2786 case CAMERA3_TEMPLATE_PREVIEW: 2787 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2788 break; 2789 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2790 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2791 break; 2792 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2793 
controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2794 break; 2795 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2796 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2797 break; 2798 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2799 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2800 break; 2801 default: 2802 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2803 break; 2804 } 2805 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2806 2807 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2808 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2809 2810 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2811 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2812 2813 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2814 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2815 2816 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2817 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2818 2819 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2820 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2821 2822 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2823 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2824 2825 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2826 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2827 2828 static uint8_t focusMode; 2829 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2830 ALOGE("%s: Setting focus mode to auto", __func__); 2831 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2832 } else { 2833 ALOGE("%s: Setting focus mode to off", __func__); 2834 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2835 } 2836 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2837 2838 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2839 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2840 2841 /*flash*/ 2842 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2843 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2844 2845 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4; 2846 settings.update(ANDROID_FLASH_FIRING_POWER, 2847 &flashFiringLevel, 1); 2848 2849 /* lens */ 2850 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2851 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2852 2853 if (gCamCapability[mCameraId]->filter_densities_count) { 2854 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2855 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2856 gCamCapability[mCameraId]->filter_densities_count); 2857 } 2858 2859 float default_focal_length = gCamCapability[mCameraId]->focal_length; 2860 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2861 2862 /* frame duration */ 2863 static const int64_t default_frame_duration = NSEC_PER_33MSEC; 2864 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1); 2865 2866 /* sensitivity */ 2867 static const int32_t default_sensitivity = 100; 2868 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1); 2869 2870 /*edge mode*/ 2871 static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY; 2872 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1); 2873 2874 /*noise reduction 
mode*/ 2875 static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; 2876 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1); 2877 2878 /*color correction mode*/ 2879 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; 2880 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1); 2881 2882 /*transform matrix mode*/ 2883 static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; 2884 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1); 2885 2886 mDefaultMetadata[type] = settings.release(); 2887 2888 pthread_mutex_unlock(&mMutex); 2889 return mDefaultMetadata[type]; 2890} 2891 2892/*=========================================================================== 2893 * FUNCTION : setFrameParameters 2894 * 2895 * DESCRIPTION: set parameters per frame as requested in the metadata from 2896 * framework 2897 * 2898 * PARAMETERS : 2899 * @request : request that needs to be serviced 2900 * @streamTypeMask : bit mask of stream types on which buffers are requested 2901 * 2902 * RETURN : success: NO_ERROR 2903 * failure: 2904 *==========================================================================*/ 2905int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request, 2906 uint32_t streamTypeMask) 2907{ 2908 /*translate from camera_metadata_t type to parm_type_t*/ 2909 int rc = 0; 2910 if (request->settings == NULL && mFirstRequest) { 2911 /*settings cannot be null for the first request*/ 2912 return BAD_VALUE; 2913 } 2914 2915 int32_t hal_version = CAM_HAL_V3; 2916 2917 memset(mParameters, 0, sizeof(parm_buffer_t)); 2918 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2919 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2920 sizeof(hal_version), &hal_version); 2921 if (rc < 0) { 2922 ALOGE("%s: Failed to set hal version in the parameters", __func__); 2923 return BAD_VALUE; 2924 } 2925 2926 /*we need to update the frame number in 
the parameters*/ 2927 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2928 sizeof(request->frame_number), &(request->frame_number)); 2929 if (rc < 0) { 2930 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2931 return BAD_VALUE; 2932 } 2933 2934 /* Update stream id mask where buffers are requested */ 2935 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK, 2936 sizeof(streamTypeMask), &streamTypeMask); 2937 if (rc < 0) { 2938 ALOGE("%s: Failed to set stream type mask in the parameters", __func__); 2939 return BAD_VALUE; 2940 } 2941 2942 if(request->settings != NULL){ 2943 rc = translateMetadataToParameters(request); 2944 } 2945 /*set the parameters to backend*/ 2946 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2947 return rc; 2948} 2949 2950/*=========================================================================== 2951 * FUNCTION : translateMetadataToParameters 2952 * 2953 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2954 * 2955 * 2956 * PARAMETERS : 2957 * @request : request sent from framework 2958 * 2959 * 2960 * RETURN : success: NO_ERROR 2961 * failure: 2962 *==========================================================================*/ 2963int QCamera3HardwareInterface::translateMetadataToParameters 2964 (const camera3_capture_request_t *request) 2965{ 2966 int rc = 0; 2967 CameraMetadata frame_settings; 2968 frame_settings = request->settings; 2969 2970 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2971 int32_t antibandingMode = 2972 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2973 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2974 sizeof(antibandingMode), &antibandingMode); 2975 } 2976 2977 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 2978 int32_t expCompensation = frame_settings.find( 2979 
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 2980 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min) 2981 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min; 2982 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max) 2983 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max; 2984 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2985 sizeof(expCompensation), &expCompensation); 2986 } 2987 2988 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2989 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2990 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2991 sizeof(aeLock), &aeLock); 2992 } 2993 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2994 cam_fps_range_t fps_range; 2995 fps_range.min_fps = 2996 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2997 fps_range.max_fps = 2998 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1]; 2999 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 3000 sizeof(fps_range), &fps_range); 3001 } 3002 3003 float focalDistance = -1.0; 3004 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 3005 focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 3006 rc = AddSetParmEntryToBatch(mParameters, 3007 CAM_INTF_META_LENS_FOCUS_DISTANCE, 3008 sizeof(focalDistance), &focalDistance); 3009 } 3010 3011 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 3012 uint8_t fwk_focusMode = 3013 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 3014 uint8_t focusMode; 3015 if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) { 3016 focusMode = CAM_FOCUS_MODE_INFINITY; 3017 } else{ 3018 focusMode = lookupHalName(FOCUS_MODES_MAP, 3019 sizeof(FOCUS_MODES_MAP), 3020 fwk_focusMode); 3021 } 3022 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 3023 
sizeof(focusMode), &focusMode); 3024 } 3025 3026 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 3027 uint8_t awbLock = 3028 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 3029 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 3030 sizeof(awbLock), &awbLock); 3031 } 3032 3033 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 3034 uint8_t fwk_whiteLevel = 3035 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 3036 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 3037 sizeof(WHITE_BALANCE_MODES_MAP), 3038 fwk_whiteLevel); 3039 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 3040 sizeof(whiteLevel), &whiteLevel); 3041 } 3042 3043 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 3044 uint8_t fwk_effectMode = 3045 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 3046 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 3047 sizeof(EFFECT_MODES_MAP), 3048 fwk_effectMode); 3049 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 3050 sizeof(effectMode), &effectMode); 3051 } 3052 3053 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 3054 uint8_t fwk_aeMode = 3055 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 3056 uint8_t aeMode; 3057 int32_t redeye; 3058 3059 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 3060 aeMode = CAM_AE_MODE_OFF; 3061 } else { 3062 aeMode = CAM_AE_MODE_ON; 3063 } 3064 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 3065 redeye = 1; 3066 } else { 3067 redeye = 0; 3068 } 3069 3070 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 3071 sizeof(AE_FLASH_MODE_MAP), 3072 fwk_aeMode); 3073 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 3074 sizeof(aeMode), &aeMode); 3075 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 3076 sizeof(flashMode), &flashMode); 3077 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 3078 sizeof(redeye), &redeye); 3079 
} 3080 3081 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 3082 uint8_t colorCorrectMode = 3083 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 3084 rc = 3085 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 3086 sizeof(colorCorrectMode), &colorCorrectMode); 3087 } 3088 3089 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) { 3090 cam_color_correct_gains_t colorCorrectGains; 3091 for (int i = 0; i < 4; i++) { 3092 colorCorrectGains.gains[i] = 3093 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i]; 3094 } 3095 rc = 3096 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS, 3097 sizeof(colorCorrectGains), &colorCorrectGains); 3098 } 3099 3100 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) { 3101 cam_color_correct_matrix_t colorCorrectTransform; 3102 cam_rational_type_t transform_elem; 3103 int num = 0; 3104 for (int i = 0; i < 3; i++) { 3105 for (int j = 0; j < 3; j++) { 3106 transform_elem.numerator = 3107 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator; 3108 transform_elem.denominator = 3109 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator; 3110 colorCorrectTransform.transform_matrix[i][j] = transform_elem; 3111 num++; 3112 } 3113 } 3114 rc = 3115 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM, 3116 sizeof(colorCorrectTransform), &colorCorrectTransform); 3117 } 3118 3119 cam_trigger_t aecTrigger; 3120 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 3121 aecTrigger.trigger_id = -1; 3122 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 3123 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 3124 aecTrigger.trigger = 3125 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 3126 aecTrigger.trigger_id = 3127 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 3128 } 3129 rc = AddSetParmEntryToBatch(mParameters, 
CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 3130 sizeof(aecTrigger), &aecTrigger); 3131 3132 /*af_trigger must come with a trigger id*/ 3133 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 3134 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 3135 cam_trigger_t af_trigger; 3136 af_trigger.trigger = 3137 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 3138 af_trigger.trigger_id = 3139 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 3140 rc = AddSetParmEntryToBatch(mParameters, 3141 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 3142 } 3143 3144 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 3145 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 3146 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 3147 sizeof(metaMode), &metaMode); 3148 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 3149 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0]; 3150 uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP, 3151 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 3152 fwk_sceneMode); 3153 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 3154 sizeof(sceneMode), &sceneMode); 3155 } else if (metaMode == ANDROID_CONTROL_MODE_OFF) { 3156 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 3157 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 3158 sizeof(sceneMode), &sceneMode); 3159 } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) { 3160 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 3161 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 3162 sizeof(sceneMode), &sceneMode); 3163 } 3164 } 3165 3166 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 3167 int32_t demosaic = 3168 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 3169 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 3170 sizeof(demosaic), &demosaic); 3171 } 3172 3173 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 3174 
cam_edge_application_t edge_application; 3175 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 3176 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) { 3177 edge_application.sharpness = 0; 3178 } else { 3179 edge_application.sharpness = 10; 3180 } 3181 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE, 3182 sizeof(edge_application), &edge_application); 3183 } 3184 3185 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 3186 int32_t edgeStrength = 3187 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 3188 rc = AddSetParmEntryToBatch(mParameters, 3189 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 3190 } 3191 3192 if (frame_settings.exists(ANDROID_FLASH_MODE)) { 3193 int32_t respectFlashMode = 1; 3194 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 3195 uint8_t fwk_aeMode = 3196 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 3197 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) { 3198 respectFlashMode = 0; 3199 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode", 3200 __func__); 3201 } 3202 } 3203 if (respectFlashMode) { 3204 uint8_t flashMode = 3205 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]; 3206 flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP, 3207 sizeof(FLASH_MODES_MAP), 3208 flashMode); 3209 ALOGI("%s: flash mode after mapping %d", __func__, flashMode); 3210 // To check: CAM_INTF_META_FLASH_MODE usage 3211 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 3212 sizeof(flashMode), &flashMode); 3213 } 3214 } 3215 3216 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 3217 uint8_t flashPower = 3218 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 3219 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 3220 sizeof(flashPower), &flashPower); 3221 } 3222 3223 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 3224 int64_t flashFiringTime = 3225 
frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 3226 rc = AddSetParmEntryToBatch(mParameters, 3227 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 3228 } 3229 3230 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 3231 uint8_t geometricMode = 3232 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 3233 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 3234 sizeof(geometricMode), &geometricMode); 3235 } 3236 3237 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 3238 uint8_t geometricStrength = 3239 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 3240 rc = AddSetParmEntryToBatch(mParameters, 3241 CAM_INTF_META_GEOMETRIC_STRENGTH, 3242 sizeof(geometricStrength), &geometricStrength); 3243 } 3244 3245 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 3246 uint8_t hotPixelMode = 3247 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 3248 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 3249 sizeof(hotPixelMode), &hotPixelMode); 3250 } 3251 3252 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 3253 float lensAperture = 3254 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0]; 3255 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 3256 sizeof(lensAperture), &lensAperture); 3257 } 3258 3259 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 3260 float filterDensity = 3261 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 3262 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 3263 sizeof(filterDensity), &filterDensity); 3264 } 3265 3266 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 3267 float focalLength = 3268 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 3269 rc = AddSetParmEntryToBatch(mParameters, 3270 CAM_INTF_META_LENS_FOCAL_LENGTH, 3271 sizeof(focalLength), &focalLength); 3272 } 3273 3274 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 3275 
uint8_t optStabMode = 3276 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 3277 rc = AddSetParmEntryToBatch(mParameters, 3278 CAM_INTF_META_LENS_OPT_STAB_MODE, 3279 sizeof(optStabMode), &optStabMode); 3280 } 3281 3282 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 3283 uint8_t noiseRedMode = 3284 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 3285 rc = AddSetParmEntryToBatch(mParameters, 3286 CAM_INTF_META_NOISE_REDUCTION_MODE, 3287 sizeof(noiseRedMode), &noiseRedMode); 3288 } 3289 3290 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 3291 uint8_t noiseRedStrength = 3292 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 3293 rc = AddSetParmEntryToBatch(mParameters, 3294 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 3295 sizeof(noiseRedStrength), &noiseRedStrength); 3296 } 3297 3298 cam_crop_region_t scalerCropRegion; 3299 bool scalerCropSet = false; 3300 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 3301 scalerCropRegion.left = 3302 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 3303 scalerCropRegion.top = 3304 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 3305 scalerCropRegion.width = 3306 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 3307 scalerCropRegion.height = 3308 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3]; 3309 rc = AddSetParmEntryToBatch(mParameters, 3310 CAM_INTF_META_SCALER_CROP_REGION, 3311 sizeof(scalerCropRegion), &scalerCropRegion); 3312 scalerCropSet = true; 3313 } 3314 3315 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 3316 int64_t sensorExpTime = 3317 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 3318 ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime); 3319 rc = AddSetParmEntryToBatch(mParameters, 3320 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 3321 sizeof(sensorExpTime), &sensorExpTime); 3322 } 3323 3324 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 
3325 int64_t sensorFrameDuration = 3326 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 3327 int64_t minFrameDuration = getMinFrameDuration(request); 3328 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration); 3329 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration) 3330 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration; 3331 ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration); 3332 rc = AddSetParmEntryToBatch(mParameters, 3333 CAM_INTF_META_SENSOR_FRAME_DURATION, 3334 sizeof(sensorFrameDuration), &sensorFrameDuration); 3335 } 3336 3337 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 3338 int32_t sensorSensitivity = 3339 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 3340 if (sensorSensitivity < 3341 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity) 3342 sensorSensitivity = 3343 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity; 3344 if (sensorSensitivity > 3345 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity) 3346 sensorSensitivity = 3347 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity; 3348 ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity); 3349 rc = AddSetParmEntryToBatch(mParameters, 3350 CAM_INTF_META_SENSOR_SENSITIVITY, 3351 sizeof(sensorSensitivity), &sensorSensitivity); 3352 } 3353 3354 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 3355 int32_t shadingMode = 3356 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 3357 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 3358 sizeof(shadingMode), &shadingMode); 3359 } 3360 3361 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 3362 uint8_t shadingStrength = 3363 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 3364 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 3365 sizeof(shadingStrength), &shadingStrength); 3366 } 3367 3368 if 
(frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 3369 uint8_t fwk_facedetectMode = 3370 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 3371 uint8_t facedetectMode = 3372 lookupHalName(FACEDETECT_MODES_MAP, 3373 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode); 3374 rc = AddSetParmEntryToBatch(mParameters, 3375 CAM_INTF_META_STATS_FACEDETECT_MODE, 3376 sizeof(facedetectMode), &facedetectMode); 3377 } 3378 3379 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 3380 uint8_t histogramMode = 3381 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 3382 rc = AddSetParmEntryToBatch(mParameters, 3383 CAM_INTF_META_STATS_HISTOGRAM_MODE, 3384 sizeof(histogramMode), &histogramMode); 3385 } 3386 3387 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 3388 uint8_t sharpnessMapMode = 3389 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 3390 rc = AddSetParmEntryToBatch(mParameters, 3391 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 3392 sizeof(sharpnessMapMode), &sharpnessMapMode); 3393 } 3394 3395 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 3396 uint8_t tonemapMode = 3397 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 3398 rc = AddSetParmEntryToBatch(mParameters, 3399 CAM_INTF_META_TONEMAP_MODE, 3400 sizeof(tonemapMode), &tonemapMode); 3401 } 3402 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */ 3403 /*All tonemap channels will have the same number of points*/ 3404 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) && 3405 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) && 3406 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) { 3407 cam_rgb_tonemap_curves tonemapCurves; 3408 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2; 3409 3410 /* ch0 = G*/ 3411 int point = 0; 3412 cam_tonemap_curve_t tonemapCurveGreen; 3413 for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) { 3414 for (int j = 0; j < 2; j++) { 
3415 tonemapCurveGreen.tonemap_points[i][j] = 3416 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point]; 3417 point++; 3418 } 3419 } 3420 tonemapCurves.curves[0] = tonemapCurveGreen; 3421 3422 /* ch 1 = B */ 3423 point = 0; 3424 cam_tonemap_curve_t tonemapCurveBlue; 3425 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) { 3426 for (int j = 0; j < 2; j++) { 3427 tonemapCurveBlue.tonemap_points[i][j] = 3428 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point]; 3429 point++; 3430 } 3431 } 3432 tonemapCurves.curves[1] = tonemapCurveBlue; 3433 3434 /* ch 2 = R */ 3435 point = 0; 3436 cam_tonemap_curve_t tonemapCurveRed; 3437 for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) { 3438 for (int j = 0; j < 2; j++) { 3439 tonemapCurveRed.tonemap_points[i][j] = 3440 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point]; 3441 point++; 3442 } 3443 } 3444 tonemapCurves.curves[2] = tonemapCurveRed; 3445 3446 rc = AddSetParmEntryToBatch(mParameters, 3447 CAM_INTF_META_TONEMAP_CURVES, 3448 sizeof(tonemapCurves), &tonemapCurves); 3449 } 3450 3451 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 3452 uint8_t captureIntent = 3453 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 3454 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 3455 sizeof(captureIntent), &captureIntent); 3456 } 3457 3458 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) { 3459 uint8_t blackLevelLock = 3460 frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0]; 3461 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK, 3462 sizeof(blackLevelLock), &blackLevelLock); 3463 } 3464 3465 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) { 3466 uint8_t lensShadingMapMode = 3467 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0]; 3468 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE, 3469 sizeof(lensShadingMapMode), &lensShadingMapMode); 
3470 } 3471 3472 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { 3473 cam_area_t roi; 3474 bool reset = true; 3475 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS); 3476 if (scalerCropSet) { 3477 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3478 } 3479 if (reset) { 3480 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI, 3481 sizeof(roi), &roi); 3482 } 3483 } 3484 3485 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { 3486 cam_area_t roi; 3487 bool reset = true; 3488 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS); 3489 if (scalerCropSet) { 3490 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3491 } 3492 if (reset) { 3493 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI, 3494 sizeof(roi), &roi); 3495 } 3496 } 3497 3498 if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { 3499 cam_area_t roi; 3500 bool reset = true; 3501 convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS); 3502 if (scalerCropSet) { 3503 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3504 } 3505 if (reset) { 3506 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS, 3507 sizeof(roi), &roi); 3508 } 3509 } 3510 return rc; 3511} 3512 3513/*=========================================================================== 3514 * FUNCTION : getJpegSettings 3515 * 3516 * DESCRIPTION: save the jpeg settings in the HAL 3517 * 3518 * 3519 * PARAMETERS : 3520 * @settings : frame settings information from framework 3521 * 3522 * 3523 * RETURN : success: NO_ERROR 3524 * failure: 3525 *==========================================================================*/ 3526int QCamera3HardwareInterface::getJpegSettings 3527 (const camera_metadata_t *settings) 3528{ 3529 if (mJpegSettings) { 3530 if (mJpegSettings->gps_timestamp) { 3531 free(mJpegSettings->gps_timestamp); 3532 mJpegSettings->gps_timestamp = NULL; 3533 } 3534 if (mJpegSettings->gps_coordinates) { 3535 for (int i = 0; i < 3; 
i++) { 3536 free(mJpegSettings->gps_coordinates[i]); 3537 mJpegSettings->gps_coordinates[i] = NULL; 3538 } 3539 } 3540 free(mJpegSettings); 3541 mJpegSettings = NULL; 3542 } 3543 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 3544 CameraMetadata jpeg_settings; 3545 jpeg_settings = settings; 3546 3547 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 3548 mJpegSettings->jpeg_orientation = 3549 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 3550 } else { 3551 mJpegSettings->jpeg_orientation = 0; 3552 } 3553 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 3554 mJpegSettings->jpeg_quality = 3555 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 3556 } else { 3557 mJpegSettings->jpeg_quality = 85; 3558 } 3559 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 3560 mJpegSettings->thumbnail_size.width = 3561 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 3562 mJpegSettings->thumbnail_size.height = 3563 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 3564 } else { 3565 mJpegSettings->thumbnail_size.width = 0; 3566 mJpegSettings->thumbnail_size.height = 0; 3567 } 3568 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 3569 for (int i = 0; i < 3; i++) { 3570 mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*)); 3571 *(mJpegSettings->gps_coordinates[i]) = 3572 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 3573 } 3574 } else{ 3575 for (int i = 0; i < 3; i++) { 3576 mJpegSettings->gps_coordinates[i] = NULL; 3577 } 3578 } 3579 3580 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 3581 mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*)); 3582 *(mJpegSettings->gps_timestamp) = 3583 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 3584 } else { 3585 mJpegSettings->gps_timestamp = NULL; 3586 } 3587 3588 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 3589 int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count; 
3590 for (int i = 0; i < len; i++) { 3591 mJpegSettings->gps_processing_method[i] = 3592 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i]; 3593 } 3594 if (mJpegSettings->gps_processing_method[len-1] != '\0') { 3595 mJpegSettings->gps_processing_method[len] = '\0'; 3596 } 3597 } else { 3598 mJpegSettings->gps_processing_method[0] = '\0'; 3599 } 3600 3601 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 3602 mJpegSettings->sensor_sensitivity = 3603 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 3604 } else { 3605 mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed; 3606 } 3607 3608 mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time; 3609 3610 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 3611 mJpegSettings->lens_focal_length = 3612 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 3613 } 3614 if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 3615 mJpegSettings->exposure_compensation = 3616 jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 3617 } 3618 mJpegSettings->sharpness = 10; //default value 3619 if (jpeg_settings.exists(ANDROID_EDGE_MODE)) { 3620 uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 3621 if (edgeMode == ANDROID_EDGE_MODE_OFF) { 3622 mJpegSettings->sharpness = 0; 3623 } 3624 } 3625 mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step; 3626 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 3627 mJpegSettings->is_jpeg_format = true; 3628 mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask; 3629 return 0; 3630} 3631 3632/*=========================================================================== 3633 * FUNCTION : captureResultCb 3634 * 3635 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 3636 * 3637 * PARAMETERS : 3638 * @frame : frame information from mm-camera-interface 3639 * @buffer : actual gralloc buffer 
to be returned to frameworks. NULL if metadata. 3640 * @userdata: userdata 3641 * 3642 * RETURN : NONE 3643 *==========================================================================*/ 3644void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 3645 camera3_stream_buffer_t *buffer, 3646 uint32_t frame_number, void *userdata) 3647{ 3648 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 3649 if (hw == NULL) { 3650 ALOGE("%s: Invalid hw %p", __func__, hw); 3651 return; 3652 } 3653 3654 hw->captureResultCb(metadata, buffer, frame_number); 3655 return; 3656} 3657 3658 3659/*=========================================================================== 3660 * FUNCTION : initialize 3661 * 3662 * DESCRIPTION: Pass framework callback pointers to HAL 3663 * 3664 * PARAMETERS : 3665 * 3666 * 3667 * RETURN : Success : 0 3668 * Failure: -ENODEV 3669 *==========================================================================*/ 3670 3671int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 3672 const camera3_callback_ops_t *callback_ops) 3673{ 3674 ALOGV("%s: E", __func__); 3675 QCamera3HardwareInterface *hw = 3676 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3677 if (!hw) { 3678 ALOGE("%s: NULL camera device", __func__); 3679 return -ENODEV; 3680 } 3681 3682 int rc = hw->initialize(callback_ops); 3683 ALOGV("%s: X", __func__); 3684 return rc; 3685} 3686 3687/*=========================================================================== 3688 * FUNCTION : configure_streams 3689 * 3690 * DESCRIPTION: 3691 * 3692 * PARAMETERS : 3693 * 3694 * 3695 * RETURN : Success: 0 3696 * Failure: -EINVAL (if stream configuration is invalid) 3697 * -ENODEV (fatal error) 3698 *==========================================================================*/ 3699 3700int QCamera3HardwareInterface::configure_streams( 3701 const struct camera3_device *device, 3702 camera3_stream_configuration_t *stream_list) 3703{ 3704 
ALOGV("%s: E", __func__); 3705 QCamera3HardwareInterface *hw = 3706 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3707 if (!hw) { 3708 ALOGE("%s: NULL camera device", __func__); 3709 return -ENODEV; 3710 } 3711 int rc = hw->configureStreams(stream_list); 3712 ALOGV("%s: X", __func__); 3713 return rc; 3714} 3715 3716/*=========================================================================== 3717 * FUNCTION : register_stream_buffers 3718 * 3719 * DESCRIPTION: Register stream buffers with the device 3720 * 3721 * PARAMETERS : 3722 * 3723 * RETURN : 3724 *==========================================================================*/ 3725int QCamera3HardwareInterface::register_stream_buffers( 3726 const struct camera3_device *device, 3727 const camera3_stream_buffer_set_t *buffer_set) 3728{ 3729 ALOGV("%s: E", __func__); 3730 QCamera3HardwareInterface *hw = 3731 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3732 if (!hw) { 3733 ALOGE("%s: NULL camera device", __func__); 3734 return -ENODEV; 3735 } 3736 int rc = hw->registerStreamBuffers(buffer_set); 3737 ALOGV("%s: X", __func__); 3738 return rc; 3739} 3740 3741/*=========================================================================== 3742 * FUNCTION : construct_default_request_settings 3743 * 3744 * DESCRIPTION: Configure a settings buffer to meet the required use case 3745 * 3746 * PARAMETERS : 3747 * 3748 * 3749 * RETURN : Success: Return valid metadata 3750 * Failure: Return NULL 3751 *==========================================================================*/ 3752const camera_metadata_t* QCamera3HardwareInterface:: 3753 construct_default_request_settings(const struct camera3_device *device, 3754 int type) 3755{ 3756 3757 ALOGV("%s: E", __func__); 3758 camera_metadata_t* fwk_metadata = NULL; 3759 QCamera3HardwareInterface *hw = 3760 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3761 if (!hw) { 3762 ALOGE("%s: NULL camera device", __func__); 3763 return NULL; 3764 } 
3765 3766 fwk_metadata = hw->translateCapabilityToMetadata(type); 3767 3768 ALOGV("%s: X", __func__); 3769 return fwk_metadata; 3770} 3771 3772/*=========================================================================== 3773 * FUNCTION : process_capture_request 3774 * 3775 * DESCRIPTION: 3776 * 3777 * PARAMETERS : 3778 * 3779 * 3780 * RETURN : 3781 *==========================================================================*/ 3782int QCamera3HardwareInterface::process_capture_request( 3783 const struct camera3_device *device, 3784 camera3_capture_request_t *request) 3785{ 3786 ALOGV("%s: E", __func__); 3787 QCamera3HardwareInterface *hw = 3788 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3789 if (!hw) { 3790 ALOGE("%s: NULL camera device", __func__); 3791 return -EINVAL; 3792 } 3793 3794 int rc = hw->processCaptureRequest(request); 3795 ALOGV("%s: X", __func__); 3796 return rc; 3797} 3798 3799/*=========================================================================== 3800 * FUNCTION : get_metadata_vendor_tag_ops 3801 * 3802 * DESCRIPTION: 3803 * 3804 * PARAMETERS : 3805 * 3806 * 3807 * RETURN : 3808 *==========================================================================*/ 3809 3810void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 3811 const struct camera3_device *device, 3812 vendor_tag_query_ops_t* ops) 3813{ 3814 ALOGV("%s: E", __func__); 3815 QCamera3HardwareInterface *hw = 3816 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3817 if (!hw) { 3818 ALOGE("%s: NULL camera device", __func__); 3819 return; 3820 } 3821 3822 hw->getMetadataVendorTagOps(ops); 3823 ALOGV("%s: X", __func__); 3824 return; 3825} 3826 3827/*=========================================================================== 3828 * FUNCTION : dump 3829 * 3830 * DESCRIPTION: 3831 * 3832 * PARAMETERS : 3833 * 3834 * 3835 * RETURN : 3836 *==========================================================================*/ 3837 3838void 
QCamera3HardwareInterface::dump( 3839 const struct camera3_device *device, int fd) 3840{ 3841 ALOGV("%s: E", __func__); 3842 QCamera3HardwareInterface *hw = 3843 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3844 if (!hw) { 3845 ALOGE("%s: NULL camera device", __func__); 3846 return; 3847 } 3848 3849 hw->dump(fd); 3850 ALOGV("%s: X", __func__); 3851 return; 3852} 3853 3854/*=========================================================================== 3855 * FUNCTION : flush 3856 * 3857 * DESCRIPTION: 3858 * 3859 * PARAMETERS : 3860 * 3861 * 3862 * RETURN : 3863 *==========================================================================*/ 3864 3865int QCamera3HardwareInterface::flush( 3866 const struct camera3_device *device) 3867{ 3868 int rc; 3869 ALOGV("%s: E", __func__); 3870 QCamera3HardwareInterface *hw = 3871 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3872 if (!hw) { 3873 ALOGE("%s: NULL camera device", __func__); 3874 return -EINVAL; 3875 } 3876 3877 rc = hw->flush(); 3878 ALOGV("%s: X", __func__); 3879 return rc; 3880} 3881 3882/*=========================================================================== 3883 * FUNCTION : close_camera_device 3884 * 3885 * DESCRIPTION: 3886 * 3887 * PARAMETERS : 3888 * 3889 * 3890 * RETURN : 3891 *==========================================================================*/ 3892int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3893{ 3894 ALOGV("%s: E", __func__); 3895 int ret = NO_ERROR; 3896 QCamera3HardwareInterface *hw = 3897 reinterpret_cast<QCamera3HardwareInterface *>( 3898 reinterpret_cast<camera3_device_t *>(device)->priv); 3899 if (!hw) { 3900 ALOGE("NULL camera device"); 3901 return BAD_VALUE; 3902 } 3903 delete hw; 3904 3905 pthread_mutex_lock(&mCameraSessionLock); 3906 mCameraSessionActive = 0; 3907 pthread_mutex_unlock(&mCameraSessionLock); 3908 ALOGV("%s: X", __func__); 3909 return ret; 3910} 3911 
3912/*=========================================================================== 3913 * FUNCTION : getWaveletDenoiseProcessPlate 3914 * 3915 * DESCRIPTION: query wavelet denoise process plate 3916 * 3917 * PARAMETERS : None 3918 * 3919 * RETURN : WNR prcocess plate vlaue 3920 *==========================================================================*/ 3921cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate() 3922{ 3923 char prop[PROPERTY_VALUE_MAX]; 3924 memset(prop, 0, sizeof(prop)); 3925 property_get("persist.denoise.process.plates", prop, "0"); 3926 int processPlate = atoi(prop); 3927 switch(processPlate) { 3928 case 0: 3929 return CAM_WAVELET_DENOISE_YCBCR_PLANE; 3930 case 1: 3931 return CAM_WAVELET_DENOISE_CBCR_ONLY; 3932 case 2: 3933 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 3934 case 3: 3935 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR; 3936 default: 3937 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 3938 } 3939} 3940 3941/*=========================================================================== 3942 * FUNCTION : needRotationReprocess 3943 * 3944 * DESCRIPTION: if rotation needs to be done by reprocess in pp 3945 * 3946 * PARAMETERS : none 3947 * 3948 * RETURN : true: needed 3949 * false: no need 3950 *==========================================================================*/ 3951bool QCamera3HardwareInterface::needRotationReprocess() 3952{ 3953 3954 if (!mJpegSettings->is_jpeg_format) { 3955 // RAW image, no need to reprocess 3956 return false; 3957 } 3958 3959 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 && 3960 mJpegSettings->jpeg_orientation > 0) { 3961 // current rotation is not zero, and pp has the capability to process rotation 3962 ALOGD("%s: need do reprocess for rotation", __func__); 3963 return true; 3964 } 3965 3966 return false; 3967} 3968 3969/*=========================================================================== 3970 * FUNCTION : needReprocess 3971 * 
3972 * DESCRIPTION: if reprocess in needed 3973 * 3974 * PARAMETERS : none 3975 * 3976 * RETURN : true: needed 3977 * false: no need 3978 *==========================================================================*/ 3979bool QCamera3HardwareInterface::needReprocess() 3980{ 3981 if (!mJpegSettings->is_jpeg_format) { 3982 // RAW image, no need to reprocess 3983 return false; 3984 } 3985 3986 if ((mJpegSettings->min_required_pp_mask > 0) || 3987 isWNREnabled()) { 3988 // TODO: add for ZSL HDR later 3989 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode 3990 ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__); 3991 return true; 3992 } 3993 return needRotationReprocess(); 3994} 3995 3996/*=========================================================================== 3997 * FUNCTION : addOnlineReprocChannel 3998 * 3999 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames 4000 * coming from input channel 4001 * 4002 * PARAMETERS : 4003 * @pInputChannel : ptr to input channel whose frames will be post-processed 4004 * 4005 * RETURN : Ptr to the newly created channel obj. NULL if failed. 
4006 *==========================================================================*/ 4007QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel( 4008 QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle) 4009{ 4010 int32_t rc = NO_ERROR; 4011 QCamera3ReprocessChannel *pChannel = NULL; 4012 if (pInputChannel == NULL) { 4013 ALOGE("%s: input channel obj is NULL", __func__); 4014 return NULL; 4015 } 4016 4017 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle, 4018 mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle); 4019 if (NULL == pChannel) { 4020 ALOGE("%s: no mem for reprocess channel", __func__); 4021 return NULL; 4022 } 4023 4024 // Capture channel, only need snapshot and postview streams start together 4025 mm_camera_channel_attr_t attr; 4026 memset(&attr, 0, sizeof(mm_camera_channel_attr_t)); 4027 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS; 4028 attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue(); 4029 rc = pChannel->initialize(); 4030 if (rc != NO_ERROR) { 4031 ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc); 4032 delete pChannel; 4033 return NULL; 4034 } 4035 4036 // pp feature config 4037 cam_pp_feature_config_t pp_config; 4038 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t)); 4039 if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) { 4040 pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS; 4041 pp_config.sharpness = mJpegSettings->sharpness; 4042 } 4043 4044 if (isWNREnabled()) { 4045 pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D; 4046 pp_config.denoise2d.denoise_enable = 1; 4047 pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate(); 4048 } 4049 if (needRotationReprocess()) { 4050 pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION; 4051 int rotation = mJpegSettings->jpeg_orientation; 4052 if (rotation == 0) { 4053 pp_config.rotation = ROTATE_0; 4054 } else if (rotation == 90) { 4055 
pp_config.rotation = ROTATE_90; 4056 } else if (rotation == 180) { 4057 pp_config.rotation = ROTATE_180; 4058 } else if (rotation == 270) { 4059 pp_config.rotation = ROTATE_270; 4060 } 4061 } 4062 4063 rc = pChannel->addReprocStreamsFromSource(pp_config, 4064 pInputChannel, 4065 mMetadataChannel); 4066 4067 if (rc != NO_ERROR) { 4068 delete pChannel; 4069 return NULL; 4070 } 4071 return pChannel; 4072} 4073 4074int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue() 4075{ 4076 return gCamCapability[mCameraId]->min_num_pp_bufs; 4077} 4078 4079bool QCamera3HardwareInterface::isWNREnabled() { 4080 return gCamCapability[mCameraId]->isWnrSupported; 4081} 4082 4083}; //end namespace qcamera 4084