QCamera3HWI.cpp revision 6356e2d4fc01ff878ac4664d86892c8d81e330a9
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {

// Convenience accessor for the payload pointer of a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability tables; populated by the module layer before a
// QCamera3HardwareInterface instance is constructed for that sensor.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes open/close across all camera instances. Only a single session
// may be active at a time (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;

// The tables below translate android.control.* framework enum values to the
// corresponding mm-camera backend enums. Lookups go through the generic
// QCameraMap search helpers.

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // Framework STEADYPHOTO maps to the backend's anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF OFF is represented by the backend's fixed-focus mode.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode drives the flash policy: plain ON means "AE on, flash off";
// both AUTO_FLASH variants fall back to the backend's auto flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Flattened (width, height) pairs, terminated by the mandatory 0,0 entry.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3_device_ops vtable handed to the framework; each entry is a static
// trampoline that recovers the instance from camera3_device_t::priv.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 *
RETURN : none 152 *==========================================================================*/ 153QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 154 : mCameraId(cameraId), 155 mCameraHandle(NULL), 156 mCameraOpened(false), 157 mCameraInitialized(false), 158 mCallbackOps(NULL), 159 mInputStream(NULL), 160 mMetadataChannel(NULL), 161 mFirstRequest(false), 162 mParamHeap(NULL), 163 mParameters(NULL), 164 mJpegSettings(NULL), 165 mIsZslMode(false), 166 m_pPowerModule(NULL) 167{ 168 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 169 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 170 mCameraDevice.common.close = close_camera_device; 171 mCameraDevice.ops = &mCameraOps; 172 mCameraDevice.priv = this; 173 gCamCapability[cameraId]->version = CAM_HAL_V3; 174 175 pthread_cond_init(&mRequestCond, NULL); 176 mPendingRequest = 0; 177 mCurrentRequestId = -1; 178 pthread_mutex_init(&mMutex, NULL); 179 180 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 181 mDefaultMetadata[i] = NULL; 182 183#ifdef HAS_MULTIMEDIA_HINTS 184 if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) { 185 ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID); 186 } 187#endif 188} 189 190/*=========================================================================== 191 * FUNCTION : ~QCamera3HardwareInterface 192 * 193 * DESCRIPTION: destructor of QCamera3HardwareInterface 194 * 195 * PARAMETERS : none 196 * 197 * RETURN : none 198 *==========================================================================*/ 199QCamera3HardwareInterface::~QCamera3HardwareInterface() 200{ 201 ALOGV("%s: E", __func__); 202 /* We need to stop all streams before deleting any stream */ 203 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 204 it != mStreamInfo.end(); it++) { 205 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 206 if (channel) 207 channel->stop(); 208 } 209 for (List<stream_info_t *>::iterator it = 
mStreamInfo.begin(); 210 it != mStreamInfo.end(); it++) { 211 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 212 if (channel) 213 delete channel; 214 free (*it); 215 } 216 217 if (mJpegSettings != NULL) { 218 free(mJpegSettings); 219 mJpegSettings = NULL; 220 } 221 222 /* Clean up all channels */ 223 if (mCameraInitialized) { 224 mMetadataChannel->stop(); 225 delete mMetadataChannel; 226 mMetadataChannel = NULL; 227 deinitParameters(); 228 } 229 230 if (mCameraOpened) 231 closeCamera(); 232 233 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 234 if (mDefaultMetadata[i]) 235 free_camera_metadata(mDefaultMetadata[i]); 236 237 pthread_cond_destroy(&mRequestCond); 238 239 pthread_mutex_destroy(&mMutex); 240 ALOGV("%s: X", __func__); 241} 242 243/*=========================================================================== 244 * FUNCTION : openCamera 245 * 246 * DESCRIPTION: open camera 247 * 248 * PARAMETERS : 249 * @hw_device : double ptr for camera device struct 250 * 251 * RETURN : int32_t type of status 252 * NO_ERROR -- success 253 * none-zero failure code 254 *==========================================================================*/ 255int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 256{ 257 int rc = 0; 258 pthread_mutex_lock(&mCameraSessionLock); 259 if (mCameraSessionActive) { 260 ALOGE("%s: multiple simultaneous camera instance not supported", __func__); 261 pthread_mutex_unlock(&mCameraSessionLock); 262 return INVALID_OPERATION; 263 } 264 265 if (mCameraOpened) { 266 *hw_device = NULL; 267 return PERMISSION_DENIED; 268 } 269 270 rc = openCamera(); 271 if (rc == 0) { 272 *hw_device = &mCameraDevice.common; 273 mCameraSessionActive = 1; 274 } else 275 *hw_device = NULL; 276 277#ifdef HAS_MULTIMEDIA_HINTS 278 if (rc == 0) { 279 if (m_pPowerModule) { 280 if (m_pPowerModule->powerHint) { 281 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, 282 (void *)"state=1"); 283 } 284 } 285 } 286#endif 287 
    pthread_mutex_unlock(&mCameraSessionLock);
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    // NOTE(review): assumes mCameraHandle is non-NULL; the destructor only
    // calls this when mCameraOpened is set, which implies a valid handle.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                // Withdraw the encode hint issued in openCamera().
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     : 0 on success, negative errno-style code on failure
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    // Parameter heap/buffer must exist before any channel is created.
    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        goto err2;
    }
    rc = mMetadataChannel->initialize();
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        goto err3;
    }

    mCallbackOps = callback_ops;

    // Errors unwind in reverse order of construction via the labels below.
    pthread_mutex_unlock(&mMutex);
    // Set only after the channel exists: the destructor uses this flag to
    // decide whether mMetadataChannel/parameters need tearing down.
    mCameraInitialized = true;
    return 0;

err3:
    delete mMetadataChannel;
    mMetadataChannel = NULL;
err2:
    deinitParameters();
err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : configureStreams
 *
 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
 *              and output streams.
411 * 412 * PARAMETERS : 413 * @stream_list : streams to be configured 414 * 415 * RETURN : 416 * 417 *==========================================================================*/ 418int QCamera3HardwareInterface::configureStreams( 419 camera3_stream_configuration_t *streamList) 420{ 421 int rc = 0; 422 pthread_mutex_lock(&mMutex); 423 // Sanity check stream_list 424 if (streamList == NULL) { 425 ALOGE("%s: NULL stream configuration", __func__); 426 pthread_mutex_unlock(&mMutex); 427 return BAD_VALUE; 428 } 429 430 if (streamList->streams == NULL) { 431 ALOGE("%s: NULL stream list", __func__); 432 pthread_mutex_unlock(&mMutex); 433 return BAD_VALUE; 434 } 435 436 if (streamList->num_streams < 1) { 437 ALOGE("%s: Bad number of streams requested: %d", __func__, 438 streamList->num_streams); 439 pthread_mutex_unlock(&mMutex); 440 return BAD_VALUE; 441 } 442 443 camera3_stream_t *inputStream = NULL; 444 camera3_stream_t *jpegStream = NULL; 445 /* first invalidate all the steams in the mStreamList 446 * if they appear again, they will be validated */ 447 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 448 it != mStreamInfo.end(); it++) { 449 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 450 channel->stop(); 451 (*it)->status = INVALID; 452 } 453 454 for (size_t i = 0; i < streamList->num_streams; i++) { 455 camera3_stream_t *newStream = streamList->streams[i]; 456 ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d", 457 __func__, newStream->stream_type, newStream->format, 458 newStream->width, newStream->height); 459 //if the stream is in the mStreamList validate it 460 bool stream_exists = false; 461 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 462 it != mStreamInfo.end(); it++) { 463 if ((*it)->stream == newStream) { 464 QCamera3Channel *channel = 465 (QCamera3Channel*)(*it)->stream->priv; 466 stream_exists = true; 467 (*it)->status = RECONFIGURE; 468 /*delete the channel object associated with the 
stream because 469 we need to reconfigure*/ 470 delete channel; 471 (*it)->stream->priv = NULL; 472 } 473 } 474 if (!stream_exists) { 475 //new stream 476 stream_info_t* stream_info; 477 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 478 stream_info->stream = newStream; 479 stream_info->status = VALID; 480 stream_info->registered = 0; 481 mStreamInfo.push_back(stream_info); 482 } 483 if (newStream->stream_type == CAMERA3_STREAM_INPUT 484 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) { 485 if (inputStream != NULL) { 486 ALOGE("%s: Multiple input streams requested!", __func__); 487 pthread_mutex_unlock(&mMutex); 488 return BAD_VALUE; 489 } 490 inputStream = newStream; 491 } 492 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) { 493 jpegStream = newStream; 494 } 495 } 496 mInputStream = inputStream; 497 498 /*clean up invalid streams*/ 499 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 500 it != mStreamInfo.end();) { 501 if(((*it)->status) == INVALID){ 502 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 503 delete channel; 504 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 505 free(*it); 506 it = mStreamInfo.erase(it); 507 } else { 508 it++; 509 } 510 } 511 512 //mMetadataChannel->stop(); 513 514 /* Allocate channel objects for the requested streams */ 515 for (size_t i = 0; i < streamList->num_streams; i++) { 516 camera3_stream_t *newStream = streamList->streams[i]; 517 if (newStream->priv == NULL) { 518 //New stream, construct channel 519 switch (newStream->stream_type) { 520 case CAMERA3_STREAM_INPUT: 521 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 522 break; 523 case CAMERA3_STREAM_BIDIRECTIONAL: 524 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 525 GRALLOC_USAGE_HW_CAMERA_WRITE; 526 break; 527 case CAMERA3_STREAM_OUTPUT: 528 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 529 break; 530 default: 531 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 532 break; 533 } 
534 535 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 536 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 537 QCamera3Channel *channel; 538 switch (newStream->format) { 539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 540 case HAL_PIXEL_FORMAT_YCbCr_420_888: 541 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 542 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL && 543 jpegStream) { 544 uint32_t width = jpegStream->width; 545 uint32_t height = jpegStream->height; 546 mIsZslMode = true; 547 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 548 mCameraHandle->ops, captureResultCb, 549 &gCamCapability[mCameraId]->padding_info, this, newStream, 550 width, height); 551 } else 552 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 553 mCameraHandle->ops, captureResultCb, 554 &gCamCapability[mCameraId]->padding_info, this, newStream); 555 if (channel == NULL) { 556 ALOGE("%s: allocation of channel failed", __func__); 557 pthread_mutex_unlock(&mMutex); 558 return -ENOMEM; 559 } 560 561 newStream->priv = channel; 562 break; 563 case HAL_PIXEL_FORMAT_BLOB: 564 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 565 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 566 mCameraHandle->ops, captureResultCb, 567 &gCamCapability[mCameraId]->padding_info, this, newStream); 568 if (channel == NULL) { 569 ALOGE("%s: allocation of channel failed", __func__); 570 pthread_mutex_unlock(&mMutex); 571 return -ENOMEM; 572 } 573 newStream->priv = channel; 574 break; 575 576 //TODO: Add support for app consumed format? 
577 default: 578 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 579 break; 580 } 581 } 582 } else { 583 // Channel already exists for this stream 584 // Do nothing for now 585 } 586 } 587 /*For the streams to be reconfigured we need to register the buffers 588 since the framework wont*/ 589 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 590 it != mStreamInfo.end(); it++) { 591 if ((*it)->status == RECONFIGURE) { 592 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 593 /*only register buffers for streams that have already been 594 registered*/ 595 if ((*it)->registered) { 596 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 597 (*it)->buffer_set.buffers); 598 if (rc != NO_ERROR) { 599 ALOGE("%s: Failed to register the buffers of old stream,\ 600 rc = %d", __func__, rc); 601 } 602 ALOGV("%s: channel %p has %d buffers", 603 __func__, channel, (*it)->buffer_set.num_buffers); 604 } 605 } 606 607 ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream); 608 if (index == NAME_NOT_FOUND) { 609 mPendingBuffersMap.add((*it)->stream, 0); 610 } else { 611 mPendingBuffersMap.editValueAt(index) = 0; 612 } 613 } 614 615 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 616 mPendingRequestsList.clear(); 617 618 //settings/parameters don't carry over for new configureStreams 619 memset(mParameters, 0, sizeof(parm_buffer_t)); 620 mFirstRequest = true; 621 622 pthread_mutex_unlock(&mMutex); 623 return rc; 624} 625 626/*=========================================================================== 627 * FUNCTION : validateCaptureRequest 628 * 629 * DESCRIPTION: validate a capture request from camera service 630 * 631 * PARAMETERS : 632 * @request : request from framework to process 633 * 634 * RETURN : 635 * 636 *==========================================================================*/ 637int QCamera3HardwareInterface::validateCaptureRequest( 638 camera3_capture_request_t *request) 639{ 640 ssize_t idx 
= 0; 641 const camera3_stream_buffer_t *b; 642 CameraMetadata meta; 643 644 /* Sanity check the request */ 645 if (request == NULL) { 646 ALOGE("%s: NULL capture request", __func__); 647 return BAD_VALUE; 648 } 649 650 uint32_t frameNumber = request->frame_number; 651 if (request->input_buffer != NULL && 652 request->input_buffer->stream != mInputStream) { 653 ALOGE("%s: Request %d: Input buffer not from input stream!", 654 __FUNCTION__, frameNumber); 655 return BAD_VALUE; 656 } 657 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 658 ALOGE("%s: Request %d: No output buffers provided!", 659 __FUNCTION__, frameNumber); 660 return BAD_VALUE; 661 } 662 if (request->input_buffer != NULL) { 663 b = request->input_buffer; 664 QCamera3Channel *channel = 665 static_cast<QCamera3Channel*>(b->stream->priv); 666 if (channel == NULL) { 667 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 668 __func__, frameNumber, idx); 669 return BAD_VALUE; 670 } 671 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 672 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 673 __func__, frameNumber, idx); 674 return BAD_VALUE; 675 } 676 if (b->release_fence != -1) { 677 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 678 __func__, frameNumber, idx); 679 return BAD_VALUE; 680 } 681 if (b->buffer == NULL) { 682 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 683 __func__, frameNumber, idx); 684 return BAD_VALUE; 685 } 686 } 687 688 // Validate all buffers 689 b = request->output_buffers; 690 do { 691 QCamera3Channel *channel = 692 static_cast<QCamera3Channel*>(b->stream->priv); 693 if (channel == NULL) { 694 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 695 __func__, frameNumber, idx); 696 return BAD_VALUE; 697 } 698 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 699 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 700 __func__, frameNumber, idx); 701 return BAD_VALUE; 702 } 703 if (b->release_fence != -1) { 704 ALOGE("%s: Request %d: Buffer 
%d: Has a release fence!", 705 __func__, frameNumber, idx); 706 return BAD_VALUE; 707 } 708 if (b->buffer == NULL) { 709 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 710 __func__, frameNumber, idx); 711 return BAD_VALUE; 712 } 713 idx++; 714 b = request->output_buffers + idx; 715 } while (idx < (ssize_t)request->num_output_buffers); 716 717 return NO_ERROR; 718} 719 720/*=========================================================================== 721 * FUNCTION : registerStreamBuffers 722 * 723 * DESCRIPTION: Register buffers for a given stream with the HAL device. 724 * 725 * PARAMETERS : 726 * @stream_list : streams to be configured 727 * 728 * RETURN : 729 * 730 *==========================================================================*/ 731int QCamera3HardwareInterface::registerStreamBuffers( 732 const camera3_stream_buffer_set_t *buffer_set) 733{ 734 int rc = 0; 735 736 pthread_mutex_lock(&mMutex); 737 738 if (buffer_set == NULL) { 739 ALOGE("%s: Invalid buffer_set parameter.", __func__); 740 pthread_mutex_unlock(&mMutex); 741 return -EINVAL; 742 } 743 if (buffer_set->stream == NULL) { 744 ALOGE("%s: Invalid stream parameter.", __func__); 745 pthread_mutex_unlock(&mMutex); 746 return -EINVAL; 747 } 748 if (buffer_set->num_buffers < 1) { 749 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 750 pthread_mutex_unlock(&mMutex); 751 return -EINVAL; 752 } 753 if (buffer_set->buffers == NULL) { 754 ALOGE("%s: Invalid buffers parameter.", __func__); 755 pthread_mutex_unlock(&mMutex); 756 return -EINVAL; 757 } 758 759 camera3_stream_t *stream = buffer_set->stream; 760 QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 761 762 //set the buffer_set in the mStreamInfo array 763 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 764 it != mStreamInfo.end(); it++) { 765 if ((*it)->stream == stream) { 766 uint32_t numBuffers = buffer_set->num_buffers; 767 (*it)->buffer_set.stream = buffer_set->stream; 768 
(*it)->buffer_set.num_buffers = numBuffers; 769 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 770 if ((*it)->buffer_set.buffers == NULL) { 771 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 772 pthread_mutex_unlock(&mMutex); 773 return -ENOMEM; 774 } 775 for (size_t j = 0; j < numBuffers; j++){ 776 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 777 } 778 (*it)->registered = 1; 779 } 780 } 781 rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers); 782 if (rc < 0) { 783 ALOGE("%s: registerBUffers for stream %p failed", __func__, stream); 784 pthread_mutex_unlock(&mMutex); 785 return -ENODEV; 786 } 787 788 pthread_mutex_unlock(&mMutex); 789 return NO_ERROR; 790} 791 792/*=========================================================================== 793 * FUNCTION : processCaptureRequest 794 * 795 * DESCRIPTION: process a capture request from camera service 796 * 797 * PARAMETERS : 798 * @request : request from framework to process 799 * 800 * RETURN : 801 * 802 *==========================================================================*/ 803int QCamera3HardwareInterface::processCaptureRequest( 804 camera3_capture_request_t *request) 805{ 806 int rc = NO_ERROR; 807 int32_t request_id; 808 CameraMetadata meta; 809 810 pthread_mutex_lock(&mMutex); 811 812 rc = validateCaptureRequest(request); 813 if (rc != NO_ERROR) { 814 ALOGE("%s: incoming request is not valid", __func__); 815 pthread_mutex_unlock(&mMutex); 816 return rc; 817 } 818 819 uint32_t frameNumber = request->frame_number; 820 rc = setFrameParameters(request->frame_number, request->settings); 821 if (rc < 0) { 822 ALOGE("%s: fail to set frame parameters", __func__); 823 pthread_mutex_unlock(&mMutex); 824 return rc; 825 } 826 827 meta = request->settings; 828 if (meta.exists(ANDROID_REQUEST_ID)) { 829 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0]; 830 mCurrentRequestId = request_id; 831 ALOGV("%s: Received request with id: %d",__func__, 
request_id); 832 } else if (mFirstRequest || mCurrentRequestId == -1){ 833 ALOGE("%s: Unable to find request id field, \ 834 & no previous id available", __func__); 835 return NAME_NOT_FOUND; 836 } else { 837 ALOGV("%s: Re-using old request id", __func__); 838 request_id = mCurrentRequestId; 839 } 840 841 ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d", 842 __func__, __LINE__, 843 request->num_output_buffers, 844 request->input_buffer, 845 frameNumber); 846 // Acquire all request buffers first 847 for (size_t i = 0; i < request->num_output_buffers; i++) { 848 const camera3_stream_buffer_t& output = request->output_buffers[i]; 849 sp<Fence> acquireFence = new Fence(output.acquire_fence); 850 851 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 852 //Call function to store local copy of jpeg data for encode params. 853 rc = getJpegSettings(request->settings); 854 if (rc < 0) { 855 ALOGE("%s: failed to get jpeg parameters", __func__); 856 pthread_mutex_unlock(&mMutex); 857 return rc; 858 } 859 } 860 861 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 862 if (rc != OK) { 863 ALOGE("%s: fence wait failed %d", __func__, rc); 864 pthread_mutex_unlock(&mMutex); 865 return rc; 866 } 867 } 868 869 /* Update pending request list and pending buffers map */ 870 PendingRequestInfo pendingRequest; 871 pendingRequest.frame_number = frameNumber; 872 pendingRequest.num_buffers = request->num_output_buffers; 873 pendingRequest.request_id = request_id; 874 875 for (size_t i = 0; i < request->num_output_buffers; i++) { 876 RequestedBufferInfo requestedBuf; 877 requestedBuf.stream = request->output_buffers[i].stream; 878 requestedBuf.buffer = NULL; 879 pendingRequest.buffers.push_back(requestedBuf); 880 881 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 882 } 883 mPendingRequestsList.push_back(pendingRequest); 884 885 // Notify metadata channel we receive a request 886 mMetadataChannel->request(NULL, frameNumber); 887 888 // Call request on other 
streams 889 for (size_t i = 0; i < request->num_output_buffers; i++) { 890 const camera3_stream_buffer_t& output = request->output_buffers[i]; 891 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 892 mm_camera_buf_def_t *pInputBuffer = NULL; 893 894 if (channel == NULL) { 895 ALOGE("%s: invalid channel pointer for stream", __func__); 896 continue; 897 } 898 899 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 900 QCamera3RegularChannel* inputChannel = NULL; 901 if(request->input_buffer != NULL){ 902 903 //Try to get the internal format 904 inputChannel = (QCamera3RegularChannel*) 905 request->input_buffer->stream->priv; 906 if(inputChannel == NULL ){ 907 ALOGE("%s: failed to get input channel handle", __func__); 908 } else { 909 pInputBuffer = 910 inputChannel->getInternalFormatBuffer( 911 request->input_buffer->buffer); 912 ALOGD("%s: Input buffer dump",__func__); 913 ALOGD("Stream id: %d", pInputBuffer->stream_id); 914 ALOGD("streamtype:%d", pInputBuffer->stream_type); 915 ALOGD("frame len:%d", pInputBuffer->frame_len); 916 } 917 } 918 rc = channel->request(output.buffer, frameNumber, mJpegSettings, 919 pInputBuffer,(QCamera3Channel*)inputChannel); 920 } else { 921 ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__, 922 __LINE__, output.buffer, frameNumber); 923 rc = channel->request(output.buffer, frameNumber); 924 } 925 if (rc < 0) 926 ALOGE("%s: request failed", __func__); 927 } 928 929 mFirstRequest = false; 930 931 //Block on conditional variable 932 mPendingRequest = 1; 933 while (mPendingRequest == 1) { 934 pthread_cond_wait(&mRequestCond, &mMutex); 935 } 936 937 pthread_mutex_unlock(&mMutex); 938 return rc; 939} 940 941/*=========================================================================== 942 * FUNCTION : getMetadataVendorTagOps 943 * 944 * DESCRIPTION: 945 * 946 * PARAMETERS : 947 * 948 * 949 * RETURN : 950 *==========================================================================*/ 951void 
QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    // Intentional no-op: vendor tags are not implemented yet, so the ops
    // table is left untouched.
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dump debug state to the given file descriptor.
 *              Currently unimplemented; intentional no-op.
 *
 * PARAMETERS :
 *   @fd : file descriptor to dump into (unused)
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata     : metadata information
 *   @frame_number : frame number for a stream-buffer result (ignored for
 *                   metadata results, which carry their own frame number)
 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 *                   NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // Serializes against processCaptureRequest and other callbacks.
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        // --- Metadata path: one metadata buffer may retire every pending
        // request whose frame number is <= the reported one. ---
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        // NOTE(review): this deliberately shadows the frame_number parameter;
        // the metadata result carries its own frame number.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            // Metadata without a frame number is only a start-of-frame tick;
            // recycle the buffer and just re-evaluate the flow-control state.
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
            frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count stream buffers already cached for this request so they can
            // be delivered together with the metadata.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    // NOTE(review): operator new throws rather than returning
                    // NULL, so this check is effectively dead code.
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        // The cached copy was malloc'd in the buffer path below.
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Unblock process_capture_request only when no stream has all of its
        // buffers dequeued and the backend reports no pending requests.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // --- Stream-buffer path ---
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Metadata for this frame has not arrived yet: cache a heap copy
            // of the buffer so it can be sent together with the metadata.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mMutex);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION: Translate backend metadata into framework result metadata.
 *
 * PARAMETERS :
 *   @metadata   : metadata information from callback
 *   @timestamp  : capture timestamp to report for this result
 *   @request_id : framework request id to embed in the result
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                 (metadata_buffer_t *metadata, nsecs_t timestamp,
                                  int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // VLAs sized by the number of detected faces; rectangles use 4 values
    // (l,t,r,b) and landmarks 6 (two eyes + mouth) per face.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: convert rectangle only, no weight slot.
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
        camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);
    }

    // The remainder is a straight copy: each backend metadata entry is read
    // via POINTER_OF and forwarded under its framework tag.
    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
    if(mIsZslMode) {
        // In ZSL mode AE state is forced to CONVERGED for the framework.
        uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
    } else {
        uint8_t *ae_state =
            (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
        camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
    }
    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/

    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[4];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    scalerCropRegion[3] = hScalerCropRegion->height;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    // Cache the ISO for the JPEG encoder's EXIF data as a side effect.
    mMetadataResponse.iso_speed = *sensorSensitivity;
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    resultMetadata = camMetadata.release();
    return resultMetadata;
}

/*===========================================================================
 * FUNCTION   : convertToRegions
 *
 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
 *              (framework region layout: x_min, y_min, x_max, y_max[, weight])
 *
 * PARAMETERS :
 *   @rect   : cam_rect_t struct to convert
 *   @region : int32_t destination array
 *   @weight : if we are converting from cam_area_t, weight is valid
 *             else weight = -1
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
    region[0] = rect.left;
    region[1] = rect.top;
    // Framework expects right/bottom edges, not width/height.
    region[2] = rect.left + rect.width;
    region[3] = rect.top + rect.height;
    if (weight > -1) {
        region[4] = weight;
    }
}

/*===========================================================================
 * FUNCTION   : convertFromRegions
 *
 * DESCRIPTION: helper method to convert from array to cam_rect_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination struct
 *   @settings : framework metadata to read the region tag from
 *   @region   :
int32_t destination array 1423 * @weight : if we are converting from cam_area_t, weight is valid 1424 * else weight = -1 1425 * 1426 *==========================================================================*/ 1427void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1428 const camera_metadata_t *settings, 1429 uint32_t tag){ 1430 CameraMetadata frame_settings; 1431 frame_settings = settings; 1432 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1433 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1434 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1435 int32_t y_max = frame_settings.find(tag).data.i32[3]; 1436 roi->weight = frame_settings.find(tag).data.i32[4]; 1437 roi->rect.left = x_min; 1438 roi->rect.top = y_min; 1439 roi->rect.width = x_max - x_min; 1440 roi->rect.height = y_max - y_min; 1441} 1442 1443/*=========================================================================== 1444 * FUNCTION : resetIfNeededROI 1445 * 1446 * DESCRIPTION: helper method to reset the roi if it is greater than scaler 1447 * crop region 1448 * 1449 * PARAMETERS : 1450 * @roi : cam_area_t struct to resize 1451 * @scalerCropRegion : cam_crop_region_t region to compare against 1452 * 1453 * 1454 *==========================================================================*/ 1455bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi, 1456 const cam_crop_region_t* scalerCropRegion) 1457{ 1458 int32_t roi_x_max = roi->rect.width + roi->rect.left; 1459 int32_t roi_y_max = roi->rect.height + roi->rect.top; 1460 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top; 1461 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->left; 1462 if ((roi_x_max < scalerCropRegion->left) || 1463 (roi_y_max < scalerCropRegion->top) || 1464 (roi->rect.left > crop_x_max) || 1465 (roi->rect.top > crop_y_max)){ 1466 return false; 1467 } 1468 if (roi->rect.left < scalerCropRegion->left) { 1469 roi->rect.left = scalerCropRegion->left; 1470 
} 1471 if (roi->rect.top < scalerCropRegion->top) { 1472 roi->rect.top = scalerCropRegion->top; 1473 } 1474 if (roi_x_max > crop_x_max) { 1475 roi_x_max = crop_x_max; 1476 } 1477 if (roi_y_max > crop_y_max) { 1478 roi_y_max = crop_y_max; 1479 } 1480 roi->rect.width = roi_x_max - roi->rect.left; 1481 roi->rect.height = roi_y_max - roi->rect.top; 1482 return true; 1483} 1484 1485/*=========================================================================== 1486 * FUNCTION : convertLandmarks 1487 * 1488 * DESCRIPTION: helper method to extract the landmarks from face detection info 1489 * 1490 * PARAMETERS : 1491 * @face : cam_rect_t struct to convert 1492 * @landmarks : int32_t destination array 1493 * 1494 * 1495 *==========================================================================*/ 1496void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1497{ 1498 landmarks[0] = face.left_eye_center.x; 1499 landmarks[1] = face.left_eye_center.y; 1500 landmarks[2] = face.right_eye_center.y; 1501 landmarks[3] = face.right_eye_center.y; 1502 landmarks[4] = face.mouth_center.x; 1503 landmarks[5] = face.mouth_center.y; 1504} 1505 1506#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1507/*=========================================================================== 1508 * FUNCTION : initCapabilities 1509 * 1510 * DESCRIPTION: initialize camera capabilities in static data struct 1511 * 1512 * PARAMETERS : 1513 * @cameraId : camera Id 1514 * 1515 * RETURN : int32_t type of status 1516 * NO_ERROR -- success 1517 * none-zero failure code 1518 *==========================================================================*/ 1519int QCamera3HardwareInterface::initCapabilities(int cameraId) 1520{ 1521 int rc = 0; 1522 mm_camera_vtbl_t *cameraHandle = NULL; 1523 QCamera3HeapMemory *capabilityHeap = NULL; 1524 1525 cameraHandle = camera_open(cameraId); 1526 if (!cameraHandle) { 1527 ALOGE("%s: camera_open failed", __func__); 1528 rc = -1; 
1529 goto open_failed; 1530 } 1531 1532 capabilityHeap = new QCamera3HeapMemory(); 1533 if (capabilityHeap == NULL) { 1534 ALOGE("%s: creation of capabilityHeap failed", __func__); 1535 goto heap_creation_failed; 1536 } 1537 /* Allocate memory for capability buffer */ 1538 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1539 if(rc != OK) { 1540 ALOGE("%s: No memory for cappability", __func__); 1541 goto allocate_failed; 1542 } 1543 1544 /* Map memory for capability buffer */ 1545 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1546 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1547 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1548 capabilityHeap->getFd(0), 1549 sizeof(cam_capability_t)); 1550 if(rc < 0) { 1551 ALOGE("%s: failed to map capability buffer", __func__); 1552 goto map_failed; 1553 } 1554 1555 /* Query Capability */ 1556 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1557 if(rc < 0) { 1558 ALOGE("%s: failed to query capability",__func__); 1559 goto query_failed; 1560 } 1561 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1562 if (!gCamCapability[cameraId]) { 1563 ALOGE("%s: out of memory", __func__); 1564 goto query_failed; 1565 } 1566 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1567 sizeof(cam_capability_t)); 1568 rc = 0; 1569 1570query_failed: 1571 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1572 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1573map_failed: 1574 capabilityHeap->deallocate(); 1575allocate_failed: 1576 delete capabilityHeap; 1577heap_creation_failed: 1578 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1579 cameraHandle = NULL; 1580open_failed: 1581 return rc; 1582} 1583 1584/*=========================================================================== 1585 * FUNCTION : initParameters 1586 * 1587 * DESCRIPTION: initialize camera parameters 1588 * 1589 * PARAMETERS : 1590 * 1591 * RETURN : int32_t type of status 
1592 * NO_ERROR -- success 1593 * none-zero failure code 1594 *==========================================================================*/ 1595int QCamera3HardwareInterface::initParameters() 1596{ 1597 int rc = 0; 1598 1599 //Allocate Set Param Buffer 1600 mParamHeap = new QCamera3HeapMemory(); 1601 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1602 if(rc != OK) { 1603 rc = NO_MEMORY; 1604 ALOGE("Failed to allocate SETPARM Heap memory"); 1605 delete mParamHeap; 1606 mParamHeap = NULL; 1607 return rc; 1608 } 1609 1610 //Map memory for parameters buffer 1611 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1612 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1613 mParamHeap->getFd(0), 1614 sizeof(parm_buffer_t)); 1615 if(rc < 0) { 1616 ALOGE("%s:failed to map SETPARM buffer",__func__); 1617 rc = FAILED_TRANSACTION; 1618 mParamHeap->deallocate(); 1619 delete mParamHeap; 1620 mParamHeap = NULL; 1621 return rc; 1622 } 1623 1624 mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0); 1625 return rc; 1626} 1627 1628/*=========================================================================== 1629 * FUNCTION : deinitParameters 1630 * 1631 * DESCRIPTION: de-initialize camera parameters 1632 * 1633 * PARAMETERS : 1634 * 1635 * RETURN : NONE 1636 *==========================================================================*/ 1637void QCamera3HardwareInterface::deinitParameters() 1638{ 1639 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 1640 CAM_MAPPING_BUF_TYPE_PARM_BUF); 1641 1642 mParamHeap->deallocate(); 1643 delete mParamHeap; 1644 mParamHeap = NULL; 1645 1646 mParameters = NULL; 1647} 1648 1649/*=========================================================================== 1650 * FUNCTION : calcMaxJpegSize 1651 * 1652 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 1653 * 1654 * PARAMETERS : 1655 * 1656 * RETURN : max_jpeg_size 1657 *==========================================================================*/ 1658int 
QCamera3HardwareInterface::calcMaxJpegSize() 1659{ 1660 int32_t max_jpeg_size = 0; 1661 int temp_width, temp_height; 1662 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 1663 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1664 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1665 if (temp_width * temp_height > max_jpeg_size ) { 1666 max_jpeg_size = temp_width * temp_height; 1667 } 1668 } 1669 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1670 return max_jpeg_size; 1671} 1672 1673/*=========================================================================== 1674 * FUNCTION : initStaticMetadata 1675 * 1676 * DESCRIPTION: initialize the static metadata 1677 * 1678 * PARAMETERS : 1679 * @cameraId : camera Id 1680 * 1681 * RETURN : int32_t type of status 1682 * 0 -- success 1683 * non-zero failure code 1684 *==========================================================================*/ 1685int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1686{ 1687 int rc = 0; 1688 CameraMetadata staticInfo; 1689 1690 /* android.info: hardware level */ 1691 uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; 1692 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 1693 &supportedHardwareLevel, 1); 1694 1695 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1696 /*HAL 3 only*/ 1697 /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1698 &gCamCapability[cameraId]->min_focus_distance, 1); */ 1699 1700 /*hard coded for now but this should come from sensor*/ 1701 float min_focus_distance; 1702 if(facingBack){ 1703 min_focus_distance = 10; 1704 } else { 1705 min_focus_distance = 0; 1706 } 1707 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1708 &min_focus_distance, 1); 1709 1710 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1711 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1712 1713 /*should be 
using focal lengths but sensor doesn't provide that info now*/ 1714 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1715 &gCamCapability[cameraId]->focal_length, 1716 1); 1717 1718 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1719 gCamCapability[cameraId]->apertures, 1720 gCamCapability[cameraId]->apertures_count); 1721 1722 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1723 gCamCapability[cameraId]->filter_densities, 1724 gCamCapability[cameraId]->filter_densities_count); 1725 1726 1727 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1728 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1729 gCamCapability[cameraId]->optical_stab_modes_count); 1730 1731 staticInfo.update(ANDROID_LENS_POSITION, 1732 gCamCapability[cameraId]->lens_position, 1733 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1734 1735 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1736 gCamCapability[cameraId]->lens_shading_map_size.height}; 1737 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1738 lens_shading_map_size, 1739 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1740 1741 int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1742 gCamCapability[cameraId]->geo_correction_map_size.height}; 1743 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1744 geo_correction_map_size, 1745 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1746 1747 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1748 gCamCapability[cameraId]->geo_correction_map, 1749 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1750 1751 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1752 gCamCapability[cameraId]->sensor_physical_size, 2); 1753 1754 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1755 gCamCapability[cameraId]->exposure_time_range, 2); 1756 1757 
staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1758 &gCamCapability[cameraId]->max_frame_duration, 1); 1759 1760 1761 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1762 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1763 1764 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1765 gCamCapability[cameraId]->pixel_array_size.height}; 1766 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1767 pixel_array_size, 2); 1768 1769 int32_t active_array_size[] = {0, 0, 1770 gCamCapability[cameraId]->active_array_size.width, 1771 gCamCapability[cameraId]->active_array_size.height}; 1772 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1773 active_array_size, 4); 1774 1775 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1776 &gCamCapability[cameraId]->white_level, 1); 1777 1778 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1779 gCamCapability[cameraId]->black_level_pattern, 4); 1780 1781 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1782 &gCamCapability[cameraId]->flash_charge_duration, 1); 1783 1784 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1785 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1786 1787 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1788 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1789 /*hardcode 0 for now*/ 1790 int32_t max_face_count = 0; 1791 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1792 &max_face_count, 1); 1793 1794 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1795 &gCamCapability[cameraId]->histogram_size, 1); 1796 1797 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1798 &gCamCapability[cameraId]->max_histogram_count, 1); 1799 1800 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1801 gCamCapability[cameraId]->sharpness_map_size.height}; 1802 1803 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1804 
sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1805 1806 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1807 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1808 1809 1810 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1811 &gCamCapability[cameraId]->raw_min_duration, 1812 1); 1813 1814 int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888, 1815 HAL_PIXEL_FORMAT_BLOB}; 1816 int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t); 1817 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1818 scalar_formats, 1819 scalar_formats_count); 1820 1821 int32_t available_processed_sizes[CAM_FORMAT_MAX * 2]; 1822 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1823 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1824 available_processed_sizes); 1825 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1826 available_processed_sizes, 1827 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1828 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, 1829 &gCamCapability[cameraId]->min_duration[0], 1830 gCamCapability[cameraId]->supported_sizes_tbl_cnt); 1831 1832 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 1833 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1834 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1835 available_fps_ranges); 1836 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1837 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1838 1839 camera_metadata_rational exposureCompensationStep = { 1840 gCamCapability[cameraId]->exp_compensation_step.numerator, 1841 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1842 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1843 &exposureCompensationStep, 1); 1844 1845 /*TO DO*/ 1846 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1847 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1848 
availableVstabModes, sizeof(availableVstabModes)); 1849 1850 /*HAL 1 and HAL 3 common*/ 1851 float maxZoom = 4; 1852 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1853 &maxZoom, 1); 1854 1855 int32_t max3aRegions = 1; 1856 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1857 &max3aRegions, 1); 1858 1859 uint8_t availableFaceDetectModes[] = { 1860 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1861 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1862 availableFaceDetectModes, 1863 sizeof(availableFaceDetectModes)); 1864 1865 int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1866 gCamCapability[cameraId]->raw_dim.height}; 1867 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1868 raw_size, 1869 sizeof(raw_size)/sizeof(uint32_t)); 1870 1871 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1872 gCamCapability[cameraId]->exposure_compensation_max}; 1873 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1874 exposureCompensationRange, 1875 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1876 1877 uint8_t lensFacing = (facingBack) ? 
1878 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1879 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1880 1881 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 1882 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1883 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1884 available_jpeg_sizes); 1885 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1886 available_jpeg_sizes, 1887 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1888 1889 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1890 available_thumbnail_sizes, 1891 sizeof(available_thumbnail_sizes)/sizeof(int32_t)); 1892 1893 int32_t max_jpeg_size = 0; 1894 int temp_width, temp_height; 1895 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1896 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1897 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1898 if (temp_width * temp_height > max_jpeg_size ) { 1899 max_jpeg_size = temp_width * temp_height; 1900 } 1901 } 1902 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1903 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1904 &max_jpeg_size, 1); 1905 1906 uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1907 int32_t size = 0; 1908 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1909 int val = lookupFwkName(EFFECT_MODES_MAP, 1910 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1911 gCamCapability[cameraId]->supported_effects[i]); 1912 if (val != NAME_NOT_FOUND) { 1913 avail_effects[size] = (uint8_t)val; 1914 size++; 1915 } 1916 } 1917 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1918 avail_effects, 1919 size); 1920 1921 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1922 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 1923 int32_t supported_scene_modes_cnt = 0; 1924 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1925 int val = lookupFwkName(SCENE_MODES_MAP, 1926 
sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1927 gCamCapability[cameraId]->supported_scene_modes[i]); 1928 if (val != NAME_NOT_FOUND) { 1929 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 1930 supported_indexes[supported_scene_modes_cnt] = i; 1931 supported_scene_modes_cnt++; 1932 } 1933 } 1934 1935 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1936 avail_scene_modes, 1937 supported_scene_modes_cnt); 1938 1939 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 1940 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 1941 supported_scene_modes_cnt, 1942 scene_mode_overrides, 1943 supported_indexes, 1944 cameraId); 1945 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 1946 scene_mode_overrides, 1947 supported_scene_modes_cnt*3); 1948 1949 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1950 size = 0; 1951 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1952 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1953 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1954 gCamCapability[cameraId]->supported_antibandings[i]); 1955 if (val != NAME_NOT_FOUND) { 1956 avail_antibanding_modes[size] = (uint8_t)val; 1957 size++; 1958 } 1959 1960 } 1961 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1962 avail_antibanding_modes, 1963 size); 1964 1965 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1966 size = 0; 1967 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1968 int val = lookupFwkName(FOCUS_MODES_MAP, 1969 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1970 gCamCapability[cameraId]->supported_focus_modes[i]); 1971 if (val != NAME_NOT_FOUND) { 1972 avail_af_modes[size] = (uint8_t)val; 1973 size++; 1974 } 1975 } 1976 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1977 avail_af_modes, 1978 size); 1979 1980 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1981 size = 0; 1982 for (int i = 0; i < 
gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1983 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1984 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1985 gCamCapability[cameraId]->supported_white_balances[i]); 1986 if (val != NAME_NOT_FOUND) { 1987 avail_awb_modes[size] = (uint8_t)val; 1988 size++; 1989 } 1990 } 1991 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1992 avail_awb_modes, 1993 size); 1994 1995 uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1996 size = 0; 1997 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1998 int val = lookupFwkName(FLASH_MODES_MAP, 1999 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 2000 gCamCapability[cameraId]->supported_flash_modes[i]); 2001 if (val != NAME_NOT_FOUND) { 2002 avail_flash_modes[size] = (uint8_t)val; 2003 size++; 2004 } 2005 } 2006 static uint8_t flashAvailable = 0; 2007 if (size > 1) { 2008 //flash is supported 2009 flashAvailable = 1; 2010 } 2011 staticInfo.update(ANDROID_FLASH_MODE, 2012 avail_flash_modes, 2013 size); 2014 2015 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 2016 &flashAvailable, 1); 2017 2018 uint8_t avail_ae_modes[5]; 2019 size = 0; 2020 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) { 2021 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i]; 2022 size++; 2023 } 2024 if (flashAvailable) { 2025 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; 2026 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; 2027 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; 2028 } 2029 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 2030 avail_ae_modes, 2031 size); 2032 2033 int32_t sensitivity_range[2]; 2034 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity; 2035 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity; 2036 
staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, 2037 sensitivity_range, 2038 sizeof(sensitivity_range) / sizeof(int32_t)); 2039 2040 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 2041 &gCamCapability[cameraId]->max_analog_sensitivity, 2042 sizeof(int32_t) ); 2043 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 2044 &gCamCapability[cameraId]->jpeg_min_duration[0], 2045 gCamCapability[cameraId]->picture_sizes_tbl_cnt); 2046 2047 gStaticMetadata[cameraId] = staticInfo.release(); 2048 return rc; 2049} 2050 2051/*=========================================================================== 2052 * FUNCTION : makeTable 2053 * 2054 * DESCRIPTION: make a table of sizes 2055 * 2056 * PARAMETERS : 2057 * 2058 * 2059 *==========================================================================*/ 2060void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 2061 int32_t* sizeTable) 2062{ 2063 int j = 0; 2064 for (int i = 0; i < size; i++) { 2065 sizeTable[j] = dimTable[i].width; 2066 sizeTable[j+1] = dimTable[i].height; 2067 j+=2; 2068 } 2069} 2070 2071/*=========================================================================== 2072 * FUNCTION : makeFPSTable 2073 * 2074 * DESCRIPTION: make a table of fps ranges 2075 * 2076 * PARAMETERS : 2077 * 2078 *==========================================================================*/ 2079void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 2080 int32_t* fpsRangesTable) 2081{ 2082 int j = 0; 2083 for (int i = 0; i < size; i++) { 2084 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 2085 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 2086 j+=2; 2087 } 2088} 2089 2090/*=========================================================================== 2091 * FUNCTION : makeOverridesList 2092 * 2093 * DESCRIPTION: make a list of scene mode overrides 2094 * 2095 * PARAMETERS : 2096 * 2097 * 2098 
*==========================================================================*/ 2099void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable, 2100 uint8_t size, uint8_t* overridesList, 2101 uint8_t* supported_indexes, 2102 int camera_id) 2103{ 2104 /*daemon will give a list of overrides for all scene modes. 2105 However we should send the fwk only the overrides for the scene modes 2106 supported by the framework*/ 2107 int j = 0, index = 0, supt = 0; 2108 uint8_t focus_override; 2109 for (int i = 0; i < size; i++) { 2110 supt = 0; 2111 index = supported_indexes[i]; 2112 overridesList[j] = (uint8_t)overridesTable[index].ae_mode; 2113 overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP, 2114 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 2115 overridesTable[index].awb_mode); 2116 focus_override = (uint8_t)overridesTable[index].af_mode; 2117 for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) { 2118 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) { 2119 supt = 1; 2120 break; 2121 } 2122 } 2123 if (supt) { 2124 overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP, 2125 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 2126 focus_override); 2127 } else { 2128 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF; 2129 } 2130 j+=3; 2131 } 2132} 2133 2134/*=========================================================================== 2135 * FUNCTION : getPreviewHalPixelFormat 2136 * 2137 * DESCRIPTION: convert the format to type recognized by framework 2138 * 2139 * PARAMETERS : format : the format from backend 2140 * 2141 ** RETURN : format recognized by framework 2142 * 2143 *==========================================================================*/ 2144int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2145{ 2146 int32_t halPixelFormat; 2147 2148 switch (format) { 2149 case CAM_FORMAT_YUV_420_NV12: 2150 halPixelFormat = 
HAL_PIXEL_FORMAT_YCbCr_420_SP; 2151 break; 2152 case CAM_FORMAT_YUV_420_NV21: 2153 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2154 break; 2155 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2156 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2157 break; 2158 case CAM_FORMAT_YUV_420_YV12: 2159 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2160 break; 2161 case CAM_FORMAT_YUV_422_NV16: 2162 case CAM_FORMAT_YUV_422_NV61: 2163 default: 2164 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2165 break; 2166 } 2167 return halPixelFormat; 2168} 2169 2170/*=========================================================================== 2171 * FUNCTION : getSensorSensitivity 2172 * 2173 * DESCRIPTION: convert iso_mode to an integer value 2174 * 2175 * PARAMETERS : iso_mode : the iso_mode supported by sensor 2176 * 2177 ** RETURN : sensitivity supported by sensor 2178 * 2179 *==========================================================================*/ 2180int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode) 2181{ 2182 int32_t sensitivity; 2183 2184 switch (iso_mode) { 2185 case CAM_ISO_MODE_100: 2186 sensitivity = 100; 2187 break; 2188 case CAM_ISO_MODE_200: 2189 sensitivity = 200; 2190 break; 2191 case CAM_ISO_MODE_400: 2192 sensitivity = 400; 2193 break; 2194 case CAM_ISO_MODE_800: 2195 sensitivity = 800; 2196 break; 2197 case CAM_ISO_MODE_1600: 2198 sensitivity = 1600; 2199 break; 2200 default: 2201 sensitivity = -1; 2202 break; 2203 } 2204 return sensitivity; 2205} 2206 2207 2208/*=========================================================================== 2209 * FUNCTION : AddSetParmEntryToBatch 2210 * 2211 * DESCRIPTION: add set parameter entry into batch 2212 * 2213 * PARAMETERS : 2214 * @p_table : ptr to parameter buffer 2215 * @paramType : parameter type 2216 * @paramLength : length of parameter value 2217 * @paramValue : ptr to parameter value 2218 * 2219 * RETURN : int32_t type of status 2220 * NO_ERROR -- success 2221 * none-zero failure code 2222 
*==========================================================================*/ 2223int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2224 cam_intf_parm_type_t paramType, 2225 uint32_t paramLength, 2226 void *paramValue) 2227{ 2228 int position = paramType; 2229 int current, next; 2230 2231 /************************************************************************* 2232 * Code to take care of linking next flags * 2233 *************************************************************************/ 2234 current = GET_FIRST_PARAM_ID(p_table); 2235 if (position == current){ 2236 //DO NOTHING 2237 } else if (position < current){ 2238 SET_NEXT_PARAM_ID(position, p_table, current); 2239 SET_FIRST_PARAM_ID(p_table, position); 2240 } else { 2241 /* Search for the position in the linked list where we need to slot in*/ 2242 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2243 current = GET_NEXT_PARAM_ID(current, p_table); 2244 2245 /*If node already exists no need to alter linking*/ 2246 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2247 next = GET_NEXT_PARAM_ID(current, p_table); 2248 SET_NEXT_PARAM_ID(current, p_table, position); 2249 SET_NEXT_PARAM_ID(position, p_table, next); 2250 } 2251 } 2252 2253 /************************************************************************* 2254 * Copy contents into entry * 2255 *************************************************************************/ 2256 2257 if (paramLength > sizeof(parm_type_t)) { 2258 ALOGE("%s:Size of input larger than max entry size",__func__); 2259 return BAD_VALUE; 2260 } 2261 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2262 return NO_ERROR; 2263} 2264 2265/*=========================================================================== 2266 * FUNCTION : lookupFwkName 2267 * 2268 * DESCRIPTION: In case the enum is not same in fwk and backend 2269 * make sure the parameter is correctly propogated 2270 * 2271 * PARAMETERS : 2272 * @arr : map between the two 
enums 2273 * @len : len of the map 2274 * @hal_name : name of the hal_parm to map 2275 * 2276 * RETURN : int type of status 2277 * fwk_name -- success 2278 * none-zero failure code 2279 *==========================================================================*/ 2280int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[], 2281 int len, int hal_name) 2282{ 2283 2284 for (int i = 0; i < len; i++) { 2285 if (arr[i].hal_name == hal_name) 2286 return arr[i].fwk_name; 2287 } 2288 2289 /* Not able to find matching framework type is not necessarily 2290 * an error case. This happens when mm-camera supports more attributes 2291 * than the frameworks do */ 2292 ALOGD("%s: Cannot find matching framework type", __func__); 2293 return NAME_NOT_FOUND; 2294} 2295 2296/*=========================================================================== 2297 * FUNCTION : lookupHalName 2298 * 2299 * DESCRIPTION: In case the enum is not same in fwk and backend 2300 * make sure the parameter is correctly propogated 2301 * 2302 * PARAMETERS : 2303 * @arr : map between the two enums 2304 * @len : len of the map 2305 * @fwk_name : name of the hal_parm to map 2306 * 2307 * RETURN : int32_t type of status 2308 * hal_name -- success 2309 * none-zero failure code 2310 *==========================================================================*/ 2311int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2312 int len, int fwk_name) 2313{ 2314 for (int i = 0; i < len; i++) { 2315 if (arr[i].fwk_name == fwk_name) 2316 return arr[i].hal_name; 2317 } 2318 ALOGE("%s: Cannot find matching hal type", __func__); 2319 return NAME_NOT_FOUND; 2320} 2321 2322/*=========================================================================== 2323 * FUNCTION : getCapabilities 2324 * 2325 * DESCRIPTION: query camera capabilities 2326 * 2327 * PARAMETERS : 2328 * @cameraId : camera Id 2329 * @info : camera info struct to be filled in with camera capabilities 2330 * 2331 * RETURN : int32_t 
type of status 2332 * NO_ERROR -- success 2333 * none-zero failure code 2334 *==========================================================================*/ 2335int QCamera3HardwareInterface::getCamInfo(int cameraId, 2336 struct camera_info *info) 2337{ 2338 int rc = 0; 2339 2340 if (NULL == gCamCapability[cameraId]) { 2341 rc = initCapabilities(cameraId); 2342 if (rc < 0) { 2343 //pthread_mutex_unlock(&g_camlock); 2344 return rc; 2345 } 2346 } 2347 2348 if (NULL == gStaticMetadata[cameraId]) { 2349 rc = initStaticMetadata(cameraId); 2350 if (rc < 0) { 2351 return rc; 2352 } 2353 } 2354 2355 switch(gCamCapability[cameraId]->position) { 2356 case CAM_POSITION_BACK: 2357 info->facing = CAMERA_FACING_BACK; 2358 break; 2359 2360 case CAM_POSITION_FRONT: 2361 info->facing = CAMERA_FACING_FRONT; 2362 break; 2363 2364 default: 2365 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2366 rc = -1; 2367 break; 2368 } 2369 2370 2371 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2372 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2373 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2374 2375 return rc; 2376} 2377 2378/*=========================================================================== 2379 * FUNCTION : translateMetadata 2380 * 2381 * DESCRIPTION: translate the metadata into camera_metadata_t 2382 * 2383 * PARAMETERS : type of the request 2384 * 2385 * 2386 * RETURN : success: camera_metadata_t* 2387 * failure: NULL 2388 * 2389 *==========================================================================*/ 2390camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2391{ 2392 pthread_mutex_lock(&mMutex); 2393 2394 if (mDefaultMetadata[type] != NULL) { 2395 pthread_mutex_unlock(&mMutex); 2396 return mDefaultMetadata[type]; 2397 } 2398 //first time we are handling this request 2399 //fill up the metadata structure using the wrapper class 2400 CameraMetadata settings; 2401 
//translate from cam_capability_t to camera_metadata_tag_t 2402 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2403 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2404 2405 /*control*/ 2406 2407 uint8_t controlIntent = 0; 2408 switch (type) { 2409 case CAMERA3_TEMPLATE_PREVIEW: 2410 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2411 break; 2412 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2413 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2414 break; 2415 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2416 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2417 break; 2418 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2419 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2420 break; 2421 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2422 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2423 break; 2424 default: 2425 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2426 break; 2427 } 2428 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2429 2430 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2431 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2432 2433 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2434 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2435 2436 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2437 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2438 2439 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2440 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2441 2442 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2443 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2444 2445 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2446 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2447 2448 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2449 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2450 2451 static uint8_t focusMode; 2452 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2453 ALOGE("%s: Setting focus mode to auto", __func__); 2454 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2455 } else { 2456 ALOGE("%s: Setting focus mode to off", __func__); 2457 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2458 } 2459 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2460 2461 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2462 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2463 2464 /*flash*/ 2465 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2466 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2467 2468 2469 /* lens */ 2470 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2471 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2472 2473 if (gCamCapability[mCameraId]->filter_densities_count) { 2474 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2475 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2476 gCamCapability[mCameraId]->filter_densities_count); 2477 } 2478 2479 float default_focal_length = gCamCapability[mCameraId]->focal_length; 2480 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2481 2482 mDefaultMetadata[type] = settings.release(); 2483 2484 pthread_mutex_unlock(&mMutex); 2485 return mDefaultMetadata[type]; 2486} 2487 2488/*=========================================================================== 2489 * FUNCTION : setFrameParameters 2490 * 2491 * DESCRIPTION: set parameters per frame as requested in the metadata from 2492 * framework 2493 * 2494 * PARAMETERS : 2495 * @settings : frame settings information from framework 2496 * 2497 * 2498 * RETURN : success: NO_ERROR 2499 * failure: 2500 *==========================================================================*/ 2501int QCamera3HardwareInterface::setFrameParameters(int frame_id, 
2502 const camera_metadata_t *settings) 2503{ 2504 /*translate from camera_metadata_t type to parm_type_t*/ 2505 int rc = 0; 2506 if (settings == NULL && mFirstRequest) { 2507 /*settings cannot be null for the first request*/ 2508 return BAD_VALUE; 2509 } 2510 2511 int32_t hal_version = CAM_HAL_V3; 2512 2513 memset(mParameters, 0, sizeof(parm_buffer_t)); 2514 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2515 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2516 sizeof(hal_version), &hal_version); 2517 2518 /*we need to update the frame number in the parameters*/ 2519 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2520 sizeof(frame_id), &frame_id); 2521 if (rc < 0) { 2522 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2523 return BAD_VALUE; 2524 } 2525 2526 if(settings != NULL){ 2527 rc = translateMetadataToParameters(settings); 2528 } 2529 /*set the parameters to backend*/ 2530 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2531 return rc; 2532} 2533 2534/*=========================================================================== 2535 * FUNCTION : translateMetadataToParameters 2536 * 2537 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2538 * 2539 * 2540 * PARAMETERS : 2541 * @settings : frame settings information from framework 2542 * 2543 * 2544 * RETURN : success: NO_ERROR 2545 * failure: 2546 *==========================================================================*/ 2547int QCamera3HardwareInterface::translateMetadataToParameters 2548 (const camera_metadata_t *settings) 2549{ 2550 int rc = 0; 2551 CameraMetadata frame_settings; 2552 frame_settings = settings; 2553 2554 2555 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2556 int32_t antibandingMode = 2557 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2558 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2559 
sizeof(antibandingMode), &antibandingMode); 2560 } 2561 2562 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 2563 int32_t expCompensation = frame_settings.find( 2564 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 2565 if (expCompensation < gCamCapability[mCameraId]->exposure_time_range[0]) 2566 expCompensation = gCamCapability[mCameraId]->exposure_time_range[0]; 2567 if (expCompensation > gCamCapability[mCameraId]->exposure_time_range[1]) 2568 expCompensation = gCamCapability[mCameraId]->exposure_time_range[1]; 2569 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2570 sizeof(expCompensation), &expCompensation); 2571 } 2572 2573 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2574 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2575 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2576 sizeof(aeLock), &aeLock); 2577 } 2578 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2579 cam_fps_range_t fps_range; 2580 fps_range.min_fps = 2581 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2582 fps_range.max_fps = 2583 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1]; 2584 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2585 sizeof(fps_range), &fps_range); 2586 } 2587 2588 float focalDistance = -1.0; 2589 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2590 focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2591 rc = AddSetParmEntryToBatch(mParameters, 2592 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2593 sizeof(focalDistance), &focalDistance); 2594 } 2595 2596 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2597 uint8_t fwk_focusMode = 2598 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2599 uint8_t focusMode; 2600 if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) { 2601 focusMode = CAM_FOCUS_MODE_INFINITY; 2602 } else{ 2603 
focusMode = lookupHalName(FOCUS_MODES_MAP, 2604 sizeof(FOCUS_MODES_MAP), 2605 fwk_focusMode); 2606 } 2607 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2608 sizeof(focusMode), &focusMode); 2609 } 2610 2611 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2612 uint8_t awbLock = 2613 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2614 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2615 sizeof(awbLock), &awbLock); 2616 } 2617 2618 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2619 uint8_t fwk_whiteLevel = 2620 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2621 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2622 sizeof(WHITE_BALANCE_MODES_MAP), 2623 fwk_whiteLevel); 2624 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2625 sizeof(whiteLevel), &whiteLevel); 2626 } 2627 2628 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2629 uint8_t fwk_effectMode = 2630 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2631 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2632 sizeof(EFFECT_MODES_MAP), 2633 fwk_effectMode); 2634 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2635 sizeof(effectMode), &effectMode); 2636 } 2637 2638 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2639 uint8_t fwk_aeMode = 2640 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2641 uint8_t aeMode; 2642 int32_t redeye; 2643 2644 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 2645 aeMode = CAM_AE_MODE_OFF; 2646 } else { 2647 aeMode = CAM_AE_MODE_ON; 2648 } 2649 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 2650 redeye = 1; 2651 } else { 2652 redeye = 0; 2653 } 2654 2655 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 2656 sizeof(AE_FLASH_MODE_MAP), 2657 fwk_aeMode); 2658 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2659 sizeof(aeMode), &aeMode); 2660 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2661 sizeof(flashMode), &flashMode); 2662 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 2663 sizeof(redeye), &redeye); 2664 } 2665 2666 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2667 uint8_t colorCorrectMode = 2668 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2669 rc = 2670 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2671 sizeof(colorCorrectMode), &colorCorrectMode); 2672 } 2673 cam_trigger_t aecTrigger; 2674 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 2675 aecTrigger.trigger_id = -1; 2676 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 2677 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 2678 aecTrigger.trigger = 2679 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2680 aecTrigger.trigger_id = 2681 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 2682 } 2683 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2684 sizeof(aecTrigger), &aecTrigger); 2685 2686 /*af_trigger must come with a trigger id*/ 2687 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 2688 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 2689 cam_trigger_t af_trigger; 2690 af_trigger.trigger = 2691 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2692 af_trigger.trigger_id = 2693 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 2694 rc = AddSetParmEntryToBatch(mParameters, 2695 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 2696 } 2697 2698 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2699 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2700 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2701 sizeof(metaMode), &metaMode); 2702 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 2703 uint8_t fwk_sceneMode = 
frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0]; 2704 uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP, 2705 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 2706 fwk_sceneMode); 2707 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2708 sizeof(sceneMode), &sceneMode); 2709 } else if (metaMode == ANDROID_CONTROL_MODE_OFF) { 2710 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 2711 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2712 sizeof(sceneMode), &sceneMode); 2713 } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) { 2714 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 2715 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2716 sizeof(sceneMode), &sceneMode); 2717 } 2718 } 2719 2720 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2721 int32_t demosaic = 2722 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2723 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2724 sizeof(demosaic), &demosaic); 2725 } 2726 2727 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2728 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2729 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2730 sizeof(edgeMode), &edgeMode); 2731 } 2732 2733 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2734 int32_t edgeStrength = 2735 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2736 rc = AddSetParmEntryToBatch(mParameters, 2737 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2738 } 2739 2740 if (frame_settings.exists(ANDROID_FLASH_MODE)) { 2741 int32_t respectFlashMode = 1; 2742 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2743 uint8_t fwk_aeMode = 2744 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2745 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) { 2746 respectFlashMode = 0; 2747 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode", 2748 __func__); 2749 } 2750 } 2751 if (respectFlashMode) { 2752 uint8_t 
flashMode = 2753 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]; 2754 flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP, 2755 sizeof(FLASH_MODES_MAP), 2756 flashMode); 2757 ALOGI("%s: flash mode after mapping %d", __func__, flashMode); 2758 // To check: CAM_INTF_META_FLASH_MODE usage 2759 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2760 sizeof(flashMode), &flashMode); 2761 } 2762 } 2763 2764 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2765 uint8_t flashPower = 2766 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2767 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2768 sizeof(flashPower), &flashPower); 2769 } 2770 2771 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2772 int64_t flashFiringTime = 2773 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2774 rc = AddSetParmEntryToBatch(mParameters, 2775 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2776 } 2777 2778 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2779 uint8_t geometricMode = 2780 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2781 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2782 sizeof(geometricMode), &geometricMode); 2783 } 2784 2785 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2786 uint8_t geometricStrength = 2787 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2788 rc = AddSetParmEntryToBatch(mParameters, 2789 CAM_INTF_META_GEOMETRIC_STRENGTH, 2790 sizeof(geometricStrength), &geometricStrength); 2791 } 2792 2793 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2794 uint8_t hotPixelMode = 2795 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2796 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 2797 sizeof(hotPixelMode), &hotPixelMode); 2798 } 2799 2800 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2801 float lensAperture = 2802 frame_settings.find( 
ANDROID_LENS_APERTURE).data.f[0]; 2803 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2804 sizeof(lensAperture), &lensAperture); 2805 } 2806 2807 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2808 float filterDensity = 2809 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2810 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2811 sizeof(filterDensity), &filterDensity); 2812 } 2813 2814 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2815 float focalLength = 2816 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2817 rc = AddSetParmEntryToBatch(mParameters, 2818 CAM_INTF_META_LENS_FOCAL_LENGTH, 2819 sizeof(focalLength), &focalLength); 2820 } 2821 2822 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2823 uint8_t optStabMode = 2824 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2825 rc = AddSetParmEntryToBatch(mParameters, 2826 CAM_INTF_META_LENS_OPT_STAB_MODE, 2827 sizeof(optStabMode), &optStabMode); 2828 } 2829 2830 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2831 uint8_t noiseRedMode = 2832 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2833 rc = AddSetParmEntryToBatch(mParameters, 2834 CAM_INTF_META_NOISE_REDUCTION_MODE, 2835 sizeof(noiseRedMode), &noiseRedMode); 2836 } 2837 2838 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2839 uint8_t noiseRedStrength = 2840 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2841 rc = AddSetParmEntryToBatch(mParameters, 2842 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2843 sizeof(noiseRedStrength), &noiseRedStrength); 2844 } 2845 2846 cam_crop_region_t scalerCropRegion; 2847 bool scalerCropSet = false; 2848 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2849 scalerCropRegion.left = 2850 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2851 scalerCropRegion.top = 2852 
frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2853 scalerCropRegion.width = 2854 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2855 scalerCropRegion.height = 2856 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3]; 2857 rc = AddSetParmEntryToBatch(mParameters, 2858 CAM_INTF_META_SCALER_CROP_REGION, 2859 sizeof(scalerCropRegion), &scalerCropRegion); 2860 scalerCropSet = true; 2861 } 2862 2863 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2864 int64_t sensorExpTime = 2865 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2866 rc = AddSetParmEntryToBatch(mParameters, 2867 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2868 sizeof(sensorExpTime), &sensorExpTime); 2869 } 2870 2871 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2872 int64_t sensorFrameDuration = 2873 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2874 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration) 2875 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration; 2876 rc = AddSetParmEntryToBatch(mParameters, 2877 CAM_INTF_META_SENSOR_FRAME_DURATION, 2878 sizeof(sensorFrameDuration), &sensorFrameDuration); 2879 } 2880 2881 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2882 int32_t sensorSensitivity = 2883 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2884 if (sensorSensitivity < 2885 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity) 2886 sensorSensitivity = 2887 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity; 2888 if (sensorSensitivity > 2889 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity) 2890 sensorSensitivity = 2891 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity; 2892 rc = AddSetParmEntryToBatch(mParameters, 2893 CAM_INTF_META_SENSOR_SENSITIVITY, 2894 sizeof(sensorSensitivity), &sensorSensitivity); 2895 } 2896 2897 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2898 int32_t shadingMode = 2899 
frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2900 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2901 sizeof(shadingMode), &shadingMode); 2902 } 2903 2904 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2905 uint8_t shadingStrength = 2906 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2907 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2908 sizeof(shadingStrength), &shadingStrength); 2909 } 2910 2911 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2912 uint8_t facedetectMode = 2913 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2914 rc = AddSetParmEntryToBatch(mParameters, 2915 CAM_INTF_META_STATS_FACEDETECT_MODE, 2916 sizeof(facedetectMode), &facedetectMode); 2917 } 2918 2919 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2920 uint8_t histogramMode = 2921 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2922 rc = AddSetParmEntryToBatch(mParameters, 2923 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2924 sizeof(histogramMode), &histogramMode); 2925 } 2926 2927 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2928 uint8_t sharpnessMapMode = 2929 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2930 rc = AddSetParmEntryToBatch(mParameters, 2931 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2932 sizeof(sharpnessMapMode), &sharpnessMapMode); 2933 } 2934 2935 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2936 uint8_t tonemapMode = 2937 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2938 rc = AddSetParmEntryToBatch(mParameters, 2939 CAM_INTF_META_TONEMAP_MODE, 2940 sizeof(tonemapMode), &tonemapMode); 2941 } 2942 2943 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2944 uint8_t captureIntent = 2945 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2946 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2947 sizeof(captureIntent), 
&captureIntent); 2948 } 2949 2950 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { 2951 cam_area_t roi; 2952 bool reset = true; 2953 convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS); 2954 if (scalerCropSet) { 2955 reset = resetIfNeededROI(&roi, &scalerCropRegion); 2956 } 2957 if (reset) { 2958 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI, 2959 sizeof(roi), &roi); 2960 } 2961 } 2962 2963 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { 2964 cam_area_t roi; 2965 bool reset = true; 2966 convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS); 2967 if (scalerCropSet) { 2968 reset = resetIfNeededROI(&roi, &scalerCropRegion); 2969 } 2970 if (reset) { 2971 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI, 2972 sizeof(roi), &roi); 2973 } 2974 } 2975 2976 if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { 2977 cam_area_t roi; 2978 bool reset = true; 2979 convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS); 2980 if (scalerCropSet) { 2981 reset = resetIfNeededROI(&roi, &scalerCropRegion); 2982 } 2983 if (reset) { 2984 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS, 2985 sizeof(roi), &roi); 2986 } 2987 } 2988 return rc; 2989} 2990 2991/*=========================================================================== 2992 * FUNCTION : getJpegSettings 2993 * 2994 * DESCRIPTION: save the jpeg settings in the HAL 2995 * 2996 * 2997 * PARAMETERS : 2998 * @settings : frame settings information from framework 2999 * 3000 * 3001 * RETURN : success: NO_ERROR 3002 * failure: 3003 *==========================================================================*/ 3004int QCamera3HardwareInterface::getJpegSettings 3005 (const camera_metadata_t *settings) 3006{ 3007 if (mJpegSettings) { 3008 if (mJpegSettings->gps_timestamp) { 3009 free(mJpegSettings->gps_timestamp); 3010 mJpegSettings->gps_timestamp = NULL; 3011 } 3012 if (mJpegSettings->gps_coordinates) { 3013 for (int i = 0; i < 3; i++) { 
3014 free(mJpegSettings->gps_coordinates[i]); 3015 mJpegSettings->gps_coordinates[i] = NULL; 3016 } 3017 } 3018 free(mJpegSettings); 3019 mJpegSettings = NULL; 3020 } 3021 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 3022 CameraMetadata jpeg_settings; 3023 jpeg_settings = settings; 3024 3025 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 3026 mJpegSettings->jpeg_orientation = 3027 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 3028 } else { 3029 mJpegSettings->jpeg_orientation = 0; 3030 } 3031 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 3032 mJpegSettings->jpeg_quality = 3033 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 3034 } else { 3035 mJpegSettings->jpeg_quality = 85; 3036 } 3037 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 3038 mJpegSettings->thumbnail_size.width = 3039 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 3040 mJpegSettings->thumbnail_size.height = 3041 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 3042 } else { 3043 mJpegSettings->thumbnail_size.width = 0; 3044 mJpegSettings->thumbnail_size.height = 0; 3045 } 3046 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 3047 for (int i = 0; i < 3; i++) { 3048 mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*)); 3049 *(mJpegSettings->gps_coordinates[i]) = 3050 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 3051 } 3052 } else{ 3053 for (int i = 0; i < 3; i++) { 3054 mJpegSettings->gps_coordinates[i] = NULL; 3055 } 3056 } 3057 3058 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 3059 mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*)); 3060 *(mJpegSettings->gps_timestamp) = 3061 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 3062 } else { 3063 mJpegSettings->gps_timestamp = NULL; 3064 } 3065 3066 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 3067 int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count; 3068 
for (int i = 0; i < len; i++) { 3069 mJpegSettings->gps_processing_method[i] = 3070 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i]; 3071 } 3072 if (mJpegSettings->gps_processing_method[len-1] != '\0') { 3073 mJpegSettings->gps_processing_method[len] = '\0'; 3074 } 3075 } else { 3076 mJpegSettings->gps_processing_method[0] = '\0'; 3077 } 3078 3079 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 3080 mJpegSettings->sensor_sensitivity = 3081 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 3082 } else { 3083 mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed; 3084 } 3085 3086 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 3087 mJpegSettings->lens_focal_length = 3088 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 3089 } 3090 if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 3091 mJpegSettings->exposure_compensation = 3092 jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 3093 } 3094 mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step; 3095 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 3096 return 0; 3097} 3098 3099/*=========================================================================== 3100 * FUNCTION : captureResultCb 3101 * 3102 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 3103 * 3104 * PARAMETERS : 3105 * @frame : frame information from mm-camera-interface 3106 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 
3107 * @userdata: userdata 3108 * 3109 * RETURN : NONE 3110 *==========================================================================*/ 3111void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 3112 camera3_stream_buffer_t *buffer, 3113 uint32_t frame_number, void *userdata) 3114{ 3115 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 3116 if (hw == NULL) { 3117 ALOGE("%s: Invalid hw %p", __func__, hw); 3118 return; 3119 } 3120 3121 hw->captureResultCb(metadata, buffer, frame_number); 3122 return; 3123} 3124 3125/*=========================================================================== 3126 * FUNCTION : initialize 3127 * 3128 * DESCRIPTION: Pass framework callback pointers to HAL 3129 * 3130 * PARAMETERS : 3131 * 3132 * 3133 * RETURN : Success : 0 3134 * Failure: -ENODEV 3135 *==========================================================================*/ 3136 3137int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 3138 const camera3_callback_ops_t *callback_ops) 3139{ 3140 ALOGV("%s: E", __func__); 3141 QCamera3HardwareInterface *hw = 3142 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3143 if (!hw) { 3144 ALOGE("%s: NULL camera device", __func__); 3145 return -ENODEV; 3146 } 3147 3148 int rc = hw->initialize(callback_ops); 3149 ALOGV("%s: X", __func__); 3150 return rc; 3151} 3152 3153/*=========================================================================== 3154 * FUNCTION : configure_streams 3155 * 3156 * DESCRIPTION: 3157 * 3158 * PARAMETERS : 3159 * 3160 * 3161 * RETURN : Success: 0 3162 * Failure: -EINVAL (if stream configuration is invalid) 3163 * -ENODEV (fatal error) 3164 *==========================================================================*/ 3165 3166int QCamera3HardwareInterface::configure_streams( 3167 const struct camera3_device *device, 3168 camera3_stream_configuration_t *stream_list) 3169{ 3170 ALOGV("%s: E", __func__); 3171 QCamera3HardwareInterface 
*hw = 3172 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3173 if (!hw) { 3174 ALOGE("%s: NULL camera device", __func__); 3175 return -ENODEV; 3176 } 3177 int rc = hw->configureStreams(stream_list); 3178 ALOGV("%s: X", __func__); 3179 return rc; 3180} 3181 3182/*=========================================================================== 3183 * FUNCTION : register_stream_buffers 3184 * 3185 * DESCRIPTION: Register stream buffers with the device 3186 * 3187 * PARAMETERS : 3188 * 3189 * RETURN : 3190 *==========================================================================*/ 3191int QCamera3HardwareInterface::register_stream_buffers( 3192 const struct camera3_device *device, 3193 const camera3_stream_buffer_set_t *buffer_set) 3194{ 3195 ALOGV("%s: E", __func__); 3196 QCamera3HardwareInterface *hw = 3197 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3198 if (!hw) { 3199 ALOGE("%s: NULL camera device", __func__); 3200 return -ENODEV; 3201 } 3202 int rc = hw->registerStreamBuffers(buffer_set); 3203 ALOGV("%s: X", __func__); 3204 return rc; 3205} 3206 3207/*=========================================================================== 3208 * FUNCTION : construct_default_request_settings 3209 * 3210 * DESCRIPTION: Configure a settings buffer to meet the required use case 3211 * 3212 * PARAMETERS : 3213 * 3214 * 3215 * RETURN : Success: Return valid metadata 3216 * Failure: Return NULL 3217 *==========================================================================*/ 3218const camera_metadata_t* QCamera3HardwareInterface:: 3219 construct_default_request_settings(const struct camera3_device *device, 3220 int type) 3221{ 3222 3223 ALOGV("%s: E", __func__); 3224 camera_metadata_t* fwk_metadata = NULL; 3225 QCamera3HardwareInterface *hw = 3226 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3227 if (!hw) { 3228 ALOGE("%s: NULL camera device", __func__); 3229 return NULL; 3230 } 3231 3232 fwk_metadata = 
hw->translateCapabilityToMetadata(type); 3233 3234 ALOGV("%s: X", __func__); 3235 return fwk_metadata; 3236} 3237 3238/*=========================================================================== 3239 * FUNCTION : process_capture_request 3240 * 3241 * DESCRIPTION: 3242 * 3243 * PARAMETERS : 3244 * 3245 * 3246 * RETURN : 3247 *==========================================================================*/ 3248int QCamera3HardwareInterface::process_capture_request( 3249 const struct camera3_device *device, 3250 camera3_capture_request_t *request) 3251{ 3252 ALOGV("%s: E", __func__); 3253 QCamera3HardwareInterface *hw = 3254 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3255 if (!hw) { 3256 ALOGE("%s: NULL camera device", __func__); 3257 return -EINVAL; 3258 } 3259 3260 int rc = hw->processCaptureRequest(request); 3261 ALOGV("%s: X", __func__); 3262 return rc; 3263} 3264 3265/*=========================================================================== 3266 * FUNCTION : get_metadata_vendor_tag_ops 3267 * 3268 * DESCRIPTION: 3269 * 3270 * PARAMETERS : 3271 * 3272 * 3273 * RETURN : 3274 *==========================================================================*/ 3275 3276void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 3277 const struct camera3_device *device, 3278 vendor_tag_query_ops_t* ops) 3279{ 3280 ALOGV("%s: E", __func__); 3281 QCamera3HardwareInterface *hw = 3282 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3283 if (!hw) { 3284 ALOGE("%s: NULL camera device", __func__); 3285 return; 3286 } 3287 3288 hw->getMetadataVendorTagOps(ops); 3289 ALOGV("%s: X", __func__); 3290 return; 3291} 3292 3293/*=========================================================================== 3294 * FUNCTION : dump 3295 * 3296 * DESCRIPTION: 3297 * 3298 * PARAMETERS : 3299 * 3300 * 3301 * RETURN : 3302 *==========================================================================*/ 3303 3304void QCamera3HardwareInterface::dump( 3305 const struct 
camera3_device *device, int fd) 3306{ 3307 ALOGV("%s: E", __func__); 3308 QCamera3HardwareInterface *hw = 3309 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3310 if (!hw) { 3311 ALOGE("%s: NULL camera device", __func__); 3312 return; 3313 } 3314 3315 hw->dump(fd); 3316 ALOGV("%s: X", __func__); 3317 return; 3318} 3319 3320/*=========================================================================== 3321 * FUNCTION : close_camera_device 3322 * 3323 * DESCRIPTION: 3324 * 3325 * PARAMETERS : 3326 * 3327 * 3328 * RETURN : 3329 *==========================================================================*/ 3330int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3331{ 3332 ALOGV("%s: E", __func__); 3333 int ret = NO_ERROR; 3334 QCamera3HardwareInterface *hw = 3335 reinterpret_cast<QCamera3HardwareInterface *>( 3336 reinterpret_cast<camera3_device_t *>(device)->priv); 3337 if (!hw) { 3338 ALOGE("NULL camera device"); 3339 return BAD_VALUE; 3340 } 3341 delete hw; 3342 3343 pthread_mutex_lock(&mCameraSessionLock); 3344 mCameraSessionActive = 0; 3345 pthread_mutex_unlock(&mCameraSessionLock); 3346 ALOGV("%s: X", __func__); 3347 return ret; 3348} 3349 3350}; //end namespace qcamera 3351