QCamera3HWI.cpp revision 8867984d33ed870071109a4500402dddbc85c048
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27* 28*/ 29 30#define LOG_TAG "QCamera3HWI" 31 32#include <cutils/properties.h> 33#include <hardware/camera3.h> 34#include <camera/CameraMetadata.h> 35#include <stdlib.h> 36#include <utils/Log.h> 37#include <utils/Errors.h> 38#include <ui/Fence.h> 39#include <gralloc_priv.h> 40#include "QCamera3HWI.h" 41#include "QCamera3Mem.h" 42#include "QCamera3Channel.h" 43#include "QCamera3PostProc.h" 44 45using namespace android; 46 47namespace qcamera { 48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS]; 50parm_buffer_t *prevSettings; 51const camera_metadata_t *gStaticMetadata; 52 53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = { 54 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF }, 55 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO }, 56 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE }, 57 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE }, 58 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA }, 59 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE }, 60 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD }, 61 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD }, 62 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA } 63}; 64 65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = { 66 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF }, 67 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO }, 68 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT }, 69 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT }, 70 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT}, 71 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT }, 72 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT }, 73 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, 
CAM_WB_MODE_TWILIGHT }, 74 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE } 75}; 76 77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = { 78 { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED, CAM_SCENE_MODE_OFF }, 79 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION }, 80 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT }, 81 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE }, 82 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT }, 83 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT }, 84 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE }, 85 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH }, 86 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW }, 87 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET }, 88 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE }, 89 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS }, 90 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS }, 91 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY }, 92 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT }, 93 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE} 94}; 95 96const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = { 97 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF }, 98 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED }, 99 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO }, 100 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO }, 101 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF }, 102 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE }, 103 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO } 104}; 105 106const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = { 107 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, 
CAM_ANTIBANDING_MODE_OFF }, 108 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ }, 109 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ }, 110 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO } 111}; 112 113const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AUTO_EXPOSURE_MAP[] = { 114 { ANDROID_CONTROL_AE_MODE_OFF, CAM_AEC_MODE_OFF }, 115 { ANDROID_CONTROL_AE_MODE_ON, CAM_AEC_MODE_FRAME_AVERAGE }, 116}; 117 118const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = { 119 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF }, 120 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON }, 121 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH} 122}; 123 124 125camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = { 126 initialize: QCamera3HardwareInterface::initialize, 127 configure_streams: QCamera3HardwareInterface::configure_streams, 128 register_stream_buffers: QCamera3HardwareInterface::register_stream_buffers, 129 construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings, 130 process_capture_request: QCamera3HardwareInterface::process_capture_request, 131 get_metadata_vendor_tag_ops: QCamera3HardwareInterface::get_metadata_vendor_tag_ops, 132 dump: QCamera3HardwareInterface::dump, 133}; 134 135 136/*=========================================================================== 137 * FUNCTION : QCamera3HardwareInterface 138 * 139 * DESCRIPTION: constructor of QCamera3HardwareInterface 140 * 141 * PARAMETERS : 142 * @cameraId : camera ID 143 * 144 * RETURN : none 145 *==========================================================================*/ 146QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 147 : mCameraId(cameraId), 148 mCameraHandle(NULL), 149 mCameraOpened(false), 150 mCallbackOps(NULL), 151 mInputStream(NULL), 152 mMetadataChannel(NULL), 153 mFirstRequest(false), 154 mParamHeap(NULL), 155 
mParameters(NULL), 156 mJpegSettings(NULL) 157{ 158 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 159 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 160 mCameraDevice.common.close = close_camera_device; 161 mCameraDevice.ops = &mCameraOps; 162 mCameraDevice.priv = this; 163 gCamCapability[cameraId]->version = CAM_HAL_V3; 164 165 pthread_mutex_init(&mRequestLock, NULL); 166 pthread_cond_init(&mRequestCond, NULL); 167 mPendingRequest = 0; 168 169 pthread_mutex_init(&mMutex, NULL); 170 pthread_mutex_init(&mCaptureResultLock, NULL); 171 172 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 173 mDefaultMetadata[i] = NULL; 174} 175 176/*=========================================================================== 177 * FUNCTION : ~QCamera3HardwareInterface 178 * 179 * DESCRIPTION: destructor of QCamera3HardwareInterface 180 * 181 * PARAMETERS : none 182 * 183 * RETURN : none 184 *==========================================================================*/ 185QCamera3HardwareInterface::~QCamera3HardwareInterface() 186{ 187 ALOGV("%s: E", __func__); 188 /* Clean up all channels */ 189 mMetadataChannel->stop(); 190 delete mMetadataChannel; 191 mMetadataChannel = NULL; 192 /* We need to stop all streams before deleting any stream */ 193 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 194 it != mStreamInfo.end(); it++) { 195 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 196 channel->stop(); 197 } 198 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 199 it != mStreamInfo.end(); it++) { 200 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 201 delete channel; 202 free (*it); 203 } 204 205 if (mJpegSettings != NULL) { 206 free(mJpegSettings); 207 mJpegSettings = NULL; 208 } 209 deinitParameters(); 210 closeCamera(); 211 212 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 213 if (mDefaultMetadata[i]) 214 free_camera_metadata(mDefaultMetadata[i]); 215 216 pthread_mutex_destroy(&mRequestLock); 
217 pthread_cond_destroy(&mRequestCond); 218 219 pthread_mutex_destroy(&mMutex); 220 pthread_mutex_destroy(&mCaptureResultLock); 221 ALOGV("%s: X", __func__); 222} 223 224/*=========================================================================== 225 * FUNCTION : openCamera 226 * 227 * DESCRIPTION: open camera 228 * 229 * PARAMETERS : 230 * @hw_device : double ptr for camera device struct 231 * 232 * RETURN : int32_t type of status 233 * NO_ERROR -- success 234 * none-zero failure code 235 *==========================================================================*/ 236int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 237{ 238 //int rc = NO_ERROR; 239 int rc = 0; 240 if (mCameraOpened) { 241 *hw_device = NULL; 242 return PERMISSION_DENIED; 243 } 244 245 rc = openCamera(); 246 if (rc == 0) 247 *hw_device = &mCameraDevice.common; 248 else 249 *hw_device = NULL; 250 return rc; 251} 252 253/*=========================================================================== 254 * FUNCTION : openCamera 255 * 256 * DESCRIPTION: open camera 257 * 258 * PARAMETERS : none 259 * 260 * RETURN : int32_t type of status 261 * NO_ERROR -- success 262 * none-zero failure code 263 *==========================================================================*/ 264int QCamera3HardwareInterface::openCamera() 265{ 266 if (mCameraHandle) { 267 ALOGE("Failure: Camera already opened"); 268 return ALREADY_EXISTS; 269 } 270 mCameraHandle = camera_open(mCameraId); 271 if (!mCameraHandle) { 272 ALOGE("camera_open failed."); 273 return UNKNOWN_ERROR; 274 } 275 276 mCameraOpened = true; 277 278 return NO_ERROR; 279} 280 281/*=========================================================================== 282 * FUNCTION : closeCamera 283 * 284 * DESCRIPTION: close camera 285 * 286 * PARAMETERS : none 287 * 288 * RETURN : int32_t type of status 289 * NO_ERROR -- success 290 * none-zero failure code 291 *==========================================================================*/ 292int 
QCamera3HardwareInterface::closeCamera() 293{ 294 int rc = NO_ERROR; 295 296 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 297 mCameraHandle = NULL; 298 mCameraOpened = false; 299 300 return rc; 301} 302 303/*=========================================================================== 304 * FUNCTION : initialize 305 * 306 * DESCRIPTION: Initialize frameworks callback functions 307 * 308 * PARAMETERS : 309 * @callback_ops : callback function to frameworks 310 * 311 * RETURN : 312 * 313 *==========================================================================*/ 314int QCamera3HardwareInterface::initialize( 315 const struct camera3_callback_ops *callback_ops) 316{ 317 int rc; 318 319 pthread_mutex_lock(&mMutex); 320 321 rc = initParameters(); 322 if (rc < 0) { 323 ALOGE("%s: initParamters failed %d", __func__, rc); 324 goto err1; 325 } 326 //Create metadata channel and initialize it 327 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 328 mCameraHandle->ops, captureResultCb, 329 &gCamCapability[mCameraId]->padding_info, this); 330 if (mMetadataChannel == NULL) { 331 ALOGE("%s: failed to allocate metadata channel", __func__); 332 rc = -ENOMEM; 333 goto err2; 334 } 335 rc = mMetadataChannel->initialize(); 336 if (rc < 0) { 337 ALOGE("%s: metadata channel initialization failed", __func__); 338 goto err3; 339 } 340 341 mCallbackOps = callback_ops; 342 343 pthread_mutex_unlock(&mMutex); 344 return 0; 345 346err3: 347 delete mMetadataChannel; 348 mMetadataChannel = NULL; 349err2: 350 deinitParameters(); 351err1: 352 pthread_mutex_unlock(&mMutex); 353 return rc; 354} 355 356/*=========================================================================== 357 * FUNCTION : configureStreams 358 * 359 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 360 * and output streams. 
361 * 362 * PARAMETERS : 363 * @stream_list : streams to be configured 364 * 365 * RETURN : 366 * 367 *==========================================================================*/ 368int QCamera3HardwareInterface::configureStreams( 369 camera3_stream_configuration_t *streamList) 370{ 371 int rc = 0; 372 pthread_mutex_lock(&mMutex); 373 374 // Sanity check stream_list 375 if (streamList == NULL) { 376 ALOGE("%s: NULL stream configuration", __func__); 377 pthread_mutex_unlock(&mMutex); 378 return BAD_VALUE; 379 } 380 381 if (streamList->streams == NULL) { 382 ALOGE("%s: NULL stream list", __func__); 383 pthread_mutex_unlock(&mMutex); 384 return BAD_VALUE; 385 } 386 387 if (streamList->num_streams < 1) { 388 ALOGE("%s: Bad number of streams requested: %d", __func__, 389 streamList->num_streams); 390 pthread_mutex_unlock(&mMutex); 391 return BAD_VALUE; 392 } 393 394 camera3_stream_t *inputStream = NULL; 395 /* first invalidate all the steams in the mStreamList 396 * if they appear again, they will be validated */ 397 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 398 it != mStreamInfo.end(); it++) { 399 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 400 channel->stop(); 401 (*it)->status = INVALID; 402 } 403 404 for (size_t i = 0; i < streamList->num_streams; i++) { 405 camera3_stream_t *newStream = streamList->streams[i]; 406 ALOGV("%s: newStream type = %d, stream format = %d", 407 __func__, newStream->stream_type, newStream->format); 408 //if the stream is in the mStreamList validate it 409 bool stream_exists = false; 410 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 411 it != mStreamInfo.end(); it++) { 412 if ((*it)->stream == newStream) { 413 QCamera3Channel *channel = 414 (QCamera3Channel*)(*it)->stream->priv; 415 stream_exists = true; 416 (*it)->status = RECONFIGURE; 417 /*delete the channel object associated with the stream because 418 we need to reconfigure*/ 419 delete channel; 420 (*it)->stream->priv = NULL; 421 } 
422 } 423 if (!stream_exists) { 424 //new stream 425 stream_info_t* stream_info; 426 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 427 stream_info->stream = newStream; 428 stream_info->status = VALID; 429 stream_info->registered = 0; 430 mStreamInfo.push_back(stream_info); 431 } 432 if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 433 if (inputStream != NULL) { 434 ALOGE("%s: Multiple input streams requested!", __func__); 435 pthread_mutex_unlock(&mMutex); 436 return BAD_VALUE; 437 } 438 inputStream = newStream; 439 } 440 } 441 mInputStream = inputStream; 442 443 /*clean up invalid streams*/ 444 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 445 it != mStreamInfo.end();) { 446 if(((*it)->status) == INVALID){ 447 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 448 delete channel; 449 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 450 free(*it); 451 it = mStreamInfo.erase(it); 452 } else { 453 it++; 454 } 455 } 456 457 //mMetadataChannel->stop(); 458 459 /* Allocate channel objects for the requested streams */ 460 for (size_t i = 0; i < streamList->num_streams; i++) { 461 camera3_stream_t *newStream = streamList->streams[i]; 462 if (newStream->priv == NULL) { 463 //New stream, construct channel 464 switch (newStream->stream_type) { 465 case CAMERA3_STREAM_INPUT: 466 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 467 break; 468 case CAMERA3_STREAM_BIDIRECTIONAL: 469 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 470 GRALLOC_USAGE_HW_CAMERA_WRITE; 471 break; 472 case CAMERA3_STREAM_OUTPUT: 473 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 474 break; 475 default: 476 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 477 break; 478 } 479 480 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 481 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 482 QCamera3Channel *channel; 483 switch (newStream->format) { 484 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 485 case 
HAL_PIXEL_FORMAT_YCbCr_420_888: 486 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 487 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 488 mCameraHandle->ops, captureResultCb, 489 &gCamCapability[mCameraId]->padding_info, this, newStream); 490 if (channel == NULL) { 491 ALOGE("%s: allocation of channel failed", __func__); 492 pthread_mutex_unlock(&mMutex); 493 return -ENOMEM; 494 } 495 496 newStream->priv = channel; 497 break; 498 case HAL_PIXEL_FORMAT_BLOB: 499 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 500 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 501 mCameraHandle->ops, captureResultCb, 502 &gCamCapability[mCameraId]->padding_info, this, newStream); 503 if (channel == NULL) { 504 ALOGE("%s: allocation of channel failed", __func__); 505 pthread_mutex_unlock(&mMutex); 506 return -ENOMEM; 507 } 508 newStream->priv = channel; 509 break; 510 511 //TODO: Add support for app consumed format? 512 default: 513 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 514 break; 515 } 516 } 517 } else { 518 // Channel already exists for this stream 519 // Do nothing for now 520 } 521 } 522 /*For the streams to be reconfigured we need to register the buffers 523 since the framework wont*/ 524 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 525 it != mStreamInfo.end(); it++) { 526 if ((*it)->status == RECONFIGURE) { 527 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 528 /*only register buffers for streams that have already been 529 registered*/ 530 if ((*it)->registered) { 531 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 532 (*it)->buffer_set.buffers); 533 if (rc != NO_ERROR) { 534 ALOGE("%s: Failed to register the buffers of old stream,\ 535 rc = %d", __func__, rc); 536 } 537 ALOGD("%s: channel %p has %d buffers", 538 __func__, channel, (*it)->buffer_set.num_buffers); 539 } 540 } 541 542 ssize_t index = 
mPendingBuffersMap.indexOfKey((*it)->stream); 543 if (index == NAME_NOT_FOUND) { 544 mPendingBuffersMap.add((*it)->stream, 0); 545 } else { 546 mPendingBuffersMap.editValueAt(index) = 0; 547 } 548 } 549 550 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 551 mPendingRequestsList.clear(); 552 553 //settings/parameters don't carry over for new configureStreams 554 memset(mParameters, 0, sizeof(parm_buffer_t)); 555 mFirstRequest = true; 556 557 pthread_mutex_unlock(&mMutex); 558 return rc; 559} 560 561/*=========================================================================== 562 * FUNCTION : validateCaptureRequest 563 * 564 * DESCRIPTION: validate a capture request from camera service 565 * 566 * PARAMETERS : 567 * @request : request from framework to process 568 * 569 * RETURN : 570 * 571 *==========================================================================*/ 572int QCamera3HardwareInterface::validateCaptureRequest( 573 camera3_capture_request_t *request) 574{ 575 ssize_t idx = 0; 576 const camera3_stream_buffer_t *b; 577 CameraMetadata meta; 578 579 /* Sanity check the request */ 580 if (request == NULL) { 581 ALOGE("%s: NULL capture request", __func__); 582 return BAD_VALUE; 583 } 584 585 uint32_t frameNumber = request->frame_number; 586 if (request->input_buffer != NULL && 587 request->input_buffer->stream != mInputStream) { 588 ALOGE("%s: Request %d: Input buffer not from input stream!", 589 __FUNCTION__, frameNumber); 590 return BAD_VALUE; 591 } 592 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 593 ALOGE("%s: Request %d: No output buffers provided!", 594 __FUNCTION__, frameNumber); 595 return BAD_VALUE; 596 } 597 if (request->input_buffer != NULL) { 598 //TODO 599 ALOGE("%s: Not supporting input buffer yet", __func__); 600 return BAD_VALUE; 601 } 602 603 // Validate all buffers 604 b = request->output_buffers; 605 do { 606 QCamera3Channel *channel = 607 static_cast<QCamera3Channel*>(b->stream->priv); 608 if 
(channel == NULL) { 609 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 610 __func__, frameNumber, idx); 611 return BAD_VALUE; 612 } 613 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 614 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 615 __func__, frameNumber, idx); 616 return BAD_VALUE; 617 } 618 if (b->release_fence != -1) { 619 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 620 __func__, frameNumber, idx); 621 return BAD_VALUE; 622 } 623 if (b->buffer == NULL) { 624 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 625 __func__, frameNumber, idx); 626 return BAD_VALUE; 627 } 628 idx++; 629 b = request->output_buffers + idx; 630 } while (idx < (ssize_t)request->num_output_buffers); 631 632 return NO_ERROR; 633} 634 635/*=========================================================================== 636 * FUNCTION : registerStreamBuffers 637 * 638 * DESCRIPTION: Register buffers for a given stream with the HAL device. 639 * 640 * PARAMETERS : 641 * @stream_list : streams to be configured 642 * 643 * RETURN : 644 * 645 *==========================================================================*/ 646int QCamera3HardwareInterface::registerStreamBuffers( 647 const camera3_stream_buffer_set_t *buffer_set) 648{ 649 int rc = 0; 650 651 pthread_mutex_lock(&mMutex); 652 653 if (buffer_set == NULL) { 654 ALOGE("%s: Invalid buffer_set parameter.", __func__); 655 pthread_mutex_unlock(&mMutex); 656 return -EINVAL; 657 } 658 if (buffer_set->stream == NULL) { 659 ALOGE("%s: Invalid stream parameter.", __func__); 660 pthread_mutex_unlock(&mMutex); 661 return -EINVAL; 662 } 663 if (buffer_set->num_buffers < 1) { 664 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 665 pthread_mutex_unlock(&mMutex); 666 return -EINVAL; 667 } 668 if (buffer_set->buffers == NULL) { 669 ALOGE("%s: Invalid buffers parameter.", __func__); 670 pthread_mutex_unlock(&mMutex); 671 return -EINVAL; 672 } 673 674 camera3_stream_t *stream = buffer_set->stream; 675 
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
    // (keeps a local copy of the handles so streams can be re-registered
    // on reconfigure without the framework's help)
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            // NOTE(review): if the same stream is registered twice, the
            // previously allocated buffers array leaks — TODO confirm the
            // framework never re-registers without a reconfigure in between.
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }

    if (stream->stream_type != CAMERA3_STREAM_OUTPUT) {
        ALOGE("%s: not yet support non output type stream", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    CameraMetadata meta;

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;

    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
                                    request->num_output_buffers);
    // Acquire all request buffers first
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        // Block until the producer signals the buffer is ready to write.
        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    /* Update pending request list and pending buffers map */
    // Lock order is always mMutex then mRequestLock (captureResultCb takes
    // only mRequestLock), so no deadlock.
    pthread_mutex_lock(&mRequestLock);
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);
    pthread_mutex_unlock(&mRequestLock);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
        } else {
            ALOGI("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable
    // captureResultCb clears mPendingRequest and signals once there is room
    // for another request (i.e. no stream has all its buffers dequeued).
    pthread_mutex_lock(&mRequestLock);
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mRequestLock);
    }
    pthread_mutex_unlock(&mRequestLock);

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION: stub — no vendor tags are exported yet.
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: stub — state dump not implemented yet.
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mRequestLock);

    if (metadata_buf) {
        // --- Metadata path ---
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        // NOTE: this local deliberately shadows the frame_number parameter;
        // on the metadata path the frame number comes from the metadata itself.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGD("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGD("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGD("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGD("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata emptyMetadata(1, 0);
                emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                result.result = emptyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count the stream buffers that already came back for this request.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        // j->buffer was malloc'd on the buffer path below.
                        free(j->buffer);
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // If no stream has all of its max_buffers outstanding, wake up
        // processCaptureRequest, which is blocked on mRequestCond.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // --- Stream-buffer path ---
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
            i++;
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGD("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Request still pending: stash a copy of the buffer; it will be
            // sent with the metadata result on the metadata path above.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGD("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mRequestLock);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;


    // Shutter/sensor timestamp supplied by the caller, not read from metadata.
    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // NOTE(review): these are variable-length arrays; when numFaces == 0 they
    // are zero-length (GNU extension) — confirm the compiler flags allow this.
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;   // j: offset into rectangles (4/face), k: landmarks (6/face)
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        // weight = -1: plain rect conversion, no 5th (weight) element written.
        convertRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
    camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
    camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
    camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
            faceLandmarks, numFaces*6);


    /*autofocus - TODO*/
    /*cam_auto_focus_data_t *afData =(cam_auto_focus_data_t *)
      POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/

    uint8_t  *color_correct_mode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
    camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);

    int32_t  *ae_precapture_id =
        (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);

    /*aec regions*/
    // Region arrays are 5 ints: left, top, width, height, weight
    // (layout produced by convertRegions with a valid weight).
    cam_area_t  *hAeRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
    int32_t aeRegions[5];
    convertRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);

    uint8_t *ae_state =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);

    uint8_t  *focusMode =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);

    /*af regions*/
    cam_area_t  *hAfRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
    int32_t afRegions[5];
    convertRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);

    uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);

    int32_t  *afTriggerId =
        (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);

    uint8_t  *whiteBalance =
        (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);

    /*awb regions*/
    cam_area_t  *hAwbRegions =
        (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
    int32_t awbRegions[5];
    convertRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
    camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);

    uint8_t  *whiteBalanceState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
    camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);

    uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
    camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);

    uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata);
    camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);

    uint8_t  *flashPower =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);

    int64_t  *flashFiringTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
    camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);

    /*int32_t  *ledMode =
      (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
      camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/

    uint8_t  *flashState =
        (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
    camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);

    uint8_t  *hotPixelMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
    camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);

    float  *lensAperture =
        (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
    camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);

    float  *filterDensity =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
    camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);

    float  *focalLength =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
    camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);

    float  *focusDistance =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);

    float  *focusRange =
        (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
    camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);

    uint8_t  *opticalStab =
        (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
    camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);

    /*int32_t  *focusState =
      (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
      camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */

    uint8_t  *noiseRedMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
    camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);

    /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
    // NOTE(review): only 3 of 4 crop values are reported here (left, top,
    // width) — the framework crop region normally also carries height.
    // Verify against the metadata tag definition; see existing TODO above.
    cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
        POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
    int32_t scalerCropRegion[3];
    scalerCropRegion[0] = hScalerCropRegion->left;
    scalerCropRegion[1] = hScalerCropRegion->top;
    scalerCropRegion[2] = hScalerCropRegion->width;
    camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 3);

    int64_t  *sensorExpTime =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
    camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);

    int64_t  *sensorFameDuration =
        (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);

    int32_t  *sensorSensitivity =
        (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
    camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

    uint8_t  *shadingMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
    camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);

    uint8_t  *faceDetectMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);

    uint8_t  *histogramMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);

    uint8_t  *sharpnessMapMode =
        (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            sharpnessMapMode, 1);

    /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
    cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
        POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
    camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
            (int32_t*)sharpnessMap->sharpness,
            CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);

    // Ownership of the packed buffer transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}

/*===========================================================================
 * FUNCTION   : convertRegions
 *
 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
 *
 * PARAMETERS :
 *   @rect   : cam_rect_t struct to convert
 *   @region : int32_t destination array (4 or 5 elements; see @weight)
 *   @weight : if we are converting from cam_area_t, weight is valid
 *             else weight = -1 and region[4] is left untouched
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertRegions(cam_rect_t rect, int32_t* region, int weight){
    region[0] = rect.left;
    region[1] = rect.top;
    region[2] = rect.width;
    region[3] = rect.height;
    if (weight > -1) {
        region[4] = weight;
    }
}
/*===========================================================================
 * FUNCTION   : convertLandmarks
 *
 * DESCRIPTION: helper method to extract the landmarks from face detection info
 *
 * PARAMETERS :
 *   @face      : face detection info to convert
 *   @landmarks : int32_t destination array (6 elements:
 *                left eye x/y, right eye x/y, mouth x/y)
 *
 *
 *==========================================================================*/
void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
{
    landmarks[0] = face.left_eye_center.x;
    landmarks[1] =
face.left_eye_center.y; 1305 landmarks[2] = face.right_eye_center.y; 1306 landmarks[3] = face.right_eye_center.y; 1307 landmarks[4] = face.mouth_center.x; 1308 landmarks[5] = face.mouth_center.y; 1309} 1310 1311#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1312/*=========================================================================== 1313 * FUNCTION : initCapabilities 1314 * 1315 * DESCRIPTION: initialize camera capabilities in static data struct 1316 * 1317 * PARAMETERS : 1318 * @cameraId : camera Id 1319 * 1320 * RETURN : int32_t type of status 1321 * NO_ERROR -- success 1322 * none-zero failure code 1323 *==========================================================================*/ 1324int QCamera3HardwareInterface::initCapabilities(int cameraId) 1325{ 1326 int rc = 0; 1327 mm_camera_vtbl_t *cameraHandle = NULL; 1328 QCamera3HeapMemory *capabilityHeap = NULL; 1329 1330 cameraHandle = camera_open(cameraId); 1331 if (!cameraHandle) { 1332 ALOGE("%s: camera_open failed", __func__); 1333 rc = -1; 1334 goto open_failed; 1335 } 1336 1337 capabilityHeap = new QCamera3HeapMemory(); 1338 if (capabilityHeap == NULL) { 1339 ALOGE("%s: creation of capabilityHeap failed", __func__); 1340 goto heap_creation_failed; 1341 } 1342 /* Allocate memory for capability buffer */ 1343 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1344 if(rc != OK) { 1345 ALOGE("%s: No memory for cappability", __func__); 1346 goto allocate_failed; 1347 } 1348 1349 /* Map memory for capability buffer */ 1350 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1351 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1352 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1353 capabilityHeap->getFd(0), 1354 sizeof(cam_capability_t)); 1355 if(rc < 0) { 1356 ALOGE("%s: failed to map capability buffer", __func__); 1357 goto map_failed; 1358 } 1359 1360 /* Query Capability */ 1361 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1362 if(rc < 0) { 1363 
ALOGE("%s: failed to query capability",__func__); 1364 goto query_failed; 1365 } 1366 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1367 if (!gCamCapability[cameraId]) { 1368 ALOGE("%s: out of memory", __func__); 1369 goto query_failed; 1370 } 1371 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1372 sizeof(cam_capability_t)); 1373 rc = 0; 1374 1375query_failed: 1376 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1377 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1378map_failed: 1379 capabilityHeap->deallocate(); 1380allocate_failed: 1381 delete capabilityHeap; 1382heap_creation_failed: 1383 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1384 cameraHandle = NULL; 1385open_failed: 1386 return rc; 1387} 1388 1389/*=========================================================================== 1390 * FUNCTION : initParameters 1391 * 1392 * DESCRIPTION: initialize camera parameters 1393 * 1394 * PARAMETERS : 1395 * 1396 * RETURN : int32_t type of status 1397 * NO_ERROR -- success 1398 * none-zero failure code 1399 *==========================================================================*/ 1400int QCamera3HardwareInterface::initParameters() 1401{ 1402 int rc = 0; 1403 1404 //Allocate Set Param Buffer 1405 mParamHeap = new QCamera3HeapMemory(); 1406 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1407 if(rc != OK) { 1408 rc = NO_MEMORY; 1409 ALOGE("Failed to allocate SETPARM Heap memory"); 1410 delete mParamHeap; 1411 mParamHeap = NULL; 1412 return rc; 1413 } 1414 1415 //Map memory for parameters buffer 1416 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1417 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1418 mParamHeap->getFd(0), 1419 sizeof(parm_buffer_t)); 1420 if(rc < 0) { 1421 ALOGE("%s:failed to map SETPARM buffer",__func__); 1422 rc = FAILED_TRANSACTION; 1423 mParamHeap->deallocate(); 1424 delete mParamHeap; 1425 mParamHeap = NULL; 1426 return rc; 1427 } 1428 1429 mParameters = 
(parm_buffer_t*) DATA_PTR(mParamHeap,0); 1430 return rc; 1431} 1432 1433/*=========================================================================== 1434 * FUNCTION : deinitParameters 1435 * 1436 * DESCRIPTION: de-initialize camera parameters 1437 * 1438 * PARAMETERS : 1439 * 1440 * RETURN : NONE 1441 *==========================================================================*/ 1442void QCamera3HardwareInterface::deinitParameters() 1443{ 1444 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 1445 CAM_MAPPING_BUF_TYPE_PARM_BUF); 1446 1447 mParamHeap->deallocate(); 1448 delete mParamHeap; 1449 mParamHeap = NULL; 1450 1451 mParameters = NULL; 1452} 1453 1454/*=========================================================================== 1455 * FUNCTION : calcMaxJpegSize 1456 * 1457 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 1458 * 1459 * PARAMETERS : 1460 * 1461 * RETURN : max_jpeg_size 1462 *==========================================================================*/ 1463int QCamera3HardwareInterface::calcMaxJpegSize() 1464{ 1465 int32_t max_jpeg_size = 0; 1466 int temp_width, temp_height; 1467 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 1468 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1469 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1470 if (temp_width * temp_height > max_jpeg_size ) { 1471 max_jpeg_size = temp_width * temp_height; 1472 } 1473 } 1474 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1475 return max_jpeg_size; 1476} 1477 1478/*=========================================================================== 1479 * FUNCTION : initStaticMetadata 1480 * 1481 * DESCRIPTION: initialize the static metadata 1482 * 1483 * PARAMETERS : 1484 * @cameraId : camera Id 1485 * 1486 * RETURN : int32_t type of status 1487 * 0 -- success 1488 * non-zero failure code 1489 
*==========================================================================*/ 1490int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1491{ 1492 int rc = 0; 1493 CameraMetadata staticInfo; 1494 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1495 /*HAL 3 only*/ 1496 #ifdef HAL_3_CAPABILITIES 1497 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1498 &gCamCapability[cameraId]->min_focus_distance, 1); 1499 1500 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1501 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1502 1503 /*should be using focal lengths but sensor doesn't provide that info now*/ 1504 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1505 &gCamCapability[cameraId]->focal_length, 1506 1); 1507 1508 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1509 gCamCapability[cameraId]->apertures, 1510 gCamCapability[cameraId]->apertures_count); 1511 1512 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1513 gCamCapability[cameraId]->filter_densities, 1514 gCamCapability[cameraId]->filter_densities_count); 1515 1516 1517 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1518 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1519 gCamCapability[cameraId]->optical_stab_modes_count); 1520 1521 staticInfo.update(ANDROID_LENS_POSITION, 1522 gCamCapability[cameraId]->lens_position, 1523 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1524 1525 static const int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1526 gCamCapability[cameraId]->lens_shading_map_size.height}; 1527 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1528 lens_shading_map_size, 1529 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1530 1531 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1532 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1533 1534 static 
const int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1535 gCamCapability[cameraId]->geo_correction_map_size.height}; 1536 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1537 geo_correction_map_size, 1538 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1539 1540 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1541 gCamCapability[cameraId]->geo_correction_map, 1542 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1543 1544 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1545 gCamCapability[cameraId]->sensor_physical_size, 2); 1546 1547 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1548 gCamCapability[cameraId]->exposure_time_range, 2); 1549 1550 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1551 &gCamCapability[cameraId]->max_frame_duration, 1); 1552 1553 1554 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1555 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1556 1557 static const int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1558 gCamCapability[cameraId]->pixel_array_size.height}; 1559 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1560 pixel_array_size, 2); 1561 1562 static const int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width, 1563 gCamCapability[cameraId]->active_array_size.height}; 1564 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1565 active_array_size, 2); 1566 1567 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1568 &gCamCapability[cameraId]->white_level, 1); 1569 1570 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1571 gCamCapability[cameraId]->black_level_pattern, 4); 1572 1573 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1574 &gCamCapability[cameraId]->flash_charge_duration, 1); 1575 1576 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1577 
&gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1578 1579 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1580 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1581 /*hardcode 0 for now*/ 1582 int32_t max_face_count = 0; 1583 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1584 &max_face_count, 1); 1585 1586 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1587 &gCamCapability[cameraId]->histogram_size, 1); 1588 1589 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1590 &gCamCapability[cameraId]->max_histogram_count, 1); 1591 1592 static const int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1593 gCamCapability[cameraId]->sharpness_map_size.height}; 1594 1595 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1596 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1597 1598 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1599 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1600 1601 1602 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1603 &gCamCapability[cameraId]->raw_min_duration, 1604 1); 1605 1606 static int32_t scalar_formats[CAM_FORMAT_MAX]; 1607 int scalar_formats_count = gCamCapability[cameraId]->supported_scalar_format_cnt; 1608 for (int i = 0; i < scalar_formats_count; i++) { 1609 scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]); 1610 } 1611 scalar_formats[scalar_formats_count] = HAL_PIXEL_FORMAT_YCbCr_420_888; 1612 scalar_formats_count++; 1613 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1614 scalar_formats, 1615 scalar_formats_count); 1616 1617 static int32_t available_processed_sizes[CAM_FORMAT_MAX]; 1618 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1619 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1620 available_processed_sizes); 1621 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1622 
available_processed_sizes, 1623 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1624 1625 static int32_t available_fps_ranges[MAX_SIZES_CNT]; 1626 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1627 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1628 available_fps_ranges); 1629 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1630 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1631 1632 static const camera_metadata_rational exposureCompensationStep = { 1633 gCamCapability[cameraId]->exp_compensation_step.numerator, 1634 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1635 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1636 &exposureCompensationStep, 1); 1637 1638 /*TO DO*/ 1639 static const uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1640 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1641 availableVstabModes, sizeof(availableVstabModes)); 1642 1643 #else 1644 const float minFocusDistance = 0; 1645 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1646 &minFocusDistance, 1); 1647 1648 const float hyperFocusDistance = 0; 1649 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1650 &hyperFocusDistance, 1); 1651 1652 static const float focalLength = 3.30f; 1653 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1654 &focalLength, 1655 1); 1656 1657 static const float aperture = 2.8f; 1658 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1659 &aperture, 1660 1); 1661 1662 static const float filterDensity = 0; 1663 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1664 &filterDensity, 1); 1665 1666 static const uint8_t availableOpticalStabilization = 1667 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1668 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1669 &availableOpticalStabilization, 1); 1670 1671 float lensPosition[3]; 1672 if (facingBack) { 1673 // 
Back-facing camera is center-top on device 1674 lensPosition[0] = 0; 1675 lensPosition[1] = 20; 1676 lensPosition[2] = -5; 1677 } else { 1678 // Front-facing camera is center-right on device 1679 lensPosition[0] = 20; 1680 lensPosition[1] = 20; 1681 lensPosition[2] = 0; 1682 } 1683 staticInfo.update(ANDROID_LENS_POSITION, 1684 lensPosition, 1685 sizeof(lensPosition)/ sizeof(float)); 1686 1687 static const int32_t lensShadingMapSize[] = {1, 1}; 1688 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1689 lensShadingMapSize, 1690 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1691 1692 static const float lensShadingMap[3 * 1 * 1 ] = 1693 { 1.f, 1.f, 1.f }; 1694 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, 1695 lensShadingMap, 1696 sizeof(lensShadingMap)/ sizeof(float)); 1697 1698 static const int32_t geometricCorrectionMapSize[] = {2, 2}; 1699 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1700 geometricCorrectionMapSize, 1701 sizeof(geometricCorrectionMapSize)/sizeof(int32_t)); 1702 1703 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = { 1704 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1705 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1706 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 1707 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; 1708 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1709 geometricCorrectionMap, 1710 sizeof(geometricCorrectionMap)/ sizeof(float)); 1711 1712 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; 1713 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1714 sensorPhysicalSize, 2); 1715 1716 const int64_t exposureTimeRange[2] = {1000L, 30000000000L} ; // 1 us - 30 sec 1717 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1718 exposureTimeRange, 2); 1719 1720 const int64_t frameDurationRange[2] = {33331760L, 30000000000L}; 1721 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1722 frameDurationRange, 1); 1723 1724 const uint8_t colorFilterArrangement = 1725 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; 1726 
staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1727 &colorFilterArrangement, 1); 1728 1729 const int resolution[2] = {640, 480}; 1730 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1731 resolution, 2); 1732 1733 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1734 resolution, 2); 1735 1736 const uint32_t whiteLevel = 4000; 1737 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1738 (int32_t*)&whiteLevel, 1); 1739 1740 static const int32_t blackLevelPattern[4] = { 1741 1000, 1000, 1742 1000, 1000 }; 1743 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1744 blackLevelPattern, 4); 1745 1746 static const int64_t flashChargeDuration = 0; 1747 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1748 &flashChargeDuration, 1); 1749 1750 static const int32_t tonemapCurvePoints = 128; 1751 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1752 &tonemapCurvePoints, 1); 1753 1754 static const int32_t maxFaceCount = 0; 1755 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1756 &maxFaceCount, 1); 1757 1758 static const int32_t histogramSize = 64; 1759 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1760 &histogramSize, 1); 1761 1762 static const int32_t maxHistogramCount = 1000; 1763 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1764 &maxHistogramCount, 1); 1765 1766 static const int32_t sharpnessMapSize[2] = {64, 64}; 1767 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1768 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); 1769 1770 static const int32_t maxSharpnessMapValue = 1000; 1771 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1772 &maxSharpnessMapValue, 1); 1773 1774 static const uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1775 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1776 availableVstabModes, sizeof(availableVstabModes)); 1777 1778 const uint64_t 
availableRawMinDurations[1] = {33331760L}; 1779 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1780 (int64_t*)&availableRawMinDurations, 1781 1); 1782 1783 const uint32_t availableFormats[5] = { 1784 HAL_PIXEL_FORMAT_RAW_SENSOR, 1785 HAL_PIXEL_FORMAT_BLOB, 1786 HAL_PIXEL_FORMAT_RGBA_8888, 1787 HAL_PIXEL_FORMAT_YCrCb_420_SP, 1788 HAL_PIXEL_FORMAT_YCbCr_420_888 1789 }; 1790 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1791 (int32_t*)availableFormats, 1792 5); 1793 1794 const uint32_t availableProcessedSizes[4] = {1280, 720, 640, 480}; 1795 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1796 (int32_t*)availableProcessedSizes, 1797 sizeof(availableProcessedSizes)/sizeof(int32_t)); 1798 1799 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1800 resolution, 1801 sizeof(resolution)/sizeof(int)); 1802 1803 static const uint8_t availableSceneModes[] = { 1804 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED }; 1805 1806 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1807 availableSceneModes, sizeof(availableSceneModes)); 1808 1809 static const int32_t availableFpsRanges[] = {15, 30}; 1810 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1811 availableFpsRanges, sizeof(availableFpsRanges)/sizeof(int32_t)); 1812 1813 static const uint8_t availableEffectsModes[] = { 1814 ANDROID_CONTROL_EFFECT_MODE_OFF }; 1815 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1816 availableEffectsModes, sizeof(availableEffectsModes)); 1817 1818 static const uint8_t availableAntibandingModes[] = { 1819 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF }; 1820 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1821 availableAntibandingModes, sizeof(availableAntibandingModes)); 1822 1823 static const camera_metadata_rational exposureCompensationStep = { 1824 1, 3 1825 }; 1826 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1827 &exposureCompensationStep, 1); 1828 1829 static const int32_t jpegThumbnailSizes[] = { 1830 0, 0, 
1831 160, 120, 1832 320, 240 1833 }; 1834 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1835 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1836 1837 static int64_t jpegMinDuration[] = {33331760L, 30000000000L}; 1838 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1839 jpegMinDuration, 1840 sizeof(jpegMinDuration)/sizeof(uint64_t)); 1841 #endif 1842 /*HAL 1 and HAL 3 common*/ 1843 static const float maxZoom = 10; 1844 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1845 &maxZoom, 1); 1846 1847 static const int32_t max3aRegions = 0; 1848 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1849 &max3aRegions, 1); 1850 1851 static const uint8_t flashAvailable = 0; 1852 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1853 &flashAvailable, sizeof(flashAvailable)); 1854 1855 static const uint8_t availableFaceDetectModes[] = { 1856 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1857 1858 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1859 availableFaceDetectModes, 1860 sizeof(availableFaceDetectModes)); 1861 1862 static const int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1863 gCamCapability[cameraId]->raw_dim.height}; 1864 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1865 raw_size, 1866 sizeof(raw_size)/sizeof(uint32_t)); 1867 1868 static const int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1869 gCamCapability[cameraId]->exposure_compensation_max}; 1870 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1871 exposureCompensationRange, 1872 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1873 1874 uint8_t lensFacing = (facingBack) ? 
1875 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1876 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1877 1878 static int32_t available_jpeg_sizes[MAX_SIZES_CNT]; 1879 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1880 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1881 available_jpeg_sizes); 1882 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1883 available_jpeg_sizes, 1884 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1885 1886 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1887 available_jpeg_sizes, 1888 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1889 1890 static int32_t max_jpeg_size = 0; 1891 int temp_width, temp_height; 1892 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1893 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1894 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1895 if (temp_width * temp_height > max_jpeg_size ) { 1896 max_jpeg_size = temp_width * temp_height; 1897 } 1898 } 1899 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1900 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1901 &max_jpeg_size, 1); 1902 1903 static uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1904 int32_t size = 0; 1905 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1906 int val = lookupFwkName(EFFECT_MODES_MAP, 1907 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1908 gCamCapability[cameraId]->supported_effects[i]); 1909 if (val != NAME_NOT_FOUND) { 1910 avail_effects[size] = (uint8_t)val; 1911 size++; 1912 } 1913 } 1914 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1915 avail_effects, 1916 size); 1917 1918 static uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1919 size = 0; 1920 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1921 int val = lookupFwkName(SCENE_MODES_MAP, 1922 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1923 
gCamCapability[cameraId]->supported_scene_modes[i]); 1924 if (val != NAME_NOT_FOUND) { 1925 avail_scene_modes[size] = (uint8_t)val; 1926 size++; 1927 } 1928 } 1929 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1930 avail_scene_modes, 1931 size); 1932 1933 static uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1934 size = 0; 1935 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1936 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1937 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1938 gCamCapability[cameraId]->supported_antibandings[i]); 1939 if (val != NAME_NOT_FOUND) { 1940 avail_antibanding_modes[size] = (uint8_t)val; 1941 size++; 1942 } 1943 1944 } 1945 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1946 avail_antibanding_modes, 1947 size); 1948 1949 static uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1950 size = 0; 1951 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1952 int val = lookupFwkName(FOCUS_MODES_MAP, 1953 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1954 gCamCapability[cameraId]->supported_focus_modes[i]); 1955 if (val != NAME_NOT_FOUND) { 1956 avail_af_modes[size] = (uint8_t)val; 1957 size++; 1958 } 1959 } 1960 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1961 avail_af_modes, 1962 size); 1963 1964 static uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1965 size = 0; 1966 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1967 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1968 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1969 gCamCapability[cameraId]->supported_white_balances[i]); 1970 if (val != NAME_NOT_FOUND) { 1971 avail_awb_modes[size] = (uint8_t)val; 1972 size++; 1973 } 1974 } 1975 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1976 avail_awb_modes, 1977 size); 1978 1979 static uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1980 size = 0; 1981 for 
(int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1982 int val = lookupFwkName(FLASH_MODES_MAP, 1983 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1984 gCamCapability[cameraId]->supported_flash_modes[i]); 1985 if (val != NAME_NOT_FOUND) { 1986 avail_flash_modes[size] = (uint8_t)val; 1987 size++; 1988 } 1989 } 1990 staticInfo.update(ANDROID_FLASH_MODE, 1991 avail_flash_modes, 1992 size); 1993 1994 /*so far fwk seems to support only 2 aec modes on and off*/ 1995 static const uint8_t avail_ae_modes[] = { 1996 ANDROID_CONTROL_AE_MODE_OFF, 1997 ANDROID_CONTROL_AE_MODE_ON 1998 }; 1999 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 2000 avail_ae_modes, 2001 sizeof(avail_ae_modes)); 2002 2003 gStaticMetadata = staticInfo.release(); 2004 return rc; 2005} 2006 2007/*=========================================================================== 2008 * FUNCTION : makeTable 2009 * 2010 * DESCRIPTION: make a table of sizes 2011 * 2012 * PARAMETERS : 2013 * 2014 * 2015 * 2016 * RETURN : int32_t type of status 2017 * NO_ERROR -- success 2018 * none-zero failure code 2019 *==========================================================================*/ 2020void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 2021 int32_t* sizeTable) 2022{ 2023 int j = 0; 2024 for (int i = 0; i < size; i++) { 2025 sizeTable[j] = dimTable[i].width; 2026 sizeTable[j+1] = dimTable[i].height; 2027 j+=2; 2028 } 2029} 2030 2031/*=========================================================================== 2032 * FUNCTION : makeFPSTable 2033 * 2034 * DESCRIPTION: make a table of fps ranges 2035 * 2036 * PARAMETERS : 2037 * 2038 * 2039 * 2040 * RETURN : int32_t type of status 2041 * NO_ERROR -- success 2042 * none-zero failure code 2043 *==========================================================================*/ 2044void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 2045 int32_t* fpsRangesTable) 2046{ 2047 int j 
= 0; 2048 for (int i = 0; i < size; i++) { 2049 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 2050 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 2051 j+=2; 2052 } 2053} 2054/*=========================================================================== 2055 * FUNCTION : getPreviewHalPixelFormat 2056 * 2057 * DESCRIPTION: convert the format to type recognized by framework 2058 * 2059 * PARAMETERS : format : the format from backend 2060 * 2061 ** RETURN : format recognized by framework 2062 * 2063 *==========================================================================*/ 2064int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2065{ 2066 int32_t halPixelFormat; 2067 2068 switch (format) { 2069 case CAM_FORMAT_YUV_420_NV12: 2070 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 2071 break; 2072 case CAM_FORMAT_YUV_420_NV21: 2073 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2074 break; 2075 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2076 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2077 break; 2078 case CAM_FORMAT_YUV_420_YV12: 2079 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2080 break; 2081 case CAM_FORMAT_YUV_422_NV16: 2082 case CAM_FORMAT_YUV_422_NV61: 2083 default: 2084 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2085 break; 2086 } 2087 return halPixelFormat; 2088} 2089 2090/*=========================================================================== 2091 * FUNCTION : AddSetParmEntryToBatch 2092 * 2093 * DESCRIPTION: add set parameter entry into batch 2094 * 2095 * PARAMETERS : 2096 * @p_table : ptr to parameter buffer 2097 * @paramType : parameter type 2098 * @paramLength : length of parameter value 2099 * @paramValue : ptr to parameter value 2100 * 2101 * RETURN : int32_t type of status 2102 * NO_ERROR -- success 2103 * none-zero failure code 2104 *==========================================================================*/ 2105int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2106 
/*===========================================================================
 * FUNCTION   : AddSetParmEntryToBatch
 *
 * DESCRIPTION: add a set-parameter entry into the batch buffer. The batch is
 *              a flat table plus a singly linked list of "flagged" parameter
 *              IDs (maintained through the GET_/SET_*_PARAM_ID macros) so the
 *              backend only walks entries that were actually set. This
 *              function splices paramType into that sorted list, then copies
 *              the value into the table slot.
 *
 * PARAMETERS :
 *   @p_table     : ptr to parameter buffer
 *   @paramType   : parameter type (also used as the slot index / list node id)
 *   @paramLength : length of parameter value
 *   @paramValue  : ptr to parameter value
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              BAD_VALUE -- paramLength exceeds the table entry size
 *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING: entry is already the head of the flagged list
    } else if (position < current){
        /* new entry becomes the new head of the list */
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        /* NOTE(review): assumes the list is terminated by a sentinel >= every
         * valid paramType (presumably CAM_INTF_PARM_MAX in first_flagged_entry);
         * otherwise this walk would not stop — confirm against callers. */
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    /* guard the memcpy below: a value larger than one table entry would
     * overwrite neighboring entries */
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
failure code 2161 *==========================================================================*/ 2162int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[], 2163 int len, int hal_name) 2164{ 2165 2166 for (int i = 0; i < len; i++) { 2167 if (arr[i].hal_name == hal_name) 2168 return arr[i].fwk_name; 2169 } 2170 2171 /* Not able to find matching framework type is not necessarily 2172 * an error case. This happens when mm-camera supports more attributes 2173 * than the frameworks do */ 2174 ALOGD("%s: Cannot find matching framework type", __func__); 2175 return NAME_NOT_FOUND; 2176} 2177 2178/*=========================================================================== 2179 * FUNCTION : lookupHalName 2180 * 2181 * DESCRIPTION: In case the enum is not same in fwk and backend 2182 * make sure the parameter is correctly propogated 2183 * 2184 * PARAMETERS : 2185 * @arr : map between the two enums 2186 * @len : len of the map 2187 * @fwk_name : name of the hal_parm to map 2188 * 2189 * RETURN : int32_t type of status 2190 * hal_name -- success 2191 * none-zero failure code 2192 *==========================================================================*/ 2193int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2194 int len, int fwk_name) 2195{ 2196 for (int i = 0; i < len; i++) { 2197 if (arr[i].fwk_name == fwk_name) 2198 return arr[i].hal_name; 2199 } 2200 ALOGE("%s: Cannot find matching hal type", __func__); 2201 return NAME_NOT_FOUND; 2202} 2203 2204/*=========================================================================== 2205 * FUNCTION : getCapabilities 2206 * 2207 * DESCRIPTION: query camera capabilities 2208 * 2209 * PARAMETERS : 2210 * @cameraId : camera Id 2211 * @info : camera info struct to be filled in with camera capabilities 2212 * 2213 * RETURN : int32_t type of status 2214 * NO_ERROR -- success 2215 * none-zero failure code 2216 *==========================================================================*/ 2217int 
QCamera3HardwareInterface::getCamInfo(int cameraId, 2218 struct camera_info *info) 2219{ 2220 int rc = 0; 2221 2222 if (NULL == gCamCapability[cameraId]) { 2223 rc = initCapabilities(cameraId); 2224 if (rc < 0) { 2225 //pthread_mutex_unlock(&g_camlock); 2226 return rc; 2227 } 2228 } 2229 2230 if (NULL == gStaticMetadata) { 2231 rc = initStaticMetadata(cameraId); 2232 if (rc < 0) { 2233 return rc; 2234 } 2235 } 2236 2237 switch(gCamCapability[cameraId]->position) { 2238 case CAM_POSITION_BACK: 2239 info->facing = CAMERA_FACING_BACK; 2240 break; 2241 2242 case CAM_POSITION_FRONT: 2243 info->facing = CAMERA_FACING_FRONT; 2244 break; 2245 2246 default: 2247 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2248 rc = -1; 2249 break; 2250 } 2251 2252 2253 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2254 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2255 info->static_camera_characteristics = gStaticMetadata; 2256 2257 return rc; 2258} 2259 2260/*=========================================================================== 2261 * FUNCTION : translateMetadata 2262 * 2263 * DESCRIPTION: translate the metadata into camera_metadata_t 2264 * 2265 * PARAMETERS : type of the request 2266 * 2267 * 2268 * RETURN : success: camera_metadata_t* 2269 * failure: NULL 2270 * 2271 *==========================================================================*/ 2272camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2273{ 2274 pthread_mutex_lock(&mMutex); 2275 2276 if (mDefaultMetadata[type] != NULL) { 2277 pthread_mutex_unlock(&mMutex); 2278 return mDefaultMetadata[type]; 2279 } 2280 //first time we are handling this request 2281 //fill up the metadata structure using the wrapper class 2282 CameraMetadata settings; 2283 //translate from cam_capability_t to camera_metadata_tag_t 2284 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2285 settings.update(ANDROID_REQUEST_TYPE, &requestType, 
1); 2286 2287 /*control*/ 2288 2289 uint8_t controlIntent = 0; 2290 switch (type) { 2291 case CAMERA3_TEMPLATE_PREVIEW: 2292 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2293 break; 2294 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2295 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2296 break; 2297 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2298 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2299 break; 2300 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2301 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2302 break; 2303 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2304 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2305 break; 2306 default: 2307 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2308 break; 2309 } 2310 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2311 2312 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2313 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2314 2315 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2316 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2317 2318 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2319 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2320 2321 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2322 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2323 2324 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2325 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2326 2327 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2328 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2329 2330 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2331 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2332 2333 /*flash*/ 2334 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2335 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2336 2337 2338 /* lens */ 2339 static const float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2340 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2341 2342 if (gCamCapability[mCameraId]->filter_densities_count) { 2343 static const float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2344 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2345 gCamCapability[mCameraId]->filter_densities_count); 2346 } 2347 2348 /* TODO: Enable focus lengths once supported*/ 2349 /*if (gCamCapability[mCameraId]->focal_lengths_count) { 2350 static const float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0]; 2351 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2352 }*/ 2353 2354 mDefaultMetadata[type] = settings.release(); 2355 2356 pthread_mutex_unlock(&mMutex); 2357 return mDefaultMetadata[type]; 2358} 2359 2360/*=========================================================================== 2361 * FUNCTION : setFrameParameters 2362 * 2363 * DESCRIPTION: set parameters per frame as requested in the metadata from 2364 * framework 2365 * 2366 * PARAMETERS : 2367 * @settings : frame settings information from framework 2368 * 2369 * 2370 * RETURN : success: NO_ERROR 2371 * failure: 2372 *==========================================================================*/ 2373int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2374 const camera_metadata_t *settings) 2375{ 2376 /*translate from camera_metadata_t type to parm_type_t*/ 2377 int rc = 0; 2378 if (settings == NULL && mFirstRequest) { 2379 /*settings cannot be null for the first request*/ 2380 return BAD_VALUE; 2381 } 2382 2383 int32_t hal_version = CAM_HAL_V3; 2384 2385 memset(mParameters, 0, 
sizeof(parm_buffer_t)); 2386 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2387 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2388 sizeof(hal_version), &hal_version); 2389 2390 /*we need to update the frame number in the parameters*/ 2391 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2392 sizeof(frame_id), &frame_id); 2393 if (rc < 0) { 2394 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2395 return BAD_VALUE; 2396 } 2397 2398 if(settings != NULL){ 2399 rc = translateMetadataToParameters(settings); 2400 } 2401 /*set the parameters to backend*/ 2402 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2403 return rc; 2404} 2405 2406/*=========================================================================== 2407 * FUNCTION : translateMetadataToParameters 2408 * 2409 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2410 * 2411 * 2412 * PARAMETERS : 2413 * @settings : frame settings information from framework 2414 * 2415 * 2416 * RETURN : success: NO_ERROR 2417 * failure: 2418 *==========================================================================*/ 2419int QCamera3HardwareInterface::translateMetadataToParameters 2420 (const camera_metadata_t *settings) 2421{ 2422 int rc = 0; 2423 CameraMetadata frame_settings; 2424 frame_settings = settings; 2425 2426 2427 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2428 int32_t antibandingMode = 2429 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2430 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2431 sizeof(antibandingMode), &antibandingMode); 2432 } 2433 2434 /*int32_t expCompensation = frame_settings.find().data.i32[0]; 2435 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2436 sizeof(expCompensation), &expCompensation);*/ 2437 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2438 uint8_t aeLock = 
frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2439 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2440 sizeof(aeLock), &aeLock); 2441 } 2442 2443 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2444 cam_fps_range_t fps_range; 2445 fps_range.min_fps = 2446 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2447 fps_range.max_fps = 2448 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2449 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2450 sizeof(fps_range), &fps_range); 2451 } 2452 2453 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2454 uint8_t focusMode = 2455 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2456 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2457 sizeof(focusMode), &focusMode); 2458 } 2459 2460 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2461 uint8_t awbLock = 2462 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2463 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2464 sizeof(awbLock), &awbLock); 2465 } 2466 2467 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2468 uint8_t fwk_whiteLevel = 2469 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2470 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2471 sizeof(WHITE_BALANCE_MODES_MAP), 2472 fwk_whiteLevel); 2473 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2474 sizeof(whiteLevel), &whiteLevel); 2475 } 2476 2477 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2478 uint8_t fwk_effectMode = 2479 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2480 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2481 sizeof(EFFECT_MODES_MAP), 2482 fwk_effectMode); 2483 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2484 sizeof(effectMode), &effectMode); 2485 } 2486 2487 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2488 uint8_t fwk_aeMode = 
2489 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2490 uint8_t aeMode = lookupHalName(AUTO_EXPOSURE_MAP, 2491 sizeof(AUTO_EXPOSURE_MAP), 2492 fwk_aeMode); 2493 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2494 sizeof(aeMode), &aeMode); 2495 } 2496 2497 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2498 int32_t metaFrameNumber = 2499 frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2500 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2501 sizeof(metaFrameNumber), &metaFrameNumber); 2502 } 2503 2504 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2505 uint8_t colorCorrectMode = 2506 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2507 rc = 2508 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2509 sizeof(colorCorrectMode), &colorCorrectMode); 2510 } 2511 2512 uint8_t aecTrigger = CAM_AEC_TRIGGER_IDLE; 2513 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) { 2514 aecTrigger = 2515 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2516 } 2517 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2518 sizeof(aecTrigger), &aecTrigger); 2519 2520 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER)) { 2521 uint8_t afTrigger = 2522 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2523 rc = AddSetParmEntryToBatch(mParameters, 2524 CAM_INTF_META_AF_TRIGGER, sizeof(afTrigger), &afTrigger); 2525 } 2526 2527 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2528 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2529 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2530 sizeof(metaMode), &metaMode); 2531 } 2532 2533 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2534 int32_t demosaic = 2535 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2536 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2537 sizeof(demosaic), 
&demosaic); 2538 } 2539 2540 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2541 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2542 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2543 sizeof(edgeMode), &edgeMode); 2544 } 2545 2546 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2547 int32_t edgeStrength = 2548 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2549 rc = AddSetParmEntryToBatch(mParameters, 2550 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2551 } 2552 2553 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2554 uint8_t flashPower = 2555 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2556 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2557 sizeof(flashPower), &flashPower); 2558 } 2559 2560 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2561 int64_t flashFiringTime = 2562 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2563 rc = AddSetParmEntryToBatch(mParameters, 2564 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2565 } 2566 2567 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2568 uint8_t geometricMode = 2569 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2570 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2571 sizeof(geometricMode), &geometricMode); 2572 } 2573 2574 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2575 uint8_t geometricStrength = 2576 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2577 rc = AddSetParmEntryToBatch(mParameters, 2578 CAM_INTF_META_GEOMETRIC_STRENGTH, 2579 sizeof(geometricStrength), &geometricStrength); 2580 } 2581 2582 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2583 uint8_t hotPixelMode = 2584 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2585 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 2586 sizeof(hotPixelMode), &hotPixelMode); 2587 } 2588 2589 if 
(frame_settings.exists(ANDROID_LENS_APERTURE)) { 2590 float lensAperture = 2591 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0]; 2592 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2593 sizeof(lensAperture), &lensAperture); 2594 } 2595 2596 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2597 float filterDensity = 2598 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2599 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2600 sizeof(filterDensity), &filterDensity); 2601 } 2602 2603 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2604 float focalLength = 2605 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2606 rc = AddSetParmEntryToBatch(mParameters, 2607 CAM_INTF_META_LENS_FOCAL_LENGTH, 2608 sizeof(focalLength), &focalLength); 2609 } 2610 2611 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2612 float focalDistance = 2613 frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2614 rc = AddSetParmEntryToBatch(mParameters, 2615 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2616 sizeof(focalDistance), &focalDistance); 2617 } 2618 2619 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2620 uint8_t optStabMode = 2621 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2622 rc = AddSetParmEntryToBatch(mParameters, 2623 CAM_INTF_META_LENS_OPT_STAB_MODE, 2624 sizeof(optStabMode), &optStabMode); 2625 } 2626 2627 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2628 uint8_t noiseRedMode = 2629 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2630 rc = AddSetParmEntryToBatch(mParameters, 2631 CAM_INTF_META_NOISE_REDUCTION_MODE, 2632 sizeof(noiseRedMode), &noiseRedMode); 2633 } 2634 2635 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2636 uint8_t noiseRedStrength = 2637 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2638 rc = AddSetParmEntryToBatch(mParameters, 2639 
CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2640 sizeof(noiseRedStrength), &noiseRedStrength); 2641 } 2642 2643 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2644 cam_crop_region_t scalerCropRegion; 2645 scalerCropRegion.left = 2646 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2647 scalerCropRegion.top = 2648 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2649 scalerCropRegion.width = 2650 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2651 rc = AddSetParmEntryToBatch(mParameters, 2652 CAM_INTF_META_SCALER_CROP_REGION, 2653 sizeof(scalerCropRegion), &scalerCropRegion); 2654 } 2655 2656 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2657 int64_t sensorExpTime = 2658 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2659 rc = AddSetParmEntryToBatch(mParameters, 2660 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2661 sizeof(sensorExpTime), &sensorExpTime); 2662 } 2663 2664 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2665 int64_t sensorFrameDuration = 2666 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2667 rc = AddSetParmEntryToBatch(mParameters, 2668 CAM_INTF_META_SENSOR_FRAME_DURATION, 2669 sizeof(sensorFrameDuration), &sensorFrameDuration); 2670 } 2671 2672 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2673 int32_t sensorSensitivity = 2674 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2675 rc = AddSetParmEntryToBatch(mParameters, 2676 CAM_INTF_META_SENSOR_SENSITIVITY, 2677 sizeof(sensorSensitivity), &sensorSensitivity); 2678 } 2679 2680 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2681 int32_t shadingMode = 2682 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2683 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2684 sizeof(shadingMode), &shadingMode); 2685 } 2686 2687 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2688 uint8_t shadingStrength = 2689 
frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2690 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2691 sizeof(shadingStrength), &shadingStrength); 2692 } 2693 2694 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2695 uint8_t facedetectMode = 2696 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2697 rc = AddSetParmEntryToBatch(mParameters, 2698 CAM_INTF_META_STATS_FACEDETECT_MODE, 2699 sizeof(facedetectMode), &facedetectMode); 2700 } 2701 2702 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2703 uint8_t histogramMode = 2704 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2705 rc = AddSetParmEntryToBatch(mParameters, 2706 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2707 sizeof(histogramMode), &histogramMode); 2708 } 2709 2710 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2711 uint8_t sharpnessMapMode = 2712 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2713 rc = AddSetParmEntryToBatch(mParameters, 2714 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2715 sizeof(sharpnessMapMode), &sharpnessMapMode); 2716 } 2717 2718 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2719 uint8_t tonemapMode = 2720 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2721 rc = AddSetParmEntryToBatch(mParameters, 2722 CAM_INTF_META_TONEMAP_MODE, 2723 sizeof(tonemapMode), &tonemapMode); 2724 } 2725 2726 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2727 uint8_t captureIntent = 2728 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2729 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2730 sizeof(captureIntent), &captureIntent); 2731 } 2732 2733 return rc; 2734} 2735 2736/*=========================================================================== 2737 * FUNCTION : getJpegSettings 2738 * 2739 * DESCRIPTION: save the jpeg settings in the HAL 2740 * 2741 * 2742 * PARAMETERS : 2743 * @settings : 
frame settings information from framework 2744 * 2745 * 2746 * RETURN : success: NO_ERROR 2747 * failure: 2748 *==========================================================================*/ 2749int QCamera3HardwareInterface::getJpegSettings 2750 (const camera_metadata_t *settings) 2751{ 2752 if (mJpegSettings) { 2753 free(mJpegSettings); 2754 mJpegSettings = NULL; 2755 } 2756 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2757 CameraMetadata jpeg_settings; 2758 jpeg_settings = settings; 2759 2760 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2761 mJpegSettings->jpeg_orientation = 2762 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2763 } else { 2764 mJpegSettings->jpeg_orientation = 0; 2765 } 2766 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2767 mJpegSettings->jpeg_quality = 2768 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2769 } else { 2770 mJpegSettings->jpeg_quality = 85; 2771 } 2772 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2773 mJpegSettings->thumbnail_size.width = 2774 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2775 mJpegSettings->thumbnail_size.height = 2776 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2777 mJpegSettings->thumbnail_size.width = 320; 2778 mJpegSettings->thumbnail_size.height = 240; 2779 } else { 2780 mJpegSettings->thumbnail_size.width = 640; 2781 mJpegSettings->thumbnail_size.height = 480; 2782 } 2783 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2784 for (int i = 0; i < 3; i++) { 2785 mJpegSettings->gps_coordinates[i] = 2786 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2787 } 2788 } 2789 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2790 mJpegSettings->gps_timestamp = 2791 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2792 } 2793 2794 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2795 mJpegSettings->gps_processing_method = 2796 
jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0]; 2797 } 2798 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2799 mJpegSettings->sensor_sensitivity = 2800 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2801 } 2802 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2803 mJpegSettings->lens_focal_length = 2804 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2805 } 2806 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 2807 return 0; 2808} 2809 2810/*=========================================================================== 2811 * FUNCTION : captureResultCb 2812 * 2813 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 2814 * 2815 * PARAMETERS : 2816 * @frame : frame information from mm-camera-interface 2817 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 2818 * @userdata: userdata 2819 * 2820 * RETURN : NONE 2821 *==========================================================================*/ 2822void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 2823 camera3_stream_buffer_t *buffer, 2824 uint32_t frame_number, void *userdata) 2825{ 2826 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 2827 if (hw == NULL) { 2828 ALOGE("%s: Invalid hw %p", __func__, hw); 2829 return; 2830 } 2831 2832 hw->captureResultCb(metadata, buffer, frame_number); 2833 return; 2834} 2835 2836/*=========================================================================== 2837 * FUNCTION : initialize 2838 * 2839 * DESCRIPTION: Pass framework callback pointers to HAL 2840 * 2841 * PARAMETERS : 2842 * 2843 * 2844 * RETURN : Success : 0 2845 * Failure: -ENODEV 2846 *==========================================================================*/ 2847 2848int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 2849 const camera3_callback_ops_t *callback_ops) 2850{ 2851 ALOGV("%s: E", __func__); 2852 
QCamera3HardwareInterface *hw = 2853 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2854 if (!hw) { 2855 ALOGE("%s: NULL camera device", __func__); 2856 return -ENODEV; 2857 } 2858 2859 int rc = hw->initialize(callback_ops); 2860 ALOGV("%s: X", __func__); 2861 return rc; 2862} 2863 2864/*=========================================================================== 2865 * FUNCTION : configure_streams 2866 * 2867 * DESCRIPTION: 2868 * 2869 * PARAMETERS : 2870 * 2871 * 2872 * RETURN : Success: 0 2873 * Failure: -EINVAL (if stream configuration is invalid) 2874 * -ENODEV (fatal error) 2875 *==========================================================================*/ 2876 2877int QCamera3HardwareInterface::configure_streams( 2878 const struct camera3_device *device, 2879 camera3_stream_configuration_t *stream_list) 2880{ 2881 ALOGV("%s: E", __func__); 2882 QCamera3HardwareInterface *hw = 2883 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2884 if (!hw) { 2885 ALOGE("%s: NULL camera device", __func__); 2886 return -ENODEV; 2887 } 2888 int rc = hw->configureStreams(stream_list); 2889 ALOGV("%s: X", __func__); 2890 return rc; 2891} 2892 2893/*=========================================================================== 2894 * FUNCTION : register_stream_buffers 2895 * 2896 * DESCRIPTION: Register stream buffers with the device 2897 * 2898 * PARAMETERS : 2899 * 2900 * RETURN : 2901 *==========================================================================*/ 2902int QCamera3HardwareInterface::register_stream_buffers( 2903 const struct camera3_device *device, 2904 const camera3_stream_buffer_set_t *buffer_set) 2905{ 2906 ALOGV("%s: E", __func__); 2907 QCamera3HardwareInterface *hw = 2908 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2909 if (!hw) { 2910 ALOGE("%s: NULL camera device", __func__); 2911 return -ENODEV; 2912 } 2913 int rc = hw->registerStreamBuffers(buffer_set); 2914 ALOGV("%s: X", __func__); 2915 return rc; 2916} 2917 
2918/*=========================================================================== 2919 * FUNCTION : construct_default_request_settings 2920 * 2921 * DESCRIPTION: Configure a settings buffer to meet the required use case 2922 * 2923 * PARAMETERS : 2924 * 2925 * 2926 * RETURN : Success: Return valid metadata 2927 * Failure: Return NULL 2928 *==========================================================================*/ 2929const camera_metadata_t* QCamera3HardwareInterface:: 2930 construct_default_request_settings(const struct camera3_device *device, 2931 int type) 2932{ 2933 2934 ALOGV("%s: E", __func__); 2935 camera_metadata_t* fwk_metadata = NULL; 2936 QCamera3HardwareInterface *hw = 2937 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2938 if (!hw) { 2939 ALOGE("%s: NULL camera device", __func__); 2940 return NULL; 2941 } 2942 2943 fwk_metadata = hw->translateCapabilityToMetadata(type); 2944 2945 ALOGV("%s: X", __func__); 2946 return fwk_metadata; 2947} 2948 2949/*=========================================================================== 2950 * FUNCTION : process_capture_request 2951 * 2952 * DESCRIPTION: 2953 * 2954 * PARAMETERS : 2955 * 2956 * 2957 * RETURN : 2958 *==========================================================================*/ 2959int QCamera3HardwareInterface::process_capture_request( 2960 const struct camera3_device *device, 2961 camera3_capture_request_t *request) 2962{ 2963 ALOGV("%s: E", __func__); 2964 QCamera3HardwareInterface *hw = 2965 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2966 if (!hw) { 2967 ALOGE("%s: NULL camera device", __func__); 2968 return -EINVAL; 2969 } 2970 2971 int rc = hw->processCaptureRequest(request); 2972 ALOGV("%s: X", __func__); 2973 return rc; 2974} 2975 2976/*=========================================================================== 2977 * FUNCTION : get_metadata_vendor_tag_ops 2978 * 2979 * DESCRIPTION: 2980 * 2981 * PARAMETERS : 2982 * 2983 * 2984 * RETURN : 2985 
*==========================================================================*/ 2986 2987void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 2988 const struct camera3_device *device, 2989 vendor_tag_query_ops_t* ops) 2990{ 2991 ALOGV("%s: E", __func__); 2992 QCamera3HardwareInterface *hw = 2993 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2994 if (!hw) { 2995 ALOGE("%s: NULL camera device", __func__); 2996 return; 2997 } 2998 2999 hw->getMetadataVendorTagOps(ops); 3000 ALOGV("%s: X", __func__); 3001 return; 3002} 3003 3004/*=========================================================================== 3005 * FUNCTION : dump 3006 * 3007 * DESCRIPTION: 3008 * 3009 * PARAMETERS : 3010 * 3011 * 3012 * RETURN : 3013 *==========================================================================*/ 3014 3015void QCamera3HardwareInterface::dump( 3016 const struct camera3_device *device, int fd) 3017{ 3018 ALOGV("%s: E", __func__); 3019 QCamera3HardwareInterface *hw = 3020 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3021 if (!hw) { 3022 ALOGE("%s: NULL camera device", __func__); 3023 return; 3024 } 3025 3026 hw->dump(fd); 3027 ALOGV("%s: X", __func__); 3028 return; 3029} 3030 3031/*=========================================================================== 3032 * FUNCTION : close_camera_device 3033 * 3034 * DESCRIPTION: 3035 * 3036 * PARAMETERS : 3037 * 3038 * 3039 * RETURN : 3040 *==========================================================================*/ 3041int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3042{ 3043 ALOGV("%s: E", __func__); 3044 int ret = NO_ERROR; 3045 QCamera3HardwareInterface *hw = 3046 reinterpret_cast<QCamera3HardwareInterface *>( 3047 reinterpret_cast<camera3_device_t *>(device)->priv); 3048 if (!hw) { 3049 ALOGE("NULL camera device"); 3050 return BAD_VALUE; 3051 } 3052 delete hw; 3053 ALOGV("%s: X", __func__); 3054 return ret; 3055} 3056 3057}; //end namespace qcamera 3058