QCamera3HWI.cpp revision 65585b2d14faf1e7fa78560f8c9cd27d76c408ab
1/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27* 28*/ 29 30#define LOG_TAG "QCamera3HWI" 31 32#include <cutils/properties.h> 33#include <hardware/camera3.h> 34#include <camera/CameraMetadata.h> 35#include <stdlib.h> 36#include <utils/Log.h> 37#include <utils/Errors.h> 38#include <ui/Fence.h> 39#include <gralloc_priv.h> 40#include "QCamera3HWI.h" 41#include "QCamera3Mem.h" 42#include "QCamera3Channel.h" 43#include "QCamera3PostProc.h" 44 45using namespace android; 46 47namespace qcamera { 48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS]; 50parm_buffer_t *prevSettings; 51const camera_metadata_t *gStaticMetadata; 52 53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = { 54 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF }, 55 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO }, 56 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE }, 57 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE }, 58 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA }, 59 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE }, 60 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD }, 61 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD }, 62 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA } 63}; 64 65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = { 66 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF }, 67 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO }, 68 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT }, 69 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT }, 70 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT}, 71 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT }, 72 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT }, 73 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, 
CAM_WB_MODE_TWILIGHT }, 74 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE } 75}; 76 77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = { 78 { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED, CAM_SCENE_MODE_OFF }, 79 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION }, 80 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT }, 81 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE }, 82 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT }, 83 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT }, 84 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE }, 85 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH }, 86 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW }, 87 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET }, 88 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE }, 89 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS }, 90 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS }, 91 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY }, 92 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT }, 93 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE} 94}; 95 96const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = { 97 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF }, 98 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED }, 99 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO }, 100 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO }, 101 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF }, 102 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE }, 103 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO } 104}; 105 106const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = { 107 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, 
CAM_ANTIBANDING_MODE_OFF }, 108 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ }, 109 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ }, 110 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO } 111}; 112 113const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AUTO_EXPOSURE_MAP[] = { 114 { ANDROID_CONTROL_AE_MODE_OFF, CAM_AEC_MODE_OFF }, 115 { ANDROID_CONTROL_AE_MODE_ON, CAM_AEC_MODE_FRAME_AVERAGE }, 116}; 117 118const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = { 119 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF }, 120 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON }, 121 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH} 122}; 123 124 125camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = { 126 initialize: QCamera3HardwareInterface::initialize, 127 configure_streams: QCamera3HardwareInterface::configure_streams, 128 register_stream_buffers: QCamera3HardwareInterface::register_stream_buffers, 129 construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings, 130 process_capture_request: QCamera3HardwareInterface::process_capture_request, 131 get_metadata_vendor_tag_ops: QCamera3HardwareInterface::get_metadata_vendor_tag_ops, 132 dump: QCamera3HardwareInterface::dump, 133}; 134 135 136/*=========================================================================== 137 * FUNCTION : QCamera3HardwareInterface 138 * 139 * DESCRIPTION: constructor of QCamera3HardwareInterface 140 * 141 * PARAMETERS : 142 * @cameraId : camera ID 143 * 144 * RETURN : none 145 *==========================================================================*/ 146QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 147 : mCameraId(cameraId), 148 mCameraHandle(NULL), 149 mCameraOpened(false), 150 mCallbackOps(NULL), 151 mInputStream(NULL), 152 mMetadataChannel(NULL), 153 mFirstRequest(false), 154 mParamHeap(NULL), 155 
mParameters(NULL), 156 mJpegSettings(NULL) 157{ 158 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 159 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 160 mCameraDevice.common.close = close_camera_device; 161 mCameraDevice.ops = &mCameraOps; 162 mCameraDevice.priv = this; 163 gCamCapability[cameraId]->version = CAM_HAL_V3; 164 165 pthread_mutex_init(&mRequestLock, NULL); 166 pthread_cond_init(&mRequestCond, NULL); 167 mPendingRequest = 0; 168 169 pthread_mutex_init(&mMutex, NULL); 170 pthread_mutex_init(&mCaptureResultLock, NULL); 171 172 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 173 mDefaultMetadata[i] = NULL; 174} 175 176/*=========================================================================== 177 * FUNCTION : ~QCamera3HardwareInterface 178 * 179 * DESCRIPTION: destructor of QCamera3HardwareInterface 180 * 181 * PARAMETERS : none 182 * 183 * RETURN : none 184 *==========================================================================*/ 185QCamera3HardwareInterface::~QCamera3HardwareInterface() 186{ 187 ALOGE("%s: %d", __func__, __LINE__); 188 /* Clean up all channels */ 189 mMetadataChannel->stop(); 190 delete mMetadataChannel; 191 mMetadataChannel = NULL; 192 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 193 it != mStreamInfo.end(); it++) { 194 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 195 channel->stop(); 196 delete channel; 197 free (*it); 198 } 199 200 ALOGE("%s: %d", __func__, __LINE__); 201 if (mJpegSettings != NULL) { 202 free(mJpegSettings); 203 mJpegSettings = NULL; 204 } 205 ALOGE("%s: %d", __func__, __LINE__); 206 deinitParameters(); 207 ALOGE("%s: %d", __func__, __LINE__); 208 closeCamera(); 209 210 ALOGE("%s: %d", __func__, __LINE__); 211 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 212 if (mDefaultMetadata[i]) 213 free_camera_metadata(mDefaultMetadata[i]); 214 215 pthread_mutex_destroy(&mRequestLock); 216 pthread_cond_destroy(&mRequestCond); 217 218 
pthread_mutex_destroy(&mMutex); 219 pthread_mutex_destroy(&mCaptureResultLock); 220} 221 222/*=========================================================================== 223 * FUNCTION : openCamera 224 * 225 * DESCRIPTION: open camera 226 * 227 * PARAMETERS : 228 * @hw_device : double ptr for camera device struct 229 * 230 * RETURN : int32_t type of status 231 * NO_ERROR -- success 232 * none-zero failure code 233 *==========================================================================*/ 234int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 235{ 236 //int rc = NO_ERROR; 237 int rc = 0; 238 if (mCameraOpened) { 239 *hw_device = NULL; 240 return PERMISSION_DENIED; 241 } 242 243 rc = openCamera(); 244 if (rc == 0) 245 *hw_device = &mCameraDevice.common; 246 else 247 *hw_device = NULL; 248 return rc; 249} 250 251/*=========================================================================== 252 * FUNCTION : openCamera 253 * 254 * DESCRIPTION: open camera 255 * 256 * PARAMETERS : none 257 * 258 * RETURN : int32_t type of status 259 * NO_ERROR -- success 260 * none-zero failure code 261 *==========================================================================*/ 262int QCamera3HardwareInterface::openCamera() 263{ 264 if (mCameraHandle) { 265 ALOGE("Failure: Camera already opened"); 266 return ALREADY_EXISTS; 267 } 268 mCameraHandle = camera_open(mCameraId); 269 if (!mCameraHandle) { 270 ALOGE("camera_open failed."); 271 return UNKNOWN_ERROR; 272 } 273 274 mCameraOpened = true; 275 276 return NO_ERROR; 277} 278 279/*=========================================================================== 280 * FUNCTION : closeCamera 281 * 282 * DESCRIPTION: close camera 283 * 284 * PARAMETERS : none 285 * 286 * RETURN : int32_t type of status 287 * NO_ERROR -- success 288 * none-zero failure code 289 *==========================================================================*/ 290int QCamera3HardwareInterface::closeCamera() 291{ 292 int rc = NO_ERROR; 293 294 
rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 295 mCameraHandle = NULL; 296 mCameraOpened = false; 297 298 return rc; 299} 300 301/*=========================================================================== 302 * FUNCTION : initialize 303 * 304 * DESCRIPTION: Initialize frameworks callback functions 305 * 306 * PARAMETERS : 307 * @callback_ops : callback function to frameworks 308 * 309 * RETURN : 310 * 311 *==========================================================================*/ 312int QCamera3HardwareInterface::initialize( 313 const struct camera3_callback_ops *callback_ops) 314{ 315 int rc; 316 317 pthread_mutex_lock(&mMutex); 318 319 rc = initParameters(); 320 if (rc < 0) { 321 ALOGE("%s: initParamters failed %d", __func__, rc); 322 goto err1; 323 } 324 //Create metadata channel and initialize it 325 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 326 mCameraHandle->ops, captureResultCb, 327 &gCamCapability[mCameraId]->padding_info, this); 328 if (mMetadataChannel == NULL) { 329 ALOGE("%s: failed to allocate metadata channel", __func__); 330 rc = -ENOMEM; 331 goto err2; 332 } 333 rc = mMetadataChannel->initialize(); 334 if (rc < 0) { 335 ALOGE("%s: metadata channel initialization failed", __func__); 336 goto err3; 337 } 338 339 mCallbackOps = callback_ops; 340 341 pthread_mutex_unlock(&mMutex); 342 return 0; 343 344err3: 345 delete mMetadataChannel; 346 mMetadataChannel = NULL; 347err2: 348 deinitParameters(); 349err1: 350 pthread_mutex_unlock(&mMutex); 351 return rc; 352} 353 354/*=========================================================================== 355 * FUNCTION : configureStreams 356 * 357 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 358 * and output streams. 
359 * 360 * PARAMETERS : 361 * @stream_list : streams to be configured 362 * 363 * RETURN : 364 * 365 *==========================================================================*/ 366int QCamera3HardwareInterface::configureStreams( 367 camera3_stream_configuration_t *streamList) 368{ 369 int rc = 0; 370 pthread_mutex_lock(&mMutex); 371 372 // Sanity check stream_list 373 if (streamList == NULL) { 374 ALOGE("%s: NULL stream configuration", __func__); 375 pthread_mutex_unlock(&mMutex); 376 return BAD_VALUE; 377 } 378 379 if (streamList->streams == NULL) { 380 ALOGE("%s: NULL stream list", __func__); 381 pthread_mutex_unlock(&mMutex); 382 return BAD_VALUE; 383 } 384 385 if (streamList->num_streams < 1) { 386 ALOGE("%s: Bad number of streams requested: %d", __func__, 387 streamList->num_streams); 388 pthread_mutex_unlock(&mMutex); 389 return BAD_VALUE; 390 } 391 392 camera3_stream_t *inputStream = NULL; 393 /* first invalidate all the steams in the mStreamList 394 * if they appear again, they will be validated */ 395 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 396 it != mStreamInfo.end(); it++) { 397 (*it)->status = INVALID; 398 } 399 for (size_t i = 0; i < streamList->num_streams; i++) { 400 camera3_stream_t *newStream = streamList->streams[i]; 401 ALOGV("%s: newStream type = %d, stream format = %d", 402 __func__, newStream->stream_type, newStream->format); 403 //if the stream is in the mStreamList validate it 404 bool stream_exists = false; 405 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 406 it != mStreamInfo.end(); it++) { 407 if ((*it)->stream == newStream) { 408 QCamera3Channel *channel = 409 (QCamera3Channel*)(*it)->stream->priv; 410 stream_exists = true; 411 (*it)->status = RECONFIGURE; 412 /*delete the channel object associated with the stream because 413 we need to reconfigure*/ 414 channel->stop(); 415 delete channel; 416 (*it)->stream->priv = NULL; 417 } 418 } 419 if (!stream_exists) { 420 //new stream 421 stream_info_t* 
stream_info; 422 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 423 stream_info->stream = newStream; 424 stream_info->status = VALID; 425 stream_info->registered = 0; 426 mStreamInfo.push_back(stream_info); 427 } 428 if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 429 if (inputStream != NULL) { 430 ALOGE("%s: Multiple input streams requested!", __func__); 431 pthread_mutex_unlock(&mMutex); 432 return BAD_VALUE; 433 } 434 inputStream = newStream; 435 } 436 } 437 mInputStream = inputStream; 438 439 /* TODO: Clean up no longer used streams, and maintain others if this 440 * is not the 1st time configureStreams is called */ 441 /*clean up invalid streams*/ 442 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 443 it != mStreamInfo.end();) { 444 if(((*it)->status) == INVALID){ 445 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 446 channel->stop(); 447 delete channel; 448 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 449 free(*it); 450 it = mStreamInfo.erase(it); 451 } else { 452 it++; 453 } 454 } 455 456 //mMetadataChannel->stop(); 457 458 /* Allocate channel objects for the requested streams */ 459 for (size_t i = 0; i < streamList->num_streams; i++) { 460 camera3_stream_t *newStream = streamList->streams[i]; 461 if (newStream->priv == NULL) { 462 //New stream, construct channel 463 switch (newStream->stream_type) { 464 case CAMERA3_STREAM_INPUT: 465 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 466 break; 467 case CAMERA3_STREAM_BIDIRECTIONAL: 468 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 469 GRALLOC_USAGE_HW_CAMERA_WRITE; 470 break; 471 case CAMERA3_STREAM_OUTPUT: 472 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 473 break; 474 default: 475 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 476 break; 477 } 478 479 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 480 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 481 QCamera3Channel *channel; 482 switch 
(newStream->format) { 483 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 484 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 485 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 486 mCameraHandle->ops, captureResultCb, 487 &gCamCapability[mCameraId]->padding_info, this, newStream); 488 if (channel == NULL) { 489 ALOGE("%s: allocation of channel failed", __func__); 490 pthread_mutex_unlock(&mMutex); 491 return -ENOMEM; 492 } 493 494 newStream->priv = channel; 495 break; 496 case HAL_PIXEL_FORMAT_BLOB: 497 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 498 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 499 mCameraHandle->ops, captureResultCb, 500 &gCamCapability[mCameraId]->padding_info, this, newStream); 501 if (channel == NULL) { 502 ALOGE("%s: allocation of channel failed", __func__); 503 pthread_mutex_unlock(&mMutex); 504 return -ENOMEM; 505 } 506 newStream->priv = channel; 507 break; 508 509 //TODO: Add support for app consumed format? 
510 default: 511 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 512 break; 513 } 514 } 515 } else { 516 // Channel already exists for this stream 517 // Do nothing for now 518 } 519 } 520 /*For the streams to be reconfigured we need to register the buffers 521 since the framework wont*/ 522 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 523 it != mStreamInfo.end(); it++) { 524 if ((*it)->status == RECONFIGURE) { 525 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 526 /*only register buffers for streams that have already been 527 registered*/ 528 if ((*it)->registered) { 529 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 530 (*it)->buffer_set.buffers); 531 if (rc != NO_ERROR) { 532 ALOGE("%s: Failed to register the buffers of old stream,\ 533 rc = %d", __func__, rc); 534 } 535 ALOGE("%s: channel %p has %d buffers", 536 __func__, channel, (*it)->buffer_set.num_buffers); 537 } 538 } 539 540 ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream); 541 if (index == NAME_NOT_FOUND) { 542 mPendingBuffersMap.add((*it)->stream, 0); 543 } else { 544 mPendingBuffersMap.editValueAt(index) = 0; 545 } 546 } 547 548 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 549 mPendingRequestsList.clear(); 550 551 //settings/parameters don't carry over for new configureStreams 552 memset(mParameters, 0, sizeof(parm_buffer_t)); 553 mFirstRequest = true; 554 555end: 556 pthread_mutex_unlock(&mMutex); 557 return rc; 558} 559 560/*=========================================================================== 561 * FUNCTION : validateCaptureRequest 562 * 563 * DESCRIPTION: validate a capture request from camera service 564 * 565 * PARAMETERS : 566 * @request : request from framework to process 567 * 568 * RETURN : 569 * 570 *==========================================================================*/ 571int QCamera3HardwareInterface::validateCaptureRequest( 572 camera3_capture_request_t *request) 573{ 574 
ssize_t idx = 0; 575 const camera3_stream_buffer_t *b; 576 CameraMetadata meta; 577 578 /* Sanity check the request */ 579 if (request == NULL) { 580 ALOGE("%s: NULL capture request", __func__); 581 return BAD_VALUE; 582 } 583 584 uint32_t frameNumber = request->frame_number; 585 if (request->input_buffer != NULL && 586 request->input_buffer->stream != mInputStream) { 587 ALOGE("%s: Request %d: Input buffer not from input stream!", 588 __FUNCTION__, frameNumber); 589 return BAD_VALUE; 590 } 591 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 592 ALOGE("%s: Request %d: No output buffers provided!", 593 __FUNCTION__, frameNumber); 594 return BAD_VALUE; 595 } 596 if (request->input_buffer != NULL) { 597 //TODO 598 ALOGE("%s: Not supporting input buffer yet", __func__); 599 return BAD_VALUE; 600 } 601 602 // Validate all buffers 603 b = request->output_buffers; 604 do { 605 QCamera3Channel *channel = 606 static_cast<QCamera3Channel*>(b->stream->priv); 607 if (channel == NULL) { 608 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 609 __func__, frameNumber, idx); 610 return BAD_VALUE; 611 } 612 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 613 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 614 __func__, frameNumber, idx); 615 return BAD_VALUE; 616 } 617 if (b->release_fence != -1) { 618 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 619 __func__, frameNumber, idx); 620 return BAD_VALUE; 621 } 622 if (b->buffer == NULL) { 623 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 624 __func__, frameNumber, idx); 625 return BAD_VALUE; 626 } 627 idx++; 628 b = request->output_buffers + idx; 629 } while (idx < (ssize_t)request->num_output_buffers); 630 631 return NO_ERROR; 632} 633 634/*=========================================================================== 635 * FUNCTION : registerStreamBuffers 636 * 637 * DESCRIPTION: Register buffers for a given stream with the HAL device. 
638 * 639 * PARAMETERS : 640 * @stream_list : streams to be configured 641 * 642 * RETURN : 643 * 644 *==========================================================================*/ 645int QCamera3HardwareInterface::registerStreamBuffers( 646 const camera3_stream_buffer_set_t *buffer_set) 647{ 648 int rc = 0; 649 650 pthread_mutex_lock(&mMutex); 651 652 if (buffer_set == NULL) { 653 ALOGE("%s: Invalid buffer_set parameter.", __func__); 654 pthread_mutex_unlock(&mMutex); 655 return -EINVAL; 656 } 657 if (buffer_set->stream == NULL) { 658 ALOGE("%s: Invalid stream parameter.", __func__); 659 pthread_mutex_unlock(&mMutex); 660 return -EINVAL; 661 } 662 if (buffer_set->num_buffers < 1) { 663 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 664 pthread_mutex_unlock(&mMutex); 665 return -EINVAL; 666 } 667 if (buffer_set->buffers == NULL) { 668 ALOGE("%s: Invalid buffers parameter.", __func__); 669 pthread_mutex_unlock(&mMutex); 670 return -EINVAL; 671 } 672 673 camera3_stream_t *stream = buffer_set->stream; 674 QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 675 676 //set the buffer_set in the mStreamInfo array 677 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 678 it != mStreamInfo.end(); it++) { 679 if ((*it)->stream == stream) { 680 uint32_t numBuffers = buffer_set->num_buffers; 681 (*it)->buffer_set.stream = buffer_set->stream; 682 (*it)->buffer_set.num_buffers = numBuffers; 683 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 684 if ((*it)->buffer_set.buffers == NULL) { 685 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 686 pthread_mutex_unlock(&mMutex); 687 return -ENOMEM; 688 } 689 for (size_t j = 0; j < numBuffers; j++){ 690 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 691 } 692 (*it)->registered = 1; 693 } 694 } 695 696 if (stream->stream_type != CAMERA3_STREAM_OUTPUT) { 697 ALOGE("%s: not yet support non output type stream", __func__); 698 pthread_mutex_unlock(&mMutex); 699 
return -EINVAL; 700 } 701 rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers); 702 if (rc < 0) { 703 ALOGE("%s: registerBUffers for stream %p failed", __func__, stream); 704 pthread_mutex_unlock(&mMutex); 705 return -ENODEV; 706 } 707 708 pthread_mutex_unlock(&mMutex); 709 return NO_ERROR; 710} 711 712/*=========================================================================== 713 * FUNCTION : processCaptureRequest 714 * 715 * DESCRIPTION: process a capture request from camera service 716 * 717 * PARAMETERS : 718 * @request : request from framework to process 719 * 720 * RETURN : 721 * 722 *==========================================================================*/ 723int QCamera3HardwareInterface::processCaptureRequest( 724 camera3_capture_request_t *request) 725{ 726 int rc = NO_ERROR; 727 CameraMetadata meta; 728 729 pthread_mutex_lock(&mMutex); 730 731 rc = validateCaptureRequest(request); 732 if (rc != NO_ERROR) { 733 ALOGE("%s: incoming request is not valid", __func__); 734 pthread_mutex_unlock(&mMutex); 735 return rc; 736 } 737 738 uint32_t frameNumber = request->frame_number; 739 740 rc = setFrameParameters(request->frame_number, request->settings); 741 if (rc < 0) { 742 ALOGE("%s: fail to set frame parameters", __func__); 743 pthread_mutex_unlock(&mMutex); 744 return rc; 745 } 746 747 ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__, 748 request->num_output_buffers); 749 // Acquire all request buffers first 750 for (size_t i = 0; i < request->num_output_buffers; i++) { 751 const camera3_stream_buffer_t& output = request->output_buffers[i]; 752 sp<Fence> acquireFence = new Fence(output.acquire_fence); 753 754 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 755 //Call function to store local copy of jpeg data for encode params. 
756 rc = getJpegSettings(request->settings); 757 if (rc < 0) { 758 ALOGE("%s: failed to get jpeg parameters", __func__); 759 pthread_mutex_unlock(&mMutex); 760 return rc; 761 } 762 } 763 764 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 765 if (rc != OK) { 766 ALOGE("%s: fence wait failed %d", __func__, rc); 767 pthread_mutex_unlock(&mMutex); 768 return rc; 769 } 770 } 771 772 ALOGV("%s: %d", __func__, __LINE__); 773 774 /* Update pending request list and pending buffers map */ 775 pthread_mutex_lock(&mRequestLock); 776 PendingRequestInfo pendingRequest; 777 pendingRequest.frame_number = frameNumber; 778 pendingRequest.num_buffers = request->num_output_buffers; 779// pendingRequest.metadata = NULL; 780 for (size_t i = 0; i < request->num_output_buffers; i++) { 781 RequestedBufferInfo requestedBuf; 782 requestedBuf.stream = request->output_buffers[i].stream; 783 requestedBuf.buffer = NULL; 784 pendingRequest.buffers.push_back(requestedBuf); 785 786 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 787 } 788 mPendingRequestsList.push_back(pendingRequest); 789 pthread_mutex_unlock(&mRequestLock); 790 791 // Notify metadata channel we receive a request 792 mMetadataChannel->request(NULL, frameNumber); 793 794 // Call request on other streams 795 for (size_t i = 0; i < request->num_output_buffers; i++) { 796 const camera3_stream_buffer_t& output = request->output_buffers[i]; 797 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 798 799 if (channel == NULL) { 800 ALOGE("%s: invalid channel pointer for stream", __func__); 801 continue; 802 } 803 804 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 805 rc = channel->request(output.buffer, frameNumber, mJpegSettings); 806 } else { 807 ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber); 808 rc = channel->request(output.buffer, frameNumber); 809 } 810 if (rc < 0) 811 ALOGE("%s: request failed", __func__); 812 } 813 814 mFirstRequest = false; 
815 816 //Block on conditional variable 817 pthread_mutex_lock(&mRequestLock); 818 mPendingRequest = 1; 819 while (mPendingRequest == 1) { 820 pthread_cond_wait(&mRequestCond, &mRequestLock); 821 } 822 pthread_mutex_unlock(&mRequestLock); 823 824 pthread_mutex_unlock(&mMutex); 825 return rc; 826} 827 828/*=========================================================================== 829 * FUNCTION : getMetadataVendorTagOps 830 * 831 * DESCRIPTION: 832 * 833 * PARAMETERS : 834 * 835 * 836 * RETURN : 837 *==========================================================================*/ 838void QCamera3HardwareInterface::getMetadataVendorTagOps( 839 vendor_tag_query_ops_t* /*ops*/) 840{ 841 /* Enable locks when we eventually add Vendor Tags */ 842 /* 843 pthread_mutex_lock(&mMutex); 844 845 pthread_mutex_unlock(&mMutex); 846 */ 847 return; 848} 849 850/*=========================================================================== 851 * FUNCTION : dump 852 * 853 * DESCRIPTION: 854 * 855 * PARAMETERS : 856 * 857 * 858 * RETURN : 859 *==========================================================================*/ 860void QCamera3HardwareInterface::dump(int /*fd*/) 861{ 862 /*Enable lock when we implement this function*/ 863 /* 864 pthread_mutex_lock(&mMutex); 865 866 pthread_mutex_unlock(&mMutex); 867 */ 868 return; 869} 870 871/*=========================================================================== 872 * FUNCTION : captureResultCb 873 * 874 * DESCRIPTION: Callback handler for all capture result 875 * (streams, as well as metadata) 876 * 877 * PARAMETERS : 878 * @metadata : metadata information 879 * @buffer : actual gralloc buffer to be returned to frameworks. 880 * NULL if metadata. 
881 * 882 * RETURN : NONE 883 *==========================================================================*/ 884void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf, 885 camera3_stream_buffer_t *buffer, uint32_t frame_number) 886{ 887 pthread_mutex_lock(&mRequestLock); 888 889 if (metadata_buf) { 890 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer; 891 int32_t frame_number_valid = *(int32_t *) 892 POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata); 893 uint32_t frame_number = *(uint32_t *) 894 POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata); 895 const struct timeval *tv = (const struct timeval *) 896 POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata); 897 nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC + 898 tv->tv_usec * NSEC_PER_USEC; 899 900 if (!frame_number_valid) { 901 ALOGI("%s: Not a valid frame number, used as SOF only", __func__); 902 mMetadataChannel->bufDone(metadata_buf); 903 goto done_metadata; 904 } 905 ALOGE("%s: valid frame_number = %d, capture_time = %lld", __func__, 906 frame_number, capture_time); 907 908 // Go through the pending requests info and send shutter/results to frameworks 909 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 910 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) { 911 camera3_capture_result_t result; 912 camera3_notify_msg_t notify_msg; 913 ALOGE("%s: frame_number in the list is %d", __func__, i->frame_number); 914 915 // Flush out all entries with less or equal frame numbers. 916 917 //TODO: Make sure shutter timestamp really reflects shutter timestamp. 918 //Right now it's the same as metadata timestamp 919 920 //TODO: When there is metadata drop, how do we derive the timestamp of 921 //dropped frames? 
For now, we fake the dropped timestamp by substracting 922 //from the reported timestamp 923 nsecs_t current_capture_time = capture_time - 924 (frame_number - i->frame_number) * NSEC_PER_33MSEC; 925 926 // Send shutter notify to frameworks 927 notify_msg.type = CAMERA3_MSG_SHUTTER; 928 notify_msg.message.shutter.frame_number = i->frame_number; 929 notify_msg.message.shutter.timestamp = current_capture_time; 930 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 931 ALOGE("%s: notify frame_number = %d, capture_time = %lld", __func__, 932 i->frame_number, capture_time); 933 934 // Send empty metadata with already filled buffers for dropped metadata 935 // and send valid metadata with already filled buffers for current metadata 936 if (i->frame_number < frame_number) { 937 CameraMetadata emptyMetadata(1, 0); 938 emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP, 939 ¤t_capture_time, 1); 940 result.result = emptyMetadata.release(); 941 } else { 942 result.result = translateCbMetadataToResultMetadata(metadata, 943 current_capture_time); 944 // Return metadata buffer 945 mMetadataChannel->bufDone(metadata_buf); 946 } 947 if (!result.result) { 948 ALOGE("%s: metadata is NULL", __func__); 949 } 950 result.frame_number = i->frame_number; 951 result.num_output_buffers = 0; 952 result.output_buffers = NULL; 953 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 954 j != i->buffers.end(); j++) { 955 if (j->buffer) { 956 result.num_output_buffers++; 957 } 958 } 959 960 if (result.num_output_buffers > 0) { 961 camera3_stream_buffer_t *result_buffers = 962 new camera3_stream_buffer_t[result.num_output_buffers]; 963 if (!result_buffers) { 964 ALOGE("%s: Fatal error: out of memory", __func__); 965 } 966 size_t result_buffers_idx = 0; 967 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 968 j != i->buffers.end(); j++) { 969 if (j->buffer) { 970 result_buffers[result_buffers_idx++] = *(j->buffer); 971 free(j->buffer); 972 
mPendingBuffersMap.editValueFor(j->stream)--; 973 } 974 } 975 result.output_buffers = result_buffers; 976 977 mCallbackOps->process_capture_result(mCallbackOps, &result); 978 ALOGE("%s: meta frame_number = %d, capture_time = %lld", __func__, 979 result.frame_number, 980 current_capture_time); 981 free_camera_metadata((camera_metadata_t *)result.result); 982 delete[] result_buffers; 983 } else { 984 mCallbackOps->process_capture_result(mCallbackOps, &result); 985 ALOGE("%s: meta frame_number = %d, capture_time = %lld", __func__, 986 result.frame_number, current_capture_time); 987 free_camera_metadata((camera_metadata_t *)result.result); 988 } 989 // erase the element from the list 990 i = mPendingRequestsList.erase(i); 991 } 992 993 994done_metadata: 995 bool max_buffers_dequeued = false; 996 for (size_t i = 0; i < mPendingBuffersMap.size(); i++) { 997 const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i); 998 uint32_t queued_buffers = mPendingBuffersMap.valueAt(i); 999 if (queued_buffers == stream->max_buffers) { 1000 max_buffers_dequeued = true; 1001 break; 1002 } 1003 } 1004 if (!max_buffers_dequeued) { 1005 // Unblock process_capture_request 1006 mPendingRequest = 0; 1007 pthread_cond_signal(&mRequestCond); 1008 } 1009 } else { 1010 // If the frame number doesn't exist in the pending request list, 1011 // directly send the buffer to the frameworks, and update pending buffers map 1012 // Otherwise, book-keep the buffer. 
1013 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1014 while (i != mPendingRequestsList.end() && i->frame_number != frame_number) 1015 i++; 1016 if (i == mPendingRequestsList.end()) { 1017 // Verify all pending requests frame_numbers are greater 1018 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); 1019 j != mPendingRequestsList.end(); j++) { 1020 if (j->frame_number < frame_number) { 1021 ALOGE("%s: Error: pending frame number %d is smaller than %d", 1022 __func__, j->frame_number, frame_number); 1023 } 1024 } 1025 camera3_capture_result_t result; 1026 result.result = NULL; 1027 result.frame_number = frame_number; 1028 result.num_output_buffers = 1; 1029 result.output_buffers = buffer; 1030 ALOGE("%s: result frame_number = %d, buffer = %p", 1031 __func__, frame_number, buffer); 1032 mPendingBuffersMap.editValueFor(buffer->stream)--; 1033 mCallbackOps->process_capture_result(mCallbackOps, &result); 1034 } else { 1035 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1036 j != i->buffers.end(); j++) { 1037 if (j->stream == buffer->stream) { 1038 if (j->buffer != NULL) { 1039 ALOGE("%s: Error: buffer is already set", __func__); 1040 } else { 1041 j->buffer = (camera3_stream_buffer_t *)malloc( 1042 sizeof(camera3_stream_buffer_t)); 1043 *(j->buffer) = *buffer; 1044 ALOGE("%s: cache buffer %p at result frame_number %d", 1045 __func__, buffer, frame_number); 1046 } 1047 } 1048 } 1049 } 1050 } 1051 1052 pthread_mutex_unlock(&mRequestLock); 1053 return; 1054} 1055 1056/*=========================================================================== 1057 * FUNCTION : translateCbMetadataToResultMetadata 1058 * 1059 * DESCRIPTION: 1060 * 1061 * PARAMETERS : 1062 * @metadata : metadata information from callback 1063 * 1064 * RETURN : camera_metadata_t* 1065 * metadata in a format specified by fwk 1066 *==========================================================================*/ 1067camera_metadata_t* 
1068QCamera3HardwareInterface::translateCbMetadataToResultMetadata 1069 (metadata_buffer_t *metadata, nsecs_t timestamp) 1070{ 1071 CameraMetadata camMetadata; 1072 camera_metadata_t* resultMetadata; 1073 1074 1075 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 1076 1077 /*CAM_INTF_META_HISTOGRAM - TODO*/ 1078 /*cam_hist_stats_t *histogram = 1079 (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM, 1080 metadata);*/ 1081 1082 /*face detection*/ 1083 cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *) 1084 POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata); 1085 uint8_t numFaces = faceDetectionInfo->num_faces_detected; 1086 int32_t faceIds[numFaces]; 1087 uint8_t faceScores[numFaces]; 1088 int32_t faceRectangles[numFaces * 4]; 1089 int32_t faceLandmarks[numFaces * 6]; 1090 int j = 0, k = 0; 1091 for (int i = 0; i < numFaces; i++) { 1092 faceIds[i] = faceDetectionInfo->faces[i].face_id; 1093 faceScores[i] = faceDetectionInfo->faces[i].score; 1094 convertRegions(faceDetectionInfo->faces[i].face_boundary, 1095 faceRectangles+j, -1); 1096 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 1097 j+= 4; 1098 k+= 6; 1099 } 1100 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 1101 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 1102 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, 1103 faceRectangles, numFaces*4); 1104 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1105 faceLandmarks, numFaces*6); 1106 1107 1108 /*autofocus - TODO*/ 1109 /*cam_auto_focus_data_t *afData =(cam_auto_focus_data_t *) 1110 POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/ 1111 1112 uint8_t *color_correct_mode = 1113 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1114 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1115 1116 int32_t *ae_precapture_id = 1117 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1118 
camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1119 1120 /*aec regions*/ 1121 cam_area_t *hAeRegions = 1122 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1123 int32_t aeRegions[5]; 1124 convertRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1125 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1126 1127 uint8_t *ae_state = 1128 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1129 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1130 1131 uint8_t *focusMode = 1132 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1133 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1134 1135 /*af regions*/ 1136 cam_area_t *hAfRegions = 1137 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1138 int32_t afRegions[5]; 1139 convertRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1140 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1141 1142 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1143 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1144 1145 int32_t *afTriggerId = 1146 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1147 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1148 1149 uint8_t *whiteBalance = 1150 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1151 camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1); 1152 1153 /*awb regions*/ 1154 cam_area_t *hAwbRegions = 1155 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1156 int32_t awbRegions[5]; 1157 convertRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1158 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1159 1160 uint8_t *whiteBalanceState = 1161 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1162 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1163 1164 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata); 
1165 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1166 1167 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata); 1168 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1169 1170 uint8_t *flashPower = 1171 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1172 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1173 1174 int64_t *flashFiringTime = 1175 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1176 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1177 1178 /*int32_t *ledMode = 1179 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1180 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1181 1182 uint8_t *flashState = 1183 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1184 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1185 1186 uint8_t *hotPixelMode = 1187 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1188 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1189 1190 float *lensAperture = 1191 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1192 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1193 1194 float *filterDensity = 1195 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1196 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1197 1198 float *focalLength = 1199 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1200 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1201 1202 float *focusDistance = 1203 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1204 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1205 1206 float *focusRange = 1207 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1208 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1209 1210 uint8_t *opticalStab = 1211 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1212 
camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1213 1214 /*int32_t *focusState = 1215 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1216 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1217 1218 uint8_t *noiseRedMode = 1219 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1220 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1221 1222 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1223 1224 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1225 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1226 int32_t scalerCropRegion[3]; 1227 scalerCropRegion[0] = hScalerCropRegion->left; 1228 scalerCropRegion[1] = hScalerCropRegion->top; 1229 scalerCropRegion[2] = hScalerCropRegion->width; 1230 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 3); 1231 1232 int64_t *sensorExpTime = 1233 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1234 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1235 1236 int64_t *sensorFameDuration = 1237 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1238 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1239 1240 int32_t *sensorSensitivity = 1241 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1242 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1243 1244 uint8_t *shadingMode = 1245 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1246 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1247 1248 uint8_t *faceDetectMode = 1249 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1250 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1); 1251 1252 uint8_t *histogramMode = 1253 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1254 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1255 
1256 uint8_t *sharpnessMapMode = 1257 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1258 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1259 sharpnessMapMode, 1); 1260 1261 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1262 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1263 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1264 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1265 (int32_t*)sharpnessMap->sharpness, 1266 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1267 1268 resultMetadata = camMetadata.release(); 1269 return resultMetadata; 1270} 1271 1272/*=========================================================================== 1273 * FUNCTION : convertRegions 1274 * 1275 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1276 * 1277 * PARAMETERS : 1278 * @rect : cam_rect_t struct to convert 1279 * @region : int32_t destination array 1280 * @weight : if we are converting from cam_area_t, weight is valid 1281 * else weight = -1 1282 * 1283 *==========================================================================*/ 1284void QCamera3HardwareInterface::convertRegions(cam_rect_t rect, int32_t* region, int weight){ 1285 region[0] = rect.left; 1286 region[1] = rect.top; 1287 region[2] = rect.width; 1288 region[3] = rect.height; 1289 if (weight > -1) { 1290 region[4] = weight; 1291 } 1292} 1293/*=========================================================================== 1294 * FUNCTION : convertLandmarks 1295 * 1296 * DESCRIPTION: helper method to extract the landmarks from face detection info 1297 * 1298 * PARAMETERS : 1299 * @face : cam_rect_t struct to convert 1300 * @landmarks : int32_t destination array 1301 * 1302 * 1303 *==========================================================================*/ 1304void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1305{ 1306 landmarks[0] = face.left_eye_center.x; 1307 landmarks[1] = 
face.left_eye_center.y; 1308 landmarks[2] = face.right_eye_center.y; 1309 landmarks[3] = face.right_eye_center.y; 1310 landmarks[4] = face.mouth_center.x; 1311 landmarks[5] = face.mouth_center.y; 1312} 1313 1314#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1315/*=========================================================================== 1316 * FUNCTION : initCapabilities 1317 * 1318 * DESCRIPTION: initialize camera capabilities in static data struct 1319 * 1320 * PARAMETERS : 1321 * @cameraId : camera Id 1322 * 1323 * RETURN : int32_t type of status 1324 * NO_ERROR -- success 1325 * none-zero failure code 1326 *==========================================================================*/ 1327int QCamera3HardwareInterface::initCapabilities(int cameraId) 1328{ 1329 int rc = 0; 1330 mm_camera_vtbl_t *cameraHandle = NULL; 1331 QCamera3HeapMemory *capabilityHeap = NULL; 1332 1333 cameraHandle = camera_open(cameraId); 1334 if (!cameraHandle) { 1335 ALOGE("%s: camera_open failed", __func__); 1336 rc = -1; 1337 goto open_failed; 1338 } 1339 1340 capabilityHeap = new QCamera3HeapMemory(); 1341 if (capabilityHeap == NULL) { 1342 ALOGE("%s: creation of capabilityHeap failed", __func__); 1343 goto heap_creation_failed; 1344 } 1345 /* Allocate memory for capability buffer */ 1346 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1347 if(rc != OK) { 1348 ALOGE("%s: No memory for cappability", __func__); 1349 goto allocate_failed; 1350 } 1351 1352 /* Map memory for capability buffer */ 1353 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1354 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1355 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1356 capabilityHeap->getFd(0), 1357 sizeof(cam_capability_t)); 1358 if(rc < 0) { 1359 ALOGE("%s: failed to map capability buffer", __func__); 1360 goto map_failed; 1361 } 1362 1363 /* Query Capability */ 1364 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1365 if(rc < 0) { 1366 
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Persist the queried capabilities into the per-camera global table.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        // NOTE(review): rc is >= 0 from the successful query at this point,
        // so this malloc failure likely returns success — confirm intent.
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

// Cleanup ladder: each label unwinds everything set up before its
// corresponding failure point, then falls through to the labels below.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}

/*===========================================================================
 * FUNCTION   : initParameters
 *
 * DESCRIPTION: initialize camera parameters by allocating a heap-backed
 *              parameter buffer and mapping it to the camera backend
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *                NO_ERROR  -- success
 *                none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initParameters()
{
    int rc = 0;

    //Allocate Set Param Buffer
    mParamHeap = new QCamera3HeapMemory();
    rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
    if(rc != OK) {
        rc = NO_MEMORY;
        ALOGE("Failed to allocate SETPARM Heap memory");
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    //Map memory for parameters buffer
    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF,
            mParamHeap->getFd(0),
            sizeof(parm_buffer_t));
    if(rc < 0) {
        ALOGE("%s:failed to map SETPARM buffer",__func__);
        rc = FAILED_TRANSACTION;
        // Roll back the allocation on map failure.
        mParamHeap->deallocate();
        delete mParamHeap;
        mParamHeap = NULL;
        return rc;
    }

    // mParameters aliases the heap buffer; it stays valid until
    // deinitParameters() tears the heap down.
    mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
    return rc;
}

/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters; unmaps and frees the
 *              parameter heap set up by initParameters()
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
                                  CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap freed above; clear the alias.
    mParameters = NULL;
}

/*===========================================================================
 * FUNCTION   : initStaticMetadata
 *
 * DESCRIPTION: initialize the static metadata
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;
    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    #ifdef HAL_3_CAPABILITIES
    // Publish lens/sensor static properties from the queried capabilities.
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                     &gCamCapability[cameraId]->hyper_focal_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      gCamCapability[cameraId]->focal_lengths,
                      gCamCapability[cameraId]->focal_lengths_count);


    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
gCamCapability[cameraId]->filter_densities, 1493 gCamCapability[cameraId]->filter_densities_count); 1494 1495 1496 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1497 (int*)gCamCapability[cameraId]->optical_stab_modes, 1498 gCamCapability[cameraId]->optical_stab_modes_count); 1499 1500 staticInfo.update(ANDROID_LENS_POSITION, 1501 gCamCapability[cameraId]->lens_position, 1502 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1503 1504 static const int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1505 gCamCapability[cameraId]->lens_shading_map_size.height}; 1506 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1507 lens_shading_map_size, 1508 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1509 1510 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1511 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1512 1513 static const int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1514 gCamCapability[cameraId]->geo_correction_map_size.height}; 1515 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1516 geo_correction_map_size, 1517 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1518 1519 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1520 gCamCapability[cameraId]->geo_correction_map, 1521 sizeof(geo_correction_map)/sizeof(float)); 1522 1523 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1524 gCamCapability[cameraId]->sensor_physical_size, 2); 1525 1526 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1527 gCamCapability[cameraId]->exposure_time_range, 2); 1528 1529 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1530 &gCamCapability[cameraId]->max_frame_duration, 1); 1531 1532 1533 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1534 (int*)&gCamCapability[cameraId]->color_arrangement, 1); 1535 1536 static const 
int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1537 gCamCapability[cameraId]->pixel_array_size.height}; 1538 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1539 pixel_array_size, 2); 1540 1541 static const int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width, 1542 gCamCapability[cameraId]->active_array_size.height}; 1543 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1544 active_array_size, 2); 1545 1546 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1547 &gCamCapability[cameraId]->white_level, 1); 1548 1549 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1550 gCamCapability[cameraId]->black_level_pattern, 4); 1551 1552 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1553 &gCamCapability[cameraId]->flash_charge_duration, 1); 1554 1555 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1556 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1557 1558 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1559 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1); 1560 1561 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1562 &gCamCapability[cameraId]->histogram_size, 1); 1563 1564 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1565 &gCamCapability[cameraId]->max_histogram_count, 1); 1566 1567 static const int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1568 gCamCapability[cameraId]->sharpness_map_size.height}; 1569 1570 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1571 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1572 1573 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1574 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1575 1576 1577 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1578 &gCamCapability[cameraId]->raw_min_duration, 1579 1); 1580 1581 static int32_t scalar_formats[CAM_FORMAT_MAX]; 1582 for 
(int i = 0; i < gCamCapability[cameraId]->supported_scalar_format_cnt; i++) { 1583 scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]); 1584 } 1585 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1586 scalar_formats, 1587 gCamCapability[cameraId]->supported_scalar_format_cnt); 1588 1589 static int32_t available_processed_sizes[CAM_FORMAT_MAX]; 1590 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1591 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1592 available_processed_sizes); 1593 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1594 available_processed_sizes, 1595 gCamCapability[cameraId]->supported_sizes_tbl_cnt); 1596 1597 static float available_fps_ranges[gCamCapability[cameraId]->fps_ranges_tbl_cnt]; 1598 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1599 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1600 available_fps_ranges); 1601 #else 1602 const float minFocusDistance = 0; 1603 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1604 &minFocusDistance, 1); 1605 1606 const float hyperFocusDistance = 0; 1607 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1608 &hyperFocusDistance, 1); 1609 1610 static const float focalLength = 3.30f; 1611 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1612 &focalLength, 1613 1); 1614 1615 static const float aperture = 2.8f; 1616 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1617 &aperture, 1618 1); 1619 1620 static const float filterDensity = 0; 1621 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1622 &filterDensity, 1); 1623 1624 static const uint8_t availableOpticalStabilization = 1625 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1626 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1627 &availableOpticalStabilization, 1); 1628 1629 float lensPosition[3]; 1630 if (facingBack) { 1631 // Back-facing camera is center-top on device 1632 lensPosition[0] = 0; 1633 
lensPosition[1] = 20; 1634 lensPosition[2] = -5; 1635 } else { 1636 // Front-facing camera is center-right on device 1637 lensPosition[0] = 20; 1638 lensPosition[1] = 20; 1639 lensPosition[2] = 0; 1640 } 1641 staticInfo.update(ANDROID_LENS_POSITION, 1642 lensPosition, 1643 sizeof(lensPosition)/ sizeof(float)); 1644 1645 static const int32_t lensShadingMapSize[] = {1, 1}; 1646 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1647 lensShadingMapSize, 1648 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1649 1650 static const float lensShadingMap[3 * 1 * 1 ] = 1651 { 1.f, 1.f, 1.f }; 1652 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, 1653 lensShadingMap, 1654 sizeof(lensShadingMap)/ sizeof(float)); 1655 1656 static const int32_t geometricCorrectionMapSize[] = {2, 2}; 1657 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1658 geometricCorrectionMapSize, 1659 sizeof(geometricCorrectionMapSize)/sizeof(int32_t)); 1660 1661 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = { 1662 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1663 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1664 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 1665 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; 1666 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1667 geometricCorrectionMap, 1668 sizeof(geometricCorrectionMap)/ sizeof(float)); 1669 1670 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; 1671 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1672 sensorPhysicalSize, 2); 1673 1674 const int64_t exposureTimeRange[2] = {1000L, 30000000000L} ; // 1 us - 30 sec 1675 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1676 exposureTimeRange, 2); 1677 1678 const int64_t frameDurationRange[2] = {33331760L, 30000000000L}; 1679 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1680 frameDurationRange, 1); 1681 1682 const uint8_t colorFilterArrangement = 1683 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; 1684 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1685 
&colorFilterArrangement, 1); 1686 1687 const int resolution[2] = {640, 480}; 1688 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1689 resolution, 2); 1690 1691 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1692 resolution, 2); 1693 1694 const uint32_t whiteLevel = 4000; 1695 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1696 (int32_t*)&whiteLevel, 1); 1697 1698 static const int32_t blackLevelPattern[4] = { 1699 1000, 1000, 1700 1000, 1000 }; 1701 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1702 blackLevelPattern, 4); 1703 1704 static const int64_t flashChargeDuration = 0; 1705 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1706 &flashChargeDuration, 1); 1707 1708 static const int32_t tonemapCurvePoints = 128; 1709 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1710 &tonemapCurvePoints, 1); 1711 1712 static const int32_t maxFaceCount = 0; 1713 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1714 &maxFaceCount, 1); 1715 1716 static const int32_t histogramSize = 64; 1717 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1718 &histogramSize, 1); 1719 1720 static const int32_t maxHistogramCount = 1000; 1721 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1722 &maxHistogramCount, 1); 1723 1724 static const int32_t sharpnessMapSize[2] = {64, 64}; 1725 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1726 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); 1727 1728 static const int32_t maxSharpnessMapValue = 1000; 1729 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1730 &maxSharpnessMapValue, 1); 1731 1732 static const uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1733 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1734 availableVstabModes, sizeof(availableVstabModes)); 1735 1736 const uint64_t availableRawMinDurations[1] = {33331760L}; 1737 
staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1738 (int64_t*)&availableRawMinDurations, 1739 1); 1740 1741 const uint32_t availableFormats[4] = { 1742 HAL_PIXEL_FORMAT_RAW_SENSOR, 1743 HAL_PIXEL_FORMAT_BLOB, 1744 HAL_PIXEL_FORMAT_RGBA_8888, 1745 HAL_PIXEL_FORMAT_YCrCb_420_SP 1746 }; 1747 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1748 (int32_t*)availableFormats, 1749 4); 1750 1751 const uint32_t availableProcessedSizes[4] = {1280, 720, 640, 480}; 1752 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1753 (int32_t*)availableProcessedSizes, 1754 sizeof(availableProcessedSizes)/sizeof(int32_t)); 1755 1756 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1757 resolution, 1758 sizeof(resolution)/sizeof(int)); 1759 1760 static const uint8_t availableFaceDetectModes[] = { 1761 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1762 1763 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1764 availableFaceDetectModes, 1765 sizeof(availableFaceDetectModes)); 1766 1767 static const uint8_t availableSceneModes[] = { 1768 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED }; 1769 1770 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1771 availableSceneModes, sizeof(availableSceneModes)); 1772 1773 static const int32_t availableFpsRanges[] = {15, 30}; 1774 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1775 availableFpsRanges, sizeof(availableFpsRanges)/sizeof(int32_t)); 1776 1777 static const uint8_t availableEffectsModes[] = { 1778 ANDROID_CONTROL_EFFECT_MODE_OFF }; 1779 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1780 availableEffectsModes, sizeof(availableEffectsModes)); 1781 1782 static const uint8_t availableAntibandingModes[] = { 1783 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF }; 1784 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1785 availableAntibandingModes, sizeof(availableAntibandingModes)); 1786 1787 static const uint8_t flashAvailable = 0; 1788 
staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1789 &flashAvailable, sizeof(flashAvailable)); 1790 1791 static const int32_t max3aRegions = 0; 1792 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1793 &max3aRegions, 1); 1794 1795 static const camera_metadata_rational exposureCompensationStep = { 1796 1, 3 1797 }; 1798 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1799 &exposureCompensationStep, 1); 1800 1801 static const int32_t jpegThumbnailSizes[] = { 1802 0, 0, 1803 160, 120, 1804 320, 240 1805 }; 1806 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1807 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1808 1809 static const int32_t maxZoom = 10; 1810 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1811 &maxZoom, 1); 1812 1813 static int64_t jpegMinDuration[] = {33331760L, 30000000000L}; 1814 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1815 jpegMinDuration, 1816 sizeof(jpegMinDuration)/sizeof(uint64_t)); 1817 #endif 1818 /*HAL 1 and HAL 3 common*/ 1819 static const int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1820 gCamCapability[cameraId]->raw_dim.height}; 1821 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1822 raw_size, 1823 sizeof(raw_size)/sizeof(uint32_t)); 1824 1825 static const int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1826 gCamCapability[cameraId]->exposure_compensation_max}; 1827 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1828 exposureCompensationRange, 1829 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1830 1831 uint8_t lensFacing = (facingBack) ? 
1832 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1833 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1834 1835 static int32_t available_jpeg_sizes[MAX_SIZES_CNT]; 1836 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1837 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1838 available_jpeg_sizes); 1839 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1840 available_jpeg_sizes, 1841 gCamCapability[cameraId]->picture_sizes_tbl_cnt); 1842 1843 static int32_t max_jpeg_size = 0; 1844 int temp_width, temp_height; 1845 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1846 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1847 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1848 if (temp_width * temp_height > max_jpeg_size ) { 1849 max_jpeg_size = temp_width * temp_height; 1850 } 1851 } 1852 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1853 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1854 &max_jpeg_size, 1); 1855 1856 static uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1857 int32_t size = 0; 1858 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1859 int val = lookupFwkName(EFFECT_MODES_MAP, 1860 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1861 gCamCapability[cameraId]->supported_effects[i]); 1862 if (val != NAME_NOT_FOUND) { 1863 avail_effects[size] = (uint8_t)val; 1864 size++; 1865 } 1866 } 1867 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1868 avail_effects, 1869 size); 1870 1871 static uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1872 size = 0; 1873 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1874 int val = lookupFwkName(SCENE_MODES_MAP, 1875 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1876 gCamCapability[cameraId]->supported_scene_modes[i]); 1877 if (val != NAME_NOT_FOUND) { 1878 avail_scene_modes[size] = (uint8_t)val; 1879 size++; 1880 } 1881 } 1882 
staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1883 avail_scene_modes, 1884 size); 1885 1886 static uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1887 size = 0; 1888 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1889 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1890 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1891 gCamCapability[cameraId]->supported_antibandings[i]); 1892 if (val != NAME_NOT_FOUND) { 1893 avail_antibanding_modes[size] = (uint8_t)val; 1894 size++; 1895 } 1896 1897 } 1898 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1899 avail_antibanding_modes, 1900 size); 1901 1902 ALOGE("%s: %d", __func__, __LINE__); 1903 static uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1904 size = 0; 1905 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1906 int val = lookupFwkName(FOCUS_MODES_MAP, 1907 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1908 gCamCapability[cameraId]->supported_focus_modes[i]); 1909 if (val != NAME_NOT_FOUND) { 1910 avail_af_modes[size] = (uint8_t)val; 1911 size++; 1912 } 1913 } 1914 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1915 avail_af_modes, 1916 size); 1917 1918 static uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1919 size = 0; 1920 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1921 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1922 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1923 gCamCapability[cameraId]->supported_white_balances[i]); 1924 if (val != NAME_NOT_FOUND) { 1925 avail_awb_modes[size] = (uint8_t)val; 1926 size++; 1927 } 1928 } 1929 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1930 avail_awb_modes, 1931 size); 1932 1933 static uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1934 size = 0; 1935 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1936 int val = lookupFwkName(FLASH_MODES_MAP, 
1937 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1938 gCamCapability[cameraId]->supported_flash_modes[i]); 1939 if (val != NAME_NOT_FOUND) { 1940 avail_flash_modes[size] = (uint8_t)val; 1941 size++; 1942 } 1943 } 1944 staticInfo.update(ANDROID_FLASH_MODE, 1945 avail_flash_modes, 1946 size); 1947 1948 /*so far fwk seems to support only 2 aec modes on and off*/ 1949 static const uint8_t avail_ae_modes[] = { 1950 ANDROID_CONTROL_AE_MODE_OFF, 1951 ANDROID_CONTROL_AE_MODE_ON 1952 }; 1953 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1954 avail_ae_modes, 1955 sizeof(avail_ae_modes)); 1956 1957 gStaticMetadata = staticInfo.release(); 1958 return rc; 1959} 1960 1961/*=========================================================================== 1962 * FUNCTION : makeTable 1963 * 1964 * DESCRIPTION: make a table of sizes 1965 * 1966 * PARAMETERS : 1967 * 1968 * 1969 * 1970 * RETURN : int32_t type of status 1971 * NO_ERROR -- success 1972 * none-zero failure code 1973 *==========================================================================*/ 1974void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 1975 int32_t* sizeTable) 1976{ 1977 int j = 0; 1978 for (int i = 0; i < size; i++) { 1979 sizeTable[j] = dimTable[i].width; 1980 sizeTable[j+1] = dimTable[i].height; 1981 j+=2; 1982 } 1983} 1984 1985/*=========================================================================== 1986 * FUNCTION : makeFPSTable 1987 * 1988 * DESCRIPTION: make a table of fps ranges 1989 * 1990 * PARAMETERS : 1991 * 1992 * 1993 * 1994 * RETURN : int32_t type of status 1995 * NO_ERROR -- success 1996 * none-zero failure code 1997 *==========================================================================*/ 1998void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 1999 float* fpsRangesTable) 2000{ 2001 int j = 0; 2002 for (int i = 0; i < size; i++) { 2003 fpsRangesTable[j] = fpsTable[i].min_fps; 2004 fpsRangesTable[j+1] = 
fpsTable[i].max_fps; 2005 j+=2; 2006 } 2007} 2008/*=========================================================================== 2009 * FUNCTION : getPreviewHalPixelFormat 2010 * 2011 * DESCRIPTION: convert the format to type recognized by framework 2012 * 2013 * PARAMETERS : format : the format from backend 2014 * 2015 ** RETURN : format recognized by framework 2016 * 2017 *==========================================================================*/ 2018int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2019{ 2020 int32_t halPixelFormat; 2021 2022 switch (format) { 2023 case CAM_FORMAT_YUV_420_NV12: 2024 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 2025 break; 2026 case CAM_FORMAT_YUV_420_NV21: 2027 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2028 break; 2029 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2030 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2031 break; 2032 case CAM_FORMAT_YUV_420_YV12: 2033 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2034 break; 2035 case CAM_FORMAT_YUV_422_NV16: 2036 case CAM_FORMAT_YUV_422_NV61: 2037 default: 2038 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2039 break; 2040 } 2041 return halPixelFormat; 2042} 2043 2044/*=========================================================================== 2045 * FUNCTION : AddSetParmEntryToBatch 2046 * 2047 * DESCRIPTION: add set parameter entry into batch 2048 * 2049 * PARAMETERS : 2050 * @p_table : ptr to parameter buffer 2051 * @paramType : parameter type 2052 * @paramLength : length of parameter value 2053 * @paramValue : ptr to parameter value 2054 * 2055 * RETURN : int32_t type of status 2056 * NO_ERROR -- success 2057 * none-zero failure code 2058 *==========================================================================*/ 2059int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2060 cam_intf_parm_type_t paramType, 2061 uint32_t paramLength, 2062 void *paramValue) 2063{ 2064 int position = paramType; 2065 int current, next; 2066 2067 
/************************************************************************* 2068 * Code to take care of linking next flags * 2069 *************************************************************************/ 2070 current = GET_FIRST_PARAM_ID(p_table); 2071 if (position == current){ 2072 //DO NOTHING 2073 } else if (position < current){ 2074 SET_NEXT_PARAM_ID(position, p_table, current); 2075 SET_FIRST_PARAM_ID(p_table, position); 2076 } else { 2077 /* Search for the position in the linked list where we need to slot in*/ 2078 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2079 current = GET_NEXT_PARAM_ID(current, p_table); 2080 2081 /*If node already exists no need to alter linking*/ 2082 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2083 next = GET_NEXT_PARAM_ID(current, p_table); 2084 SET_NEXT_PARAM_ID(current, p_table, position); 2085 SET_NEXT_PARAM_ID(position, p_table, next); 2086 } 2087 } 2088 2089 /************************************************************************* 2090 * Copy contents into entry * 2091 *************************************************************************/ 2092 2093 if (paramLength > sizeof(parm_type_t)) { 2094 ALOGE("%s:Size of input larger than max entry size",__func__); 2095 return BAD_VALUE; 2096 } 2097 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2098 return NO_ERROR; 2099} 2100 2101/*=========================================================================== 2102 * FUNCTION : lookupFwkName 2103 * 2104 * DESCRIPTION: In case the enum is not same in fwk and backend 2105 * make sure the parameter is correctly propogated 2106 * 2107 * PARAMETERS : 2108 * @arr : map between the two enums 2109 * @len : len of the map 2110 * @hal_name : name of the hal_parm to map 2111 * 2112 * RETURN : int type of status 2113 * fwk_name -- success 2114 * none-zero failure code 2115 *==========================================================================*/ 2116int8_t QCamera3HardwareInterface::lookupFwkName(const 
QCameraMap arr[], 2117 int len, int hal_name) 2118{ 2119 2120 for (int i = 0; i < len; i++) { 2121 if (arr[i].hal_name == hal_name) 2122 return arr[i].fwk_name; 2123 } 2124 ALOGE("%s: Cannot find matching framework type", __func__); 2125 return NAME_NOT_FOUND; 2126} 2127 2128/*=========================================================================== 2129 * FUNCTION : lookupHalName 2130 * 2131 * DESCRIPTION: In case the enum is not same in fwk and backend 2132 * make sure the parameter is correctly propogated 2133 * 2134 * PARAMETERS : 2135 * @arr : map between the two enums 2136 * @len : len of the map 2137 * @fwk_name : name of the hal_parm to map 2138 * 2139 * RETURN : int32_t type of status 2140 * hal_name -- success 2141 * none-zero failure code 2142 *==========================================================================*/ 2143int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2144 int len, int fwk_name) 2145{ 2146 for (int i = 0; i < len; i++) { 2147 if (arr[i].fwk_name == fwk_name) 2148 return arr[i].hal_name; 2149 } 2150 ALOGE("%s: Cannot find matching hal type", __func__); 2151 return NAME_NOT_FOUND; 2152} 2153 2154/*=========================================================================== 2155 * FUNCTION : getCapabilities 2156 * 2157 * DESCRIPTION: query camera capabilities 2158 * 2159 * PARAMETERS : 2160 * @cameraId : camera Id 2161 * @info : camera info struct to be filled in with camera capabilities 2162 * 2163 * RETURN : int32_t type of status 2164 * NO_ERROR -- success 2165 * none-zero failure code 2166 *==========================================================================*/ 2167int QCamera3HardwareInterface::getCamInfo(int cameraId, 2168 struct camera_info *info) 2169{ 2170 int rc = 0; 2171 2172 if (NULL == gCamCapability[cameraId]) { 2173 rc = initCapabilities(cameraId); 2174 if (rc < 0) { 2175 //pthread_mutex_unlock(&g_camlock); 2176 return rc; 2177 } 2178 } 2179 2180 if (NULL == gStaticMetadata) { 2181 rc = 
initStaticMetadata(cameraId); 2182 if (rc < 0) { 2183 return rc; 2184 } 2185 } 2186 2187 switch(gCamCapability[cameraId]->position) { 2188 case CAM_POSITION_BACK: 2189 info->facing = CAMERA_FACING_BACK; 2190 break; 2191 2192 case CAM_POSITION_FRONT: 2193 info->facing = CAMERA_FACING_FRONT; 2194 break; 2195 2196 default: 2197 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2198 rc = -1; 2199 break; 2200 } 2201 2202 2203 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2204 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2205 info->static_camera_characteristics = gStaticMetadata; 2206 2207 return rc; 2208} 2209 2210/*=========================================================================== 2211 * FUNCTION : translateMetadata 2212 * 2213 * DESCRIPTION: translate the metadata into camera_metadata_t 2214 * 2215 * PARAMETERS : type of the request 2216 * 2217 * 2218 * RETURN : success: camera_metadata_t* 2219 * failure: NULL 2220 * 2221 *==========================================================================*/ 2222camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2223{ 2224 pthread_mutex_lock(&mMutex); 2225 2226 if (mDefaultMetadata[type] != NULL) { 2227 pthread_mutex_unlock(&mMutex); 2228 return mDefaultMetadata[type]; 2229 } 2230 //first time we are handling this request 2231 //fill up the metadata structure using the wrapper class 2232 CameraMetadata settings; 2233 //translate from cam_capability_t to camera_metadata_tag_t 2234 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2235 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2236 2237 /*control*/ 2238 2239 uint8_t controlIntent = 0; 2240 switch (type) { 2241 case CAMERA3_TEMPLATE_PREVIEW: 2242 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2243 break; 2244 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2245 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2246 break; 2247 case 
CAMERA3_TEMPLATE_VIDEO_RECORD: 2248 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2249 break; 2250 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2251 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2252 break; 2253 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2254 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2255 break; 2256 default: 2257 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2258 break; 2259 } 2260 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2261 2262 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2263 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2264 2265 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2266 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2267 2268 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2269 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2270 2271 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2272 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2273 2274 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2275 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2276 2277 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2278 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2279 2280 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2281 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2282 2283 /*flash*/ 2284 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2285 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2286 2287 2288 /* lens */ 2289 static const float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2290 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2291 2292 if (gCamCapability[mCameraId]->filter_densities_count) { 2293 static const float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2294 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2295 gCamCapability[mCameraId]->filter_densities_count); 2296 } 2297 2298 /* TODO: Enable focus lengths once supported*/ 2299 /*if (gCamCapability[mCameraId]->focal_lengths_count) { 2300 static const float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0]; 2301 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2302 }*/ 2303 2304 mDefaultMetadata[type] = settings.release(); 2305 2306 pthread_mutex_unlock(&mMutex); 2307 return mDefaultMetadata[type]; 2308} 2309 2310/*=========================================================================== 2311 * FUNCTION : setFrameParameters 2312 * 2313 * DESCRIPTION: set parameters per frame as requested in the metadata from 2314 * framework 2315 * 2316 * PARAMETERS : 2317 * @settings : frame settings information from framework 2318 * 2319 * 2320 * RETURN : success: NO_ERROR 2321 * failure: 2322 *==========================================================================*/ 2323int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2324 const camera_metadata_t *settings) 2325{ 2326 /*translate from camera_metadata_t type to parm_type_t*/ 2327 int rc = 0; 2328 if (settings == NULL && mFirstRequest) { 2329 /*settings cannot be null for the first request*/ 2330 return BAD_VALUE; 2331 } 2332 2333 int32_t hal_version = CAM_HAL_V3; 2334 2335 memset(mParameters, 0, 
sizeof(parm_buffer_t)); 2336 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2337 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2338 sizeof(hal_version), &hal_version); 2339 2340 /*we need to update the frame number in the parameters*/ 2341 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2342 sizeof(frame_id), &frame_id); 2343 if (rc < 0) { 2344 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2345 return BAD_VALUE; 2346 } 2347 2348 if(settings != NULL){ 2349 rc = translateMetadataToParameters(settings); 2350 } 2351 /*set the parameters to backend*/ 2352 ALOGE("%s: %d", __func__, __LINE__); 2353 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2354 return rc; 2355} 2356 2357/*=========================================================================== 2358 * FUNCTION : translateMetadataToParameters 2359 * 2360 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2361 * 2362 * 2363 * PARAMETERS : 2364 * @settings : frame settings information from framework 2365 * 2366 * 2367 * RETURN : success: NO_ERROR 2368 * failure: 2369 *==========================================================================*/ 2370int QCamera3HardwareInterface::translateMetadataToParameters 2371 (const camera_metadata_t *settings) 2372{ 2373 int rc = 0; 2374 CameraMetadata frame_settings; 2375 frame_settings = settings; 2376 2377 2378 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2379 int32_t antibandingMode = 2380 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2381 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2382 sizeof(antibandingMode), &antibandingMode); 2383 } 2384 2385 /*int32_t expCompensation = frame_settings.find().data.i32[0]; 2386 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2387 sizeof(expCompensation), &expCompensation);*/ 2388 if 
(frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2389 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2390 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2391 sizeof(aeLock), &aeLock); 2392 } 2393 2394 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2395 cam_fps_range_t fps_range; 2396 fps_range.min_fps = 2397 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2398 fps_range.max_fps = 2399 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2400 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2401 sizeof(fps_range), &fps_range); 2402 } 2403 2404 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2405 uint8_t focusMode = 2406 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2407 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2408 sizeof(focusMode), &focusMode); 2409 } 2410 2411 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2412 uint8_t awbLock = 2413 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2414 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2415 sizeof(awbLock), &awbLock); 2416 } 2417 2418 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2419 uint8_t fwk_whiteLevel = 2420 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2421 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2422 sizeof(WHITE_BALANCE_MODES_MAP), 2423 fwk_whiteLevel); 2424 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2425 sizeof(whiteLevel), &whiteLevel); 2426 } 2427 2428 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2429 uint8_t fwk_effectMode = 2430 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2431 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2432 sizeof(EFFECT_MODES_MAP), 2433 fwk_effectMode); 2434 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2435 sizeof(effectMode), &effectMode); 2436 } 2437 2438 if 
(frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2439 uint8_t fwk_aeMode = 2440 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2441 uint8_t aeMode = lookupHalName(AUTO_EXPOSURE_MAP, 2442 sizeof(AUTO_EXPOSURE_MAP), 2443 fwk_aeMode); 2444 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2445 sizeof(aeMode), &aeMode); 2446 } 2447 2448 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2449 int32_t metaFrameNumber = 2450 frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2451 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2452 sizeof(metaFrameNumber), &metaFrameNumber); 2453 } 2454 2455 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2456 uint8_t colorCorrectMode = 2457 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2458 rc = 2459 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2460 sizeof(colorCorrectMode), &colorCorrectMode); 2461 } 2462 2463 uint8_t aecTrigger = CAM_AEC_TRIGGER_IDLE; 2464 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) { 2465 aecTrigger = 2466 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2467 } 2468 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2469 sizeof(aecTrigger), &aecTrigger); 2470 2471 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER)) { 2472 uint8_t afTrigger = 2473 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2474 rc = AddSetParmEntryToBatch(mParameters, 2475 CAM_INTF_META_AF_TRIGGER, sizeof(afTrigger), &afTrigger); 2476 } 2477 2478 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2479 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2480 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2481 sizeof(metaMode), &metaMode); 2482 } 2483 2484 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2485 int32_t demosaic = 2486 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2487 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2488 sizeof(demosaic), &demosaic); 2489 } 2490 2491 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2492 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2493 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2494 sizeof(edgeMode), &edgeMode); 2495 } 2496 2497 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2498 int32_t edgeStrength = 2499 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2500 rc = AddSetParmEntryToBatch(mParameters, 2501 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2502 } 2503 2504 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2505 uint8_t flashPower = 2506 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2507 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2508 sizeof(flashPower), &flashPower); 2509 } 2510 2511 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2512 int64_t flashFiringTime = 2513 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2514 rc = AddSetParmEntryToBatch(mParameters, 2515 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2516 } 2517 2518 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2519 uint8_t geometricMode = 2520 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2521 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2522 sizeof(geometricMode), &geometricMode); 2523 } 2524 2525 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2526 uint8_t geometricStrength = 2527 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2528 rc = AddSetParmEntryToBatch(mParameters, 2529 CAM_INTF_META_GEOMETRIC_STRENGTH, 2530 sizeof(geometricStrength), &geometricStrength); 2531 } 2532 2533 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2534 uint8_t hotPixelMode = 2535 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2536 rc = AddSetParmEntryToBatch(mParameters, 
CAM_INTF_META_HOTPIXEL_MODE, 2537 sizeof(hotPixelMode), &hotPixelMode); 2538 } 2539 2540 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2541 float lensAperture = 2542 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0]; 2543 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2544 sizeof(lensAperture), &lensAperture); 2545 } 2546 2547 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2548 float filterDensity = 2549 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2550 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2551 sizeof(filterDensity), &filterDensity); 2552 } 2553 2554 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2555 float focalLength = 2556 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2557 rc = AddSetParmEntryToBatch(mParameters, 2558 CAM_INTF_META_LENS_FOCAL_LENGTH, 2559 sizeof(focalLength), &focalLength); 2560 } 2561 2562 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2563 float focalDistance = 2564 frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2565 rc = AddSetParmEntryToBatch(mParameters, 2566 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2567 sizeof(focalDistance), &focalDistance); 2568 } 2569 2570 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2571 uint8_t optStabMode = 2572 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2573 rc = AddSetParmEntryToBatch(mParameters, 2574 CAM_INTF_META_LENS_OPT_STAB_MODE, 2575 sizeof(optStabMode), &optStabMode); 2576 } 2577 2578 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2579 uint8_t noiseRedMode = 2580 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2581 rc = AddSetParmEntryToBatch(mParameters, 2582 CAM_INTF_META_NOISE_REDUCTION_MODE, 2583 sizeof(noiseRedMode), &noiseRedMode); 2584 } 2585 2586 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2587 uint8_t noiseRedStrength = 2588 
frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2589 rc = AddSetParmEntryToBatch(mParameters, 2590 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2591 sizeof(noiseRedStrength), &noiseRedStrength); 2592 } 2593 2594 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2595 cam_crop_region_t scalerCropRegion; 2596 scalerCropRegion.left = 2597 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2598 scalerCropRegion.top = 2599 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2600 scalerCropRegion.width = 2601 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2602 rc = AddSetParmEntryToBatch(mParameters, 2603 CAM_INTF_META_SCALER_CROP_REGION, 2604 sizeof(scalerCropRegion), &scalerCropRegion); 2605 } 2606 2607 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2608 int64_t sensorExpTime = 2609 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2610 rc = AddSetParmEntryToBatch(mParameters, 2611 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2612 sizeof(sensorExpTime), &sensorExpTime); 2613 } 2614 2615 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2616 int64_t sensorFrameDuration = 2617 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2618 rc = AddSetParmEntryToBatch(mParameters, 2619 CAM_INTF_META_SENSOR_FRAME_DURATION, 2620 sizeof(sensorFrameDuration), &sensorFrameDuration); 2621 } 2622 2623 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2624 int32_t sensorSensitivity = 2625 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2626 rc = AddSetParmEntryToBatch(mParameters, 2627 CAM_INTF_META_SENSOR_SENSITIVITY, 2628 sizeof(sensorSensitivity), &sensorSensitivity); 2629 } 2630 2631 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2632 int32_t shadingMode = 2633 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2634 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2635 sizeof(shadingMode), &shadingMode); 2636 } 2637 2638 if 
(frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2639 uint8_t shadingStrength = 2640 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2641 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2642 sizeof(shadingStrength), &shadingStrength); 2643 } 2644 2645 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2646 uint8_t facedetectMode = 2647 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2648 rc = AddSetParmEntryToBatch(mParameters, 2649 CAM_INTF_META_STATS_FACEDETECT_MODE, 2650 sizeof(facedetectMode), &facedetectMode); 2651 } 2652 2653 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2654 uint8_t histogramMode = 2655 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2656 rc = AddSetParmEntryToBatch(mParameters, 2657 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2658 sizeof(histogramMode), &histogramMode); 2659 } 2660 2661 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2662 uint8_t sharpnessMapMode = 2663 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2664 rc = AddSetParmEntryToBatch(mParameters, 2665 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2666 sizeof(sharpnessMapMode), &sharpnessMapMode); 2667 } 2668 2669 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2670 uint8_t tonemapMode = 2671 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2672 rc = AddSetParmEntryToBatch(mParameters, 2673 CAM_INTF_META_TONEMAP_MODE, 2674 sizeof(tonemapMode), &tonemapMode); 2675 } 2676 2677 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2678 uint8_t captureIntent = 2679 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2680 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2681 sizeof(captureIntent), &captureIntent); 2682 } 2683 2684 return rc; 2685} 2686 2687/*=========================================================================== 2688 * FUNCTION : getJpegSettings 2689 * 2690 * DESCRIPTION: 
save the jpeg settings in the HAL 2691 * 2692 * 2693 * PARAMETERS : 2694 * @settings : frame settings information from framework 2695 * 2696 * 2697 * RETURN : success: NO_ERROR 2698 * failure: 2699 *==========================================================================*/ 2700int QCamera3HardwareInterface::getJpegSettings 2701 (const camera_metadata_t *settings) 2702{ 2703 if (mJpegSettings) { 2704 free(mJpegSettings); 2705 mJpegSettings = NULL; 2706 } 2707 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2708 CameraMetadata jpeg_settings; 2709 jpeg_settings = settings; 2710 2711 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2712 mJpegSettings->jpeg_orientation = 2713 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2714 } else { 2715 mJpegSettings->jpeg_orientation = 0; 2716 } 2717 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2718 mJpegSettings->jpeg_quality = 2719 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2720 } else { 2721 mJpegSettings->jpeg_quality = 85; 2722 } 2723 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2724 mJpegSettings->thumbnail_size.width = 2725 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2726 mJpegSettings->thumbnail_size.height = 2727 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2728 mJpegSettings->thumbnail_size.width = 320; 2729 mJpegSettings->thumbnail_size.height = 240; 2730 } else { 2731 mJpegSettings->thumbnail_size.width = 640; 2732 mJpegSettings->thumbnail_size.height = 480; 2733 } 2734 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2735 for (int i = 0; i < 3; i++) { 2736 mJpegSettings->gps_coordinates[i] = 2737 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2738 } 2739 } 2740 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2741 mJpegSettings->gps_timestamp = 2742 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2743 } 2744 2745 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2746 
mJpegSettings->gps_processing_method = 2747 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0]; 2748 } 2749 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2750 mJpegSettings->sensor_sensitivity = 2751 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2752 } 2753 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2754 mJpegSettings->lens_focal_length = 2755 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2756 } 2757 return 0; 2758} 2759 2760/*=========================================================================== 2761 * FUNCTION : captureResultCb 2762 * 2763 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 2764 * 2765 * PARAMETERS : 2766 * @frame : frame information from mm-camera-interface 2767 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 2768 * @userdata: userdata 2769 * 2770 * RETURN : NONE 2771 *==========================================================================*/ 2772void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 2773 camera3_stream_buffer_t *buffer, 2774 uint32_t frame_number, void *userdata) 2775{ 2776 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 2777 if (hw == NULL) { 2778 ALOGE("%s: Invalid hw %p", __func__, hw); 2779 return; 2780 } 2781 2782 hw->captureResultCb(metadata, buffer, frame_number); 2783 return; 2784} 2785 2786/*=========================================================================== 2787 * FUNCTION : initialize 2788 * 2789 * DESCRIPTION: Pass framework callback pointers to HAL 2790 * 2791 * PARAMETERS : 2792 * 2793 * 2794 * RETURN : Success : 0 2795 * Failure: -ENODEV 2796 *==========================================================================*/ 2797 2798int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 2799 const camera3_callback_ops_t *callback_ops) 2800{ 2801 ALOGE("%s: E", __func__); 2802 QCamera3HardwareInterface *hw = 
2803 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2804 if (!hw) { 2805 ALOGE("%s: NULL camera device", __func__); 2806 return -ENODEV; 2807 } 2808 2809 int rc = hw->initialize(callback_ops); 2810 ALOGE("%s: X", __func__); 2811 return rc; 2812} 2813 2814/*=========================================================================== 2815 * FUNCTION : configure_streams 2816 * 2817 * DESCRIPTION: 2818 * 2819 * PARAMETERS : 2820 * 2821 * 2822 * RETURN : Success: 0 2823 * Failure: -EINVAL (if stream configuration is invalid) 2824 * -ENODEV (fatal error) 2825 *==========================================================================*/ 2826 2827int QCamera3HardwareInterface::configure_streams( 2828 const struct camera3_device *device, 2829 camera3_stream_configuration_t *stream_list) 2830{ 2831 ALOGE("%s: E", __func__); 2832 QCamera3HardwareInterface *hw = 2833 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2834 if (!hw) { 2835 ALOGE("%s: NULL camera device", __func__); 2836 return -ENODEV; 2837 } 2838 int rc = hw->configureStreams(stream_list); 2839 ALOGE("%s: X", __func__); 2840 return rc; 2841} 2842 2843/*=========================================================================== 2844 * FUNCTION : register_stream_buffers 2845 * 2846 * DESCRIPTION: Register stream buffers with the device 2847 * 2848 * PARAMETERS : 2849 * 2850 * RETURN : 2851 *==========================================================================*/ 2852int QCamera3HardwareInterface::register_stream_buffers( 2853 const struct camera3_device *device, 2854 const camera3_stream_buffer_set_t *buffer_set) 2855{ 2856 ALOGE("%s: E", __func__); 2857 QCamera3HardwareInterface *hw = 2858 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2859 if (!hw) { 2860 ALOGE("%s: NULL camera device", __func__); 2861 return -ENODEV; 2862 } 2863 int rc = hw->registerStreamBuffers(buffer_set); 2864 ALOGE("%s: X", __func__); 2865 return rc; 2866} 2867 
2868/*=========================================================================== 2869 * FUNCTION : construct_default_request_settings 2870 * 2871 * DESCRIPTION: Configure a settings buffer to meet the required use case 2872 * 2873 * PARAMETERS : 2874 * 2875 * 2876 * RETURN : Success: Return valid metadata 2877 * Failure: Return NULL 2878 *==========================================================================*/ 2879const camera_metadata_t* QCamera3HardwareInterface:: 2880 construct_default_request_settings(const struct camera3_device *device, 2881 int type) 2882{ 2883 2884 ALOGE("%s: E", __func__); 2885 camera_metadata_t* fwk_metadata = NULL; 2886 QCamera3HardwareInterface *hw = 2887 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2888 if (!hw) { 2889 ALOGE("%s: NULL camera device", __func__); 2890 return NULL; 2891 } 2892 2893 fwk_metadata = hw->translateCapabilityToMetadata(type); 2894 2895 ALOGE("%s: X", __func__); 2896 return fwk_metadata; 2897} 2898 2899/*=========================================================================== 2900 * FUNCTION : process_capture_request 2901 * 2902 * DESCRIPTION: 2903 * 2904 * PARAMETERS : 2905 * 2906 * 2907 * RETURN : 2908 *==========================================================================*/ 2909int QCamera3HardwareInterface::process_capture_request( 2910 const struct camera3_device *device, 2911 camera3_capture_request_t *request) 2912{ 2913 ALOGE("%s: E", __func__); 2914 QCamera3HardwareInterface *hw = 2915 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2916 if (!hw) { 2917 ALOGE("%s: NULL camera device", __func__); 2918 return -EINVAL; 2919 } 2920 2921 int rc = hw->processCaptureRequest(request); 2922 ALOGE("%s: X", __func__); 2923 return rc; 2924} 2925 2926/*=========================================================================== 2927 * FUNCTION : get_metadata_vendor_tag_ops 2928 * 2929 * DESCRIPTION: 2930 * 2931 * PARAMETERS : 2932 * 2933 * 2934 * RETURN : 2935 
*==========================================================================*/ 2936 2937void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 2938 const struct camera3_device *device, 2939 vendor_tag_query_ops_t* ops) 2940{ 2941 ALOGE("%s: E", __func__); 2942 QCamera3HardwareInterface *hw = 2943 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2944 if (!hw) { 2945 ALOGE("%s: NULL camera device", __func__); 2946 return; 2947 } 2948 2949 hw->getMetadataVendorTagOps(ops); 2950 ALOGE("%s: X", __func__); 2951 return; 2952} 2953 2954/*=========================================================================== 2955 * FUNCTION : dump 2956 * 2957 * DESCRIPTION: 2958 * 2959 * PARAMETERS : 2960 * 2961 * 2962 * RETURN : 2963 *==========================================================================*/ 2964 2965void QCamera3HardwareInterface::dump( 2966 const struct camera3_device *device, int fd) 2967{ 2968 ALOGE("%s: E", __func__); 2969 QCamera3HardwareInterface *hw = 2970 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2971 if (!hw) { 2972 ALOGE("%s: NULL camera device", __func__); 2973 return; 2974 } 2975 2976 hw->dump(fd); 2977 ALOGE("%s: X", __func__); 2978 return; 2979} 2980 2981/*=========================================================================== 2982 * FUNCTION : close_camera_device 2983 * 2984 * DESCRIPTION: 2985 * 2986 * PARAMETERS : 2987 * 2988 * 2989 * RETURN : 2990 *==========================================================================*/ 2991int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 2992{ 2993 ALOGE("%s: E", __func__); 2994 int ret = NO_ERROR; 2995 QCamera3HardwareInterface *hw = 2996 reinterpret_cast<QCamera3HardwareInterface *>( 2997 reinterpret_cast<camera3_device_t *>(device)->priv); 2998 if (!hw) { 2999 ALOGE("NULL camera device"); 3000 return BAD_VALUE; 3001 } 3002 delete hw; 3003 ALOGE("%s: X", __func__); 3004 return ret; 3005} 3006 3007}; //end namespace qcamera 3008