// QCamera3HWI.cpp -- revision b66f295ef24aea24873bcf80cee589835338e2a1
/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {

// Convenience accessor: raw data pointer of buffer INDEX inside a
// QCamera3Mem-style memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability table, indexed by camera id. Entries are assumed to
// be populated before a QCamera3HardwareInterface is constructed (the ctor
// dereferences gCamCapability[cameraId]) -- TODO confirm where this is filled.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previous frame settings (backend parameter buffer).
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework.
const camera_metadata_t *gStaticMetadata;

/* Translation tables between framework (camera3/ANDROID_*) enum values and
 * backend (mm-camera CAM_*) enum values. */

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED,    CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO is intentionally mapped to the backend anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // Note: both backend OFF and FIXED map to framework AF_MODE_OFF; a
    // framework->backend lookup will stop at the first (OFF) entry.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AUTO_EXPOSURE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,    CAM_AEC_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,     CAM_AEC_MODE_FRAME_AVERAGE },
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON   },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

// camera3 HAL entry points handed to the framework via
// camera3_device_t::ops (GNU designated-initializer syntax).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
mParameters(NULL), 156 mJpegSettings(NULL) 157{ 158 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 159 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 160 mCameraDevice.common.close = close_camera_device; 161 mCameraDevice.ops = &mCameraOps; 162 mCameraDevice.priv = this; 163 gCamCapability[cameraId]->version = CAM_HAL_V3; 164 165 pthread_mutex_init(&mRequestLock, NULL); 166 pthread_cond_init(&mRequestCond, NULL); 167 mPendingRequest = 0; 168 169 pthread_mutex_init(&mMutex, NULL); 170 pthread_mutex_init(&mCaptureResultLock, NULL); 171 172 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 173 mDefaultMetadata[i] = NULL; 174} 175 176/*=========================================================================== 177 * FUNCTION : ~QCamera3HardwareInterface 178 * 179 * DESCRIPTION: destructor of QCamera3HardwareInterface 180 * 181 * PARAMETERS : none 182 * 183 * RETURN : none 184 *==========================================================================*/ 185QCamera3HardwareInterface::~QCamera3HardwareInterface() 186{ 187 ALOGE("%s: %d", __func__, __LINE__); 188 /* Clean up all channels */ 189 mMetadataChannel->stop(); 190 delete mMetadataChannel; 191 mMetadataChannel = NULL; 192 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 193 it != mStreamInfo.end(); it++) { 194 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 195 channel->stop(); 196 delete channel; 197 free (*it); 198 } 199 200 ALOGE("%s: %d", __func__, __LINE__); 201 if (mJpegSettings != NULL) { 202 free(mJpegSettings); 203 mJpegSettings = NULL; 204 } 205 ALOGE("%s: %d", __func__, __LINE__); 206 deinitParameters(); 207 ALOGE("%s: %d", __func__, __LINE__); 208 closeCamera(); 209 210 ALOGE("%s: %d", __func__, __LINE__); 211 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 212 if (mDefaultMetadata[i]) 213 free_camera_metadata(mDefaultMetadata[i]); 214 215 pthread_mutex_destroy(&mRequestLock); 216 pthread_cond_destroy(&mRequestCond); 217 218 
pthread_mutex_destroy(&mMutex); 219 pthread_mutex_destroy(&mCaptureResultLock); 220} 221 222/*=========================================================================== 223 * FUNCTION : openCamera 224 * 225 * DESCRIPTION: open camera 226 * 227 * PARAMETERS : 228 * @hw_device : double ptr for camera device struct 229 * 230 * RETURN : int32_t type of status 231 * NO_ERROR -- success 232 * none-zero failure code 233 *==========================================================================*/ 234int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 235{ 236 //int rc = NO_ERROR; 237 int rc = 0; 238 if (mCameraOpened) { 239 *hw_device = NULL; 240 return PERMISSION_DENIED; 241 } 242 243 rc = openCamera(); 244 if (rc == 0) 245 *hw_device = &mCameraDevice.common; 246 else 247 *hw_device = NULL; 248 return rc; 249} 250 251/*=========================================================================== 252 * FUNCTION : openCamera 253 * 254 * DESCRIPTION: open camera 255 * 256 * PARAMETERS : none 257 * 258 * RETURN : int32_t type of status 259 * NO_ERROR -- success 260 * none-zero failure code 261 *==========================================================================*/ 262int QCamera3HardwareInterface::openCamera() 263{ 264 if (mCameraHandle) { 265 ALOGE("Failure: Camera already opened"); 266 return ALREADY_EXISTS; 267 } 268 mCameraHandle = camera_open(mCameraId); 269 if (!mCameraHandle) { 270 ALOGE("camera_open failed."); 271 return UNKNOWN_ERROR; 272 } 273 274 mCameraOpened = true; 275 276 return NO_ERROR; 277} 278 279/*=========================================================================== 280 * FUNCTION : closeCamera 281 * 282 * DESCRIPTION: close camera 283 * 284 * PARAMETERS : none 285 * 286 * RETURN : int32_t type of status 287 * NO_ERROR -- success 288 * none-zero failure code 289 *==========================================================================*/ 290int QCamera3HardwareInterface::closeCamera() 291{ 292 int rc = NO_ERROR; 293 294 
rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 295 mCameraHandle = NULL; 296 mCameraOpened = false; 297 298 return rc; 299} 300 301/*=========================================================================== 302 * FUNCTION : initialize 303 * 304 * DESCRIPTION: Initialize frameworks callback functions 305 * 306 * PARAMETERS : 307 * @callback_ops : callback function to frameworks 308 * 309 * RETURN : 310 * 311 *==========================================================================*/ 312int QCamera3HardwareInterface::initialize( 313 const struct camera3_callback_ops *callback_ops) 314{ 315 int rc; 316 317 pthread_mutex_lock(&mMutex); 318 319 rc = initParameters(); 320 if (rc < 0) { 321 ALOGE("%s: initParamters failed %d", __func__, rc); 322 goto err1; 323 } 324 //Create metadata channel and initialize it 325 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 326 mCameraHandle->ops, captureResultCb, 327 &gCamCapability[mCameraId]->padding_info, this); 328 if (mMetadataChannel == NULL) { 329 ALOGE("%s: failed to allocate metadata channel", __func__); 330 rc = -ENOMEM; 331 goto err2; 332 } 333 rc = mMetadataChannel->initialize(); 334 if (rc < 0) { 335 ALOGE("%s: metadata channel initialization failed", __func__); 336 goto err3; 337 } 338 339 mCallbackOps = callback_ops; 340 341 pthread_mutex_unlock(&mMutex); 342 return 0; 343 344err3: 345 delete mMetadataChannel; 346 mMetadataChannel = NULL; 347err2: 348 deinitParameters(); 349err1: 350 pthread_mutex_unlock(&mMutex); 351 return rc; 352} 353 354/*=========================================================================== 355 * FUNCTION : configureStreams 356 * 357 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 358 * and output streams. 
 * and output streams.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    int rc = 0;
    pthread_mutex_lock(&mMutex);

    // Sanity check stream_list
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __func__);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __func__,
                streamList->num_streams);
        pthread_mutex_unlock(&mMutex);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    /* first invalidate all the steams in the mStreamList
     * if they appear again, they will be validated */
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        (*it)->status = INVALID;
    }
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        ALOGV("%s: newStream type = %d, stream format = %d",
                __func__, newStream->stream_type, newStream->format);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                QCamera3Channel *channel =
                    (QCamera3Channel*)(*it)->stream->priv;
                stream_exists = true;
                (*it)->status = RECONFIGURE;
                /*delete the channel object associated with the stream because
                  we need to reconfigure*/
                channel->stop();
                delete channel;
                // priv is rebuilt in the channel-allocation loop below.
                (*it)->stream->priv = NULL;
            }
        }
        if (!stream_exists) {
            //new stream
            // NOTE(review): malloc result is not NULL-checked and the struct
            // is not zero-initialized; buffer_set holds garbage until
            // registerStreamBuffers() fills it in -- confirm with callers.
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->registered = 0;
            mStreamInfo.push_back(stream_info);
        }
        if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                ALOGE("%s: Multiple input streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }
    }
    mInputStream = inputStream;

    /* TODO: Clean up no longer used streams, and maintain others if this
     * is not the 1st time configureStreams is called */
    /*clean up invalid streams*/
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            channel->stop();
            delete channel;
            // Free the buffer-pointer array allocated in registerStreamBuffers().
            delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    //mMetadataChannel->stop();

    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            //New stream, construct channel

            // Set the gralloc usage bits the buffer allocator needs for
            // this stream direction.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3Channel *channel;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                    // Preview/video path.
                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }

                    newStream->priv = channel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    // JPEG snapshot path.
                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
                    channel = new QCamera3PicChannel(mCameraHandle->camera_handle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    //Register Jpeg callback with mm-camera-interface
                    rc = channel->initialize();
                    if (rc < 0) {
                        ALOGE("%s: snapshot channel initialization failed", __func__);
                        delete channel;
                        channel = NULL;
                        goto end;
                    }

                    newStream->priv = channel;
                    break;

                //TODO: Add support for app consumed format?
                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }
    }
    /*For the streams to be reconfigured we need to register the buffers
      since the framework wont*/
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->status == RECONFIGURE) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            /*only register buffers for streams that have already been
              registered*/
            if ((*it)->registered) {
                rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
                        (*it)->buffer_set.buffers);
                if (rc != NO_ERROR) {
                    ALOGE("%s: Failed to register the buffers of old stream,\
                            rc = %d", __func__, rc);
                }
                ALOGE("%s: channel %p has %d buffers",
                        __func__, channel, (*it)->buffer_set.num_buffers);
            }
        }

        // Reset the outstanding-buffer count for every stream.
        ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
        if (index == NAME_NOT_FOUND) {
            mPendingBuffersMap.add((*it)->stream, 0);
        } else {
            mPendingBuffersMap.editValueAt(index) = 0;
        }
    }

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    mPendingRequestsList.clear();

    //settings/parameters don't carry over for new configureStreams
    // NOTE(review): assumes initialize() already ran so mParameters is
    // non-NULL -- guaranteed by the camera3 call ordering, verify.
    memset(mParameters, 0, sizeof(parm_buffer_t));
    mFirstRequest = true;

end:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateCaptureRequest
 *
 * DESCRIPTION: validate a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
        camera3_capture_request_t *request)
{
ssize_t idx = 0; 585 const camera3_stream_buffer_t *b; 586 CameraMetadata meta; 587 588 /* Sanity check the request */ 589 if (request == NULL) { 590 ALOGE("%s: NULL capture request", __func__); 591 return BAD_VALUE; 592 } 593 594 uint32_t frameNumber = request->frame_number; 595 if (request->input_buffer != NULL && 596 request->input_buffer->stream != mInputStream) { 597 ALOGE("%s: Request %d: Input buffer not from input stream!", 598 __FUNCTION__, frameNumber); 599 return BAD_VALUE; 600 } 601 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 602 ALOGE("%s: Request %d: No output buffers provided!", 603 __FUNCTION__, frameNumber); 604 return BAD_VALUE; 605 } 606 if (request->input_buffer != NULL) { 607 //TODO 608 ALOGE("%s: Not supporting input buffer yet", __func__); 609 return BAD_VALUE; 610 } 611 612 // Validate all buffers 613 b = request->output_buffers; 614 do { 615 QCamera3Channel *channel = 616 static_cast<QCamera3Channel*>(b->stream->priv); 617 if (channel == NULL) { 618 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 619 __func__, frameNumber, idx); 620 return BAD_VALUE; 621 } 622 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 623 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 624 __func__, frameNumber, idx); 625 return BAD_VALUE; 626 } 627 if (b->release_fence != -1) { 628 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 629 __func__, frameNumber, idx); 630 return BAD_VALUE; 631 } 632 if (b->buffer == NULL) { 633 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 634 __func__, frameNumber, idx); 635 return BAD_VALUE; 636 } 637 idx++; 638 b = request->output_buffers + idx; 639 } while (idx < (ssize_t)request->num_output_buffers); 640 641 return NO_ERROR; 642} 643 644/*=========================================================================== 645 * FUNCTION : registerStreamBuffers 646 * 647 * DESCRIPTION: Register buffers for a given stream with the HAL device. 
648 * 649 * PARAMETERS : 650 * @stream_list : streams to be configured 651 * 652 * RETURN : 653 * 654 *==========================================================================*/ 655int QCamera3HardwareInterface::registerStreamBuffers( 656 const camera3_stream_buffer_set_t *buffer_set) 657{ 658 int rc = 0; 659 660 pthread_mutex_lock(&mMutex); 661 662 if (buffer_set == NULL) { 663 ALOGE("%s: Invalid buffer_set parameter.", __func__); 664 pthread_mutex_unlock(&mMutex); 665 return -EINVAL; 666 } 667 if (buffer_set->stream == NULL) { 668 ALOGE("%s: Invalid stream parameter.", __func__); 669 pthread_mutex_unlock(&mMutex); 670 return -EINVAL; 671 } 672 if (buffer_set->num_buffers < 1) { 673 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 674 pthread_mutex_unlock(&mMutex); 675 return -EINVAL; 676 } 677 if (buffer_set->buffers == NULL) { 678 ALOGE("%s: Invalid buffers parameter.", __func__); 679 pthread_mutex_unlock(&mMutex); 680 return -EINVAL; 681 } 682 683 camera3_stream_t *stream = buffer_set->stream; 684 QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 685 686 //set the buffer_set in the mStreamInfo array 687 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 688 it != mStreamInfo.end(); it++) { 689 if ((*it)->stream == stream) { 690 uint32_t numBuffers = buffer_set->num_buffers; 691 (*it)->buffer_set.stream = buffer_set->stream; 692 (*it)->buffer_set.num_buffers = numBuffers; 693 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 694 if ((*it)->buffer_set.buffers == NULL) { 695 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 696 pthread_mutex_unlock(&mMutex); 697 return -ENOMEM; 698 } 699 for (size_t j = 0; j < numBuffers; j++){ 700 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 701 } 702 (*it)->registered = 1; 703 } 704 } 705 706 if (stream->stream_type != CAMERA3_STREAM_OUTPUT) { 707 ALOGE("%s: not yet support non output type stream", __func__); 708 pthread_mutex_unlock(&mMutex); 709 
        return -EINVAL;
    }
    // Hand the gralloc handles down to the backend channel.
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    pthread_mutex_unlock(&mMutex);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    int rc = NO_ERROR;
    CameraMetadata meta;  // NOTE(review): unused in this function.

    pthread_mutex_lock(&mMutex);

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        ALOGE("%s: incoming request is not valid", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    uint32_t frameNumber = request->frame_number;

    // Translate the per-frame settings to backend parameters.
    rc = setFrameParameters(request->frame_number, request->settings);
    if (rc < 0) {
        ALOGE("%s: fail to set frame parameters", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__,
            request->num_output_buffers);
    // Acquire all request buffers first
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        sp<Fence> acquireFence = new Fence(output.acquire_fence);

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            rc = getJpegSettings(request->settings);
            if (rc < 0) {
                ALOGE("%s: failed to get jpeg parameters", __func__);
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }

        // Block until the producer signals the buffer is ready for the HAL.
        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
        if (rc != OK) {
            ALOGE("%s: fence wait failed %d", __func__, rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    ALOGV("%s: %d", __func__, __LINE__);

    /* Update pending request list and pending buffers map */
    pthread_mutex_lock(&mRequestLock);
    PendingRequestInfo pendingRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
//    pendingRequest.metadata = NULL;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        requestedBuf.stream = request->output_buffers[i].stream;
        // buffer stays NULL until the channel delivers it (captureResultCb).
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    }
    mPendingRequestsList.push_back(pendingRequest);
    pthread_mutex_unlock(&mRequestLock);

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    // Call request on other streams
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        if (channel == NULL) {
            ALOGE("%s: invalid channel pointer for stream", __func__);
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // Snapshot: pass along the cached JPEG encode settings.
            rc = channel->request(output.buffer, frameNumber, mJpegSettings);
        } else {
            ALOGE("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
        }
        if (rc < 0)
            ALOGE("%s: request failed", __func__);
    }

    mFirstRequest = false;

    //Block on conditional variable: captureResultCb() clears mPendingRequest
    //and signals once enough buffers have drained, throttling the framework.
    pthread_mutex_lock(&mRequestLock);
    mPendingRequest = 1;
    while (mPendingRequest == 1) {
        pthread_cond_wait(&mRequestCond, &mRequestLock);
    }
    pthread_mutex_unlock(&mRequestLock);

    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : getMetadataVendorTagOps
 *
 * DESCRIPTION: stub -- no vendor tags are exported yet.
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::getMetadataVendorTagOps(
                    vendor_tag_query_ops_t* /*ops*/)
{
    /* Enable locks when we eventually add Vendor Tags */
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: stub -- dumpsys support not implemented yet.
 *
 * PARAMETERS :
 *
 *
 * RETURN     :
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}

/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
891 * 892 * RETURN : NONE 893 *==========================================================================*/ 894void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf, 895 camera3_stream_buffer_t *buffer, uint32_t frame_number) 896{ 897 pthread_mutex_lock(&mRequestLock); 898 899 if (metadata_buf) { 900 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer; 901 int32_t frame_number_valid = *(int32_t *) 902 POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata); 903 uint32_t frame_number = *(uint32_t *) 904 POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata); 905 const struct timeval *tv = (const struct timeval *) 906 POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata); 907 nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC + 908 tv->tv_usec * NSEC_PER_USEC; 909 910 if (!frame_number_valid) { 911 ALOGI("%s: Not a valid frame number, used as SOF only", __func__); 912 mMetadataChannel->bufDone(metadata_buf); 913 goto done_metadata; 914 } 915 ALOGE("%s: valid frame_number = %d, capture_time = %lld", __func__, 916 frame_number, capture_time); 917 918 // Go through the pending requests info and send shutter/results to frameworks 919 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 920 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) { 921 camera3_capture_result_t result; 922 camera3_notify_msg_t notify_msg; 923 ALOGE("%s: frame_number in the list is %d", __func__, i->frame_number); 924 925 // Flush out all entries with less or equal frame numbers. 926 927 //TODO: Make sure shutter timestamp really reflects shutter timestamp. 928 //Right now it's the same as metadata timestamp 929 930 //TODO: When there is metadata drop, how do we derive the timestamp of 931 //dropped frames? 
For now, we fake the dropped timestamp by substracting 932 //from the reported timestamp 933 nsecs_t current_capture_time = capture_time - 934 (frame_number - i->frame_number) * NSEC_PER_33MSEC; 935 936 // Send shutter notify to frameworks 937 notify_msg.type = CAMERA3_MSG_SHUTTER; 938 notify_msg.message.shutter.frame_number = i->frame_number; 939 notify_msg.message.shutter.timestamp = current_capture_time; 940 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 941 ALOGE("%s: notify frame_number = %d, capture_time = %lld", __func__, 942 i->frame_number, capture_time); 943 944 // Send empty metadata with already filled buffers for dropped metadata 945 // and send valid metadata with already filled buffers for current metadata 946 if (i->frame_number < frame_number) { 947 CameraMetadata emptyMetadata(1, 0); 948 emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP, 949 ¤t_capture_time, 1); 950 result.result = emptyMetadata.release(); 951 } else { 952 result.result = translateCbMetadataToResultMetadata(metadata, 953 current_capture_time); 954 // Return metadata buffer 955 mMetadataChannel->bufDone(metadata_buf); 956 } 957 if (!result.result) { 958 ALOGE("%s: metadata is NULL", __func__); 959 } 960 result.frame_number = i->frame_number; 961 result.num_output_buffers = 0; 962 result.output_buffers = NULL; 963 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 964 j != i->buffers.end(); j++) { 965 if (j->buffer) { 966 result.num_output_buffers++; 967 } 968 } 969 970 if (result.num_output_buffers > 0) { 971 camera3_stream_buffer_t *result_buffers = 972 new camera3_stream_buffer_t[result.num_output_buffers]; 973 if (!result_buffers) { 974 ALOGE("%s: Fatal error: out of memory", __func__); 975 } 976 size_t result_buffers_idx = 0; 977 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 978 j != i->buffers.end(); j++) { 979 if (j->buffer) { 980 result_buffers[result_buffers_idx++] = *(j->buffer); 981 free(j->buffer); 982 
mPendingBuffersMap.editValueFor(j->stream)--; 983 } 984 } 985 result.output_buffers = result_buffers; 986 987 mCallbackOps->process_capture_result(mCallbackOps, &result); 988 ALOGE("%s: meta frame_number = %d, capture_time = %lld", __func__, 989 result.frame_number, 990 current_capture_time); 991 free_camera_metadata((camera_metadata_t *)result.result); 992 delete[] result_buffers; 993 } else { 994 mCallbackOps->process_capture_result(mCallbackOps, &result); 995 ALOGE("%s: meta frame_number = %d, capture_time = %lld", __func__, 996 result.frame_number, current_capture_time); 997 free_camera_metadata((camera_metadata_t *)result.result); 998 } 999 // erase the element from the list 1000 i = mPendingRequestsList.erase(i); 1001 } 1002 1003 1004done_metadata: 1005 bool max_buffers_dequeued = false; 1006 for (size_t i = 0; i < mPendingBuffersMap.size(); i++) { 1007 const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i); 1008 uint32_t queued_buffers = mPendingBuffersMap.valueAt(i); 1009 if (queued_buffers == stream->max_buffers) { 1010 max_buffers_dequeued = true; 1011 break; 1012 } 1013 } 1014 if (!max_buffers_dequeued) { 1015 // Unblock process_capture_request 1016 mPendingRequest = 0; 1017 pthread_cond_signal(&mRequestCond); 1018 } 1019 } else { 1020 // If the frame number doesn't exist in the pending request list, 1021 // directly send the buffer to the frameworks, and update pending buffers map 1022 // Otherwise, book-keep the buffer. 
1023 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1024 while (i != mPendingRequestsList.end() && i->frame_number != frame_number) 1025 i++; 1026 if (i == mPendingRequestsList.end()) { 1027 // Verify all pending requests frame_numbers are greater 1028 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); 1029 j != mPendingRequestsList.end(); j++) { 1030 if (j->frame_number < frame_number) { 1031 ALOGE("%s: Error: pending frame number %d is smaller than %d", 1032 __func__, j->frame_number, frame_number); 1033 } 1034 } 1035 camera3_capture_result_t result; 1036 result.result = NULL; 1037 result.frame_number = frame_number; 1038 result.num_output_buffers = 1; 1039 result.output_buffers = buffer; 1040 ALOGE("%s: result frame_number = %d, buffer = %p", 1041 __func__, frame_number, buffer); 1042 mPendingBuffersMap.editValueFor(buffer->stream)--; 1043 mCallbackOps->process_capture_result(mCallbackOps, &result); 1044 } else { 1045 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1046 j != i->buffers.end(); j++) { 1047 if (j->stream == buffer->stream) { 1048 if (j->buffer != NULL) { 1049 ALOGE("%s: Error: buffer is already set", __func__); 1050 } else { 1051 j->buffer = (camera3_stream_buffer_t *)malloc( 1052 sizeof(camera3_stream_buffer_t)); 1053 *(j->buffer) = *buffer; 1054 ALOGE("%s: cache buffer %p at result frame_number %d", 1055 __func__, buffer, frame_number); 1056 } 1057 } 1058 } 1059 } 1060 } 1061 1062 pthread_mutex_unlock(&mRequestLock); 1063 return; 1064} 1065 1066/*=========================================================================== 1067 * FUNCTION : translateCbMetadataToResultMetadata 1068 * 1069 * DESCRIPTION: 1070 * 1071 * PARAMETERS : 1072 * @metadata : metadata information from callback 1073 * 1074 * RETURN : camera_metadata_t* 1075 * metadata in a format specified by fwk 1076 *==========================================================================*/ 1077camera_metadata_t* 
1078QCamera3HardwareInterface::translateCbMetadataToResultMetadata 1079 (metadata_buffer_t *metadata, nsecs_t timestamp) 1080{ 1081 CameraMetadata camMetadata; 1082 camera_metadata_t* resultMetadata; 1083 1084 1085 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 1086 1087 /*CAM_INTF_META_HISTOGRAM - TODO*/ 1088 /*cam_hist_stats_t *histogram = 1089 (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM, 1090 metadata);*/ 1091 1092 /*face detection*/ 1093 cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *) 1094 POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata); 1095 uint8_t numFaces = faceDetectionInfo->num_faces_detected; 1096 int32_t faceIds[numFaces]; 1097 uint8_t faceScores[numFaces]; 1098 int32_t faceRectangles[numFaces * 4]; 1099 int32_t faceLandmarks[numFaces * 6]; 1100 int j = 0, k = 0; 1101 for (int i = 0; i < numFaces; i++) { 1102 faceIds[i] = faceDetectionInfo->faces[i].face_id; 1103 faceScores[i] = faceDetectionInfo->faces[i].score; 1104 convertRegions(faceDetectionInfo->faces[i].face_boundary, 1105 faceRectangles+j, -1); 1106 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 1107 j+= 4; 1108 k+= 6; 1109 } 1110 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 1111 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 1112 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, 1113 faceRectangles, numFaces*4); 1114 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1115 faceLandmarks, numFaces*6); 1116 1117 1118 /*autofocus - TODO*/ 1119 /*cam_auto_focus_data_t *afData =(cam_auto_focus_data_t *) 1120 POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/ 1121 1122 uint8_t *color_correct_mode = 1123 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1124 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1125 1126 int32_t *ae_precapture_id = 1127 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1128 
camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1129 1130 /*aec regions*/ 1131 cam_area_t *hAeRegions = 1132 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1133 int32_t aeRegions[5]; 1134 convertRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1135 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1136 1137 uint8_t *ae_state = 1138 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1139 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1140 1141 uint8_t *focusMode = 1142 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1143 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1144 1145 /*af regions*/ 1146 cam_area_t *hAfRegions = 1147 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1148 int32_t afRegions[5]; 1149 convertRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1150 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1151 1152 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1153 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1154 1155 int32_t *afTriggerId = 1156 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1157 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1158 1159 uint8_t *whiteBalance = 1160 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1161 camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1); 1162 1163 /*awb regions*/ 1164 cam_area_t *hAwbRegions = 1165 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1166 int32_t awbRegions[5]; 1167 convertRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1168 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1169 1170 uint8_t *whiteBalanceState = 1171 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1172 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1173 1174 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata); 
1175 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1176 1177 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata); 1178 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1179 1180 uint8_t *flashPower = 1181 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1182 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1183 1184 int64_t *flashFiringTime = 1185 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1186 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1187 1188 /*int32_t *ledMode = 1189 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1190 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1191 1192 uint8_t *flashState = 1193 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1194 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1195 1196 uint8_t *hotPixelMode = 1197 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1198 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1199 1200 float *lensAperture = 1201 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1202 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1203 1204 float *filterDensity = 1205 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1206 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1207 1208 float *focalLength = 1209 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1210 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1211 1212 float *focusDistance = 1213 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1214 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1215 1216 float *focusRange = 1217 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1218 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1219 1220 uint8_t *opticalStab = 1221 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1222 
camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1223 1224 /*int32_t *focusState = 1225 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1226 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1227 1228 uint8_t *noiseRedMode = 1229 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1230 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1231 1232 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1233 1234 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1235 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1236 int32_t scalerCropRegion[3]; 1237 scalerCropRegion[0] = hScalerCropRegion->left; 1238 scalerCropRegion[1] = hScalerCropRegion->top; 1239 scalerCropRegion[2] = hScalerCropRegion->width; 1240 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 3); 1241 1242 int64_t *sensorExpTime = 1243 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1244 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1245 1246 int64_t *sensorFameDuration = 1247 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1248 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1249 1250 int32_t *sensorSensitivity = 1251 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1252 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1253 1254 uint8_t *shadingMode = 1255 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1256 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1257 1258 uint8_t *faceDetectMode = 1259 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1260 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1); 1261 1262 uint8_t *histogramMode = 1263 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1264 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1265 
1266 uint8_t *sharpnessMapMode = 1267 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1268 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1269 sharpnessMapMode, 1); 1270 1271 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1272 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1273 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1274 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1275 (int32_t*)sharpnessMap->sharpness, 1276 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1277 1278 resultMetadata = camMetadata.release(); 1279 return resultMetadata; 1280} 1281 1282/*=========================================================================== 1283 * FUNCTION : convertRegions 1284 * 1285 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1286 * 1287 * PARAMETERS : 1288 * @rect : cam_rect_t struct to convert 1289 * @region : int32_t destination array 1290 * @weight : if we are converting from cam_area_t, weight is valid 1291 * else weight = -1 1292 * 1293 *==========================================================================*/ 1294void QCamera3HardwareInterface::convertRegions(cam_rect_t rect, int32_t* region, int weight){ 1295 region[0] = rect.left; 1296 region[1] = rect.top; 1297 region[2] = rect.width; 1298 region[3] = rect.height; 1299 if (weight > -1) { 1300 region[4] = weight; 1301 } 1302} 1303/*=========================================================================== 1304 * FUNCTION : convertLandmarks 1305 * 1306 * DESCRIPTION: helper method to extract the landmarks from face detection info 1307 * 1308 * PARAMETERS : 1309 * @face : cam_rect_t struct to convert 1310 * @landmarks : int32_t destination array 1311 * 1312 * 1313 *==========================================================================*/ 1314void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1315{ 1316 landmarks[0] = face.left_eye_center.x; 1317 landmarks[1] = 
face.left_eye_center.y; 1318 landmarks[2] = face.right_eye_center.y; 1319 landmarks[3] = face.right_eye_center.y; 1320 landmarks[4] = face.mouth_center.x; 1321 landmarks[5] = face.mouth_center.y; 1322} 1323 1324#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1325/*=========================================================================== 1326 * FUNCTION : initCapabilities 1327 * 1328 * DESCRIPTION: initialize camera capabilities in static data struct 1329 * 1330 * PARAMETERS : 1331 * @cameraId : camera Id 1332 * 1333 * RETURN : int32_t type of status 1334 * NO_ERROR -- success 1335 * none-zero failure code 1336 *==========================================================================*/ 1337int QCamera3HardwareInterface::initCapabilities(int cameraId) 1338{ 1339 int rc = 0; 1340 mm_camera_vtbl_t *cameraHandle = NULL; 1341 QCamera3HeapMemory *capabilityHeap = NULL; 1342 1343 cameraHandle = camera_open(cameraId); 1344 if (!cameraHandle) { 1345 ALOGE("%s: camera_open failed", __func__); 1346 rc = -1; 1347 goto open_failed; 1348 } 1349 1350 capabilityHeap = new QCamera3HeapMemory(); 1351 if (capabilityHeap == NULL) { 1352 ALOGE("%s: creation of capabilityHeap failed", __func__); 1353 goto heap_creation_failed; 1354 } 1355 /* Allocate memory for capability buffer */ 1356 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1357 if(rc != OK) { 1358 ALOGE("%s: No memory for cappability", __func__); 1359 goto allocate_failed; 1360 } 1361 1362 /* Map memory for capability buffer */ 1363 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1364 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1365 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1366 capabilityHeap->getFd(0), 1367 sizeof(cam_capability_t)); 1368 if(rc < 0) { 1369 ALOGE("%s: failed to map capability buffer", __func__); 1370 goto map_failed; 1371 } 1372 1373 /* Query Capability */ 1374 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1375 if(rc < 0) { 1376 
ALOGE("%s: failed to query capability",__func__); 1377 goto query_failed; 1378 } 1379 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1380 if (!gCamCapability[cameraId]) { 1381 ALOGE("%s: out of memory", __func__); 1382 goto query_failed; 1383 } 1384 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1385 sizeof(cam_capability_t)); 1386 rc = 0; 1387 1388query_failed: 1389 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1390 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1391map_failed: 1392 capabilityHeap->deallocate(); 1393allocate_failed: 1394 delete capabilityHeap; 1395heap_creation_failed: 1396 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1397 cameraHandle = NULL; 1398open_failed: 1399 return rc; 1400} 1401 1402/*=========================================================================== 1403 * FUNCTION : initParameters 1404 * 1405 * DESCRIPTION: initialize camera parameters 1406 * 1407 * PARAMETERS : 1408 * 1409 * RETURN : int32_t type of status 1410 * NO_ERROR -- success 1411 * none-zero failure code 1412 *==========================================================================*/ 1413int QCamera3HardwareInterface::initParameters() 1414{ 1415 int rc = 0; 1416 1417 //Allocate Set Param Buffer 1418 mParamHeap = new QCamera3HeapMemory(); 1419 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1420 if(rc != OK) { 1421 rc = NO_MEMORY; 1422 ALOGE("Failed to allocate SETPARM Heap memory"); 1423 delete mParamHeap; 1424 mParamHeap = NULL; 1425 return rc; 1426 } 1427 1428 //Map memory for parameters buffer 1429 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1430 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1431 mParamHeap->getFd(0), 1432 sizeof(parm_buffer_t)); 1433 if(rc < 0) { 1434 ALOGE("%s:failed to map SETPARM buffer",__func__); 1435 rc = FAILED_TRANSACTION; 1436 mParamHeap->deallocate(); 1437 delete mParamHeap; 1438 mParamHeap = NULL; 1439 return rc; 1440 } 1441 1442 mParameters = 
(parm_buffer_t*) DATA_PTR(mParamHeap,0);
    return rc;
}

/*===========================================================================
 * FUNCTION   : deinitParameters
 *
 * DESCRIPTION: de-initialize camera parameters
 *
 * PARAMETERS :
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Undo the map_buf done in initParameters.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into the heap freed above; clear the alias.
    mParameters = NULL;
}

/*===========================================================================
 * FUNCTION   : initStaticMetadata
 *
 * DESCRIPTION: initialize the static metadata
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *
 * RETURN     : int32_t type of status
 *              0  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
{
    int rc = 0;
    CameraMetadata staticInfo;
    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
    /*HAL 3 only*/
    #ifdef HAL_3_CAPABILITIES
    // Publish lens/sensor capabilities straight from the queried
    // cam_capability_t for this camera.
    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                    &gCamCapability[cameraId]->min_focus_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                    &gCamCapability[cameraId]->hyper_focal_distance, 1);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
                      gCamCapability[cameraId]->focal_lengths,
                      gCamCapability[cameraId]->focal_lengths_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                      gCamCapability[cameraId]->apertures,
                      gCamCapability[cameraId]->apertures_count);

    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
gCamCapability[cameraId]->filter_densities, 1503 gCamCapability[cameraId]->filter_densities_count); 1504 1505 1506 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1507 (int*)gCamCapability[cameraId]->optical_stab_modes, 1508 gCamCapability[cameraId]->optical_stab_modes_count); 1509 1510 staticInfo.update(ANDROID_LENS_POSITION, 1511 gCamCapability[cameraId]->lens_position, 1512 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1513 1514 static const int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1515 gCamCapability[cameraId]->lens_shading_map_size.height}; 1516 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1517 lens_shading_map_size, 1518 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1519 1520 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1521 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1522 1523 static const int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1524 gCamCapability[cameraId]->geo_correction_map_size.height}; 1525 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1526 geo_correction_map_size, 1527 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1528 1529 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1530 gCamCapability[cameraId]->geo_correction_map, 1531 sizeof(geo_correction_map)/sizeof(float)); 1532 1533 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1534 gCamCapability[cameraId]->sensor_physical_size, 2); 1535 1536 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1537 gCamCapability[cameraId]->exposure_time_range, 2); 1538 1539 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1540 &gCamCapability[cameraId]->max_frame_duration, 1); 1541 1542 1543 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1544 (int*)&gCamCapability[cameraId]->color_arrangement, 1); 1545 1546 static const 
int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1547 gCamCapability[cameraId]->pixel_array_size.height}; 1548 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1549 pixel_array_size, 2); 1550 1551 static const int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width, 1552 gCamCapability[cameraId]->active_array_size.height}; 1553 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1554 active_array_size, 2); 1555 1556 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1557 &gCamCapability[cameraId]->white_level, 1); 1558 1559 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1560 gCamCapability[cameraId]->black_level_pattern, 4); 1561 1562 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1563 &gCamCapability[cameraId]->flash_charge_duration, 1); 1564 1565 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1566 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1567 1568 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1569 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1); 1570 1571 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1572 &gCamCapability[cameraId]->histogram_size, 1); 1573 1574 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1575 &gCamCapability[cameraId]->max_histogram_count, 1); 1576 1577 static const int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1578 gCamCapability[cameraId]->sharpness_map_size.height}; 1579 1580 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1581 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1582 1583 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1584 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1585 1586 1587 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1588 &gCamCapability[cameraId]->raw_min_duration, 1589 1); 1590 1591 static int32_t scalar_formats[CAM_FORMAT_MAX]; 1592 for 
(int i = 0; i < gCamCapability[cameraId]->supported_scalar_format_cnt; i++) { 1593 scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]); 1594 } 1595 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1596 scalar_formats, 1597 gCamCapability[cameraId]->supported_scalar_format_cnt); 1598 1599 static int32_t available_processed_sizes[CAM_FORMAT_MAX]; 1600 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1601 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1602 available_processed_sizes); 1603 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1604 available_processed_sizes, 1605 gCamCapability[cameraId]->supported_sizes_tbl_cnt); 1606 1607 static float available_fps_ranges[gCamCapability[cameraId]->fps_ranges_tbl_cnt]; 1608 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1609 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1610 available_fps_ranges); 1611 #else 1612 const float minFocusDistance = 0; 1613 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1614 &minFocusDistance, 1); 1615 1616 const float hyperFocusDistance = 0; 1617 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1618 &hyperFocusDistance, 1); 1619 1620 static const float focalLength = 3.30f; 1621 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1622 &focalLength, 1623 1); 1624 1625 static const float aperture = 2.8f; 1626 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1627 &aperture, 1628 1); 1629 1630 static const float filterDensity = 0; 1631 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1632 &filterDensity, 1); 1633 1634 static const uint8_t availableOpticalStabilization = 1635 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1636 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1637 &availableOpticalStabilization, 1); 1638 1639 float lensPosition[3]; 1640 if (facingBack) { 1641 // Back-facing camera is center-top on device 1642 lensPosition[0] = 0; 1643 
lensPosition[1] = 20; 1644 lensPosition[2] = -5; 1645 } else { 1646 // Front-facing camera is center-right on device 1647 lensPosition[0] = 20; 1648 lensPosition[1] = 20; 1649 lensPosition[2] = 0; 1650 } 1651 staticInfo.update(ANDROID_LENS_POSITION, 1652 lensPosition, 1653 sizeof(lensPosition)/ sizeof(float)); 1654 1655 static const int32_t lensShadingMapSize[] = {1, 1}; 1656 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1657 lensShadingMapSize, 1658 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1659 1660 static const float lensShadingMap[3 * 1 * 1 ] = 1661 { 1.f, 1.f, 1.f }; 1662 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, 1663 lensShadingMap, 1664 sizeof(lensShadingMap)/ sizeof(float)); 1665 1666 static const int32_t geometricCorrectionMapSize[] = {2, 2}; 1667 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1668 geometricCorrectionMapSize, 1669 sizeof(geometricCorrectionMapSize)/sizeof(int32_t)); 1670 1671 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = { 1672 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1673 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1674 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 1675 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; 1676 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1677 geometricCorrectionMap, 1678 sizeof(geometricCorrectionMap)/ sizeof(float)); 1679 1680 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; 1681 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1682 sensorPhysicalSize, 2); 1683 1684 const int64_t exposureTimeRange[2] = {1000L, 30000000000L} ; // 1 us - 30 sec 1685 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1686 exposureTimeRange, 2); 1687 1688 const int64_t frameDurationRange[2] = {33331760L, 30000000000L}; 1689 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1690 frameDurationRange, 1); 1691 1692 const uint8_t colorFilterArrangement = 1693 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; 1694 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1695 
&colorFilterArrangement, 1); 1696 1697 const int resolution[2] = {640, 480}; 1698 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1699 resolution, 2); 1700 1701 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1702 resolution, 2); 1703 1704 const uint32_t whiteLevel = 4000; 1705 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1706 (int32_t*)&whiteLevel, 1); 1707 1708 static const int32_t blackLevelPattern[4] = { 1709 1000, 1000, 1710 1000, 1000 }; 1711 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1712 blackLevelPattern, 4); 1713 1714 static const int64_t flashChargeDuration = 0; 1715 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1716 &flashChargeDuration, 1); 1717 1718 static const int32_t tonemapCurvePoints = 128; 1719 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1720 &tonemapCurvePoints, 1); 1721 1722 static const int32_t maxFaceCount = 0; 1723 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1724 &maxFaceCount, 1); 1725 1726 static const int32_t histogramSize = 64; 1727 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1728 &histogramSize, 1); 1729 1730 static const int32_t maxHistogramCount = 1000; 1731 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1732 &maxHistogramCount, 1); 1733 1734 static const int32_t sharpnessMapSize[2] = {64, 64}; 1735 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1736 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); 1737 1738 static const int32_t maxSharpnessMapValue = 1000; 1739 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1740 &maxSharpnessMapValue, 1); 1741 1742 static const uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1743 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1744 availableVstabModes, sizeof(availableVstabModes)); 1745 1746 const uint64_t availableRawMinDurations[1] = {33331760L}; 1747 
staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1748 (int64_t*)&availableRawMinDurations, 1749 1); 1750 1751 const uint32_t availableFormats[4] = { 1752 HAL_PIXEL_FORMAT_RAW_SENSOR, 1753 HAL_PIXEL_FORMAT_BLOB, 1754 HAL_PIXEL_FORMAT_RGBA_8888, 1755 HAL_PIXEL_FORMAT_YCrCb_420_SP 1756 }; 1757 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1758 (int32_t*)availableFormats, 1759 4); 1760 1761 const uint32_t availableProcessedSizes[4] = {1280, 720, 640, 480}; 1762 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1763 (int32_t*)availableProcessedSizes, 1764 sizeof(availableProcessedSizes)/sizeof(int32_t)); 1765 1766 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1767 resolution, 1768 sizeof(resolution)/sizeof(int)); 1769 1770 static const uint8_t availableFaceDetectModes[] = { 1771 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1772 1773 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1774 availableFaceDetectModes, 1775 sizeof(availableFaceDetectModes)); 1776 1777 static const uint8_t availableSceneModes[] = { 1778 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED }; 1779 1780 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1781 availableSceneModes, sizeof(availableSceneModes)); 1782 1783 static const int32_t availableFpsRanges[] = {15, 30}; 1784 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1785 availableFpsRanges, sizeof(availableFpsRanges)/sizeof(int32_t)); 1786 1787 static const uint8_t availableEffectsModes[] = { 1788 ANDROID_CONTROL_EFFECT_MODE_OFF }; 1789 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1790 availableEffectsModes, sizeof(availableEffectsModes)); 1791 1792 static const uint8_t availableAntibandingModes[] = { 1793 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF }; 1794 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1795 availableAntibandingModes, sizeof(availableAntibandingModes)); 1796 1797 static const uint8_t flashAvailable = 0; 1798 
staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1799 &flashAvailable, sizeof(flashAvailable)); 1800 1801 static const int32_t max3aRegions = 0; 1802 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1803 &max3aRegions, 1); 1804 1805 static const camera_metadata_rational exposureCompensationStep = { 1806 1, 3 1807 }; 1808 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1809 &exposureCompensationStep, 1); 1810 1811 static const int32_t jpegThumbnailSizes[] = { 1812 0, 0, 1813 160, 120, 1814 320, 240 1815 }; 1816 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1817 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1818 1819 static const int32_t maxZoom = 10; 1820 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1821 &maxZoom, 1); 1822 1823 static int64_t jpegMinDuration[] = {33331760L, 30000000000L}; 1824 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1825 jpegMinDuration, 1826 sizeof(jpegMinDuration)/sizeof(uint64_t)); 1827 #endif 1828 /*HAL 1 and HAL 3 common*/ 1829 static const int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1830 gCamCapability[cameraId]->raw_dim.height}; 1831 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1832 raw_size, 1833 sizeof(raw_size)/sizeof(uint32_t)); 1834 1835 static const int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1836 gCamCapability[cameraId]->exposure_compensation_max}; 1837 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1838 exposureCompensationRange, 1839 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1840 1841 uint8_t lensFacing = (facingBack) ? 
1842 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1843 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1844 1845 static int32_t available_jpeg_sizes[MAX_SIZES_CNT]; 1846 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1847 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1848 available_jpeg_sizes); 1849 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1850 available_jpeg_sizes, 1851 gCamCapability[cameraId]->picture_sizes_tbl_cnt); 1852 1853 static int32_t max_jpeg_size = 0; 1854 int temp_width, temp_height; 1855 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1856 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1857 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1858 if (temp_width * temp_height > max_jpeg_size ) { 1859 max_jpeg_size = temp_width * temp_height; 1860 } 1861 } 1862 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1863 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1864 &max_jpeg_size, 1); 1865 1866 static uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1867 int32_t size = 0; 1868 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1869 int val = lookupFwkName(EFFECT_MODES_MAP, 1870 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1871 gCamCapability[cameraId]->supported_effects[i]); 1872 if (val != NAME_NOT_FOUND) { 1873 avail_effects[size] = (uint8_t)val; 1874 size++; 1875 } 1876 } 1877 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1878 avail_effects, 1879 size); 1880 1881 static uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1882 size = 0; 1883 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1884 int val = lookupFwkName(SCENE_MODES_MAP, 1885 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1886 gCamCapability[cameraId]->supported_scene_modes[i]); 1887 if (val != NAME_NOT_FOUND) { 1888 avail_scene_modes[size] = (uint8_t)val; 1889 size++; 1890 } 1891 } 1892 
staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1893 avail_scene_modes, 1894 size); 1895 1896 static uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1897 size = 0; 1898 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1899 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1900 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1901 gCamCapability[cameraId]->supported_antibandings[i]); 1902 if (val != NAME_NOT_FOUND) { 1903 avail_antibanding_modes[size] = (uint8_t)val; 1904 size++; 1905 } 1906 1907 } 1908 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1909 avail_antibanding_modes, 1910 size); 1911 1912 ALOGE("%s: %d", __func__, __LINE__); 1913 static uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1914 size = 0; 1915 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1916 int val = lookupFwkName(FOCUS_MODES_MAP, 1917 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1918 gCamCapability[cameraId]->supported_focus_modes[i]); 1919 if (val != NAME_NOT_FOUND) { 1920 avail_af_modes[size] = (uint8_t)val; 1921 size++; 1922 } 1923 } 1924 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1925 avail_af_modes, 1926 size); 1927 1928 static uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1929 size = 0; 1930 for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1931 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1932 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1933 gCamCapability[cameraId]->supported_white_balances[i]); 1934 if (val != NAME_NOT_FOUND) { 1935 avail_awb_modes[size] = (uint8_t)val; 1936 size++; 1937 } 1938 } 1939 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1940 avail_awb_modes, 1941 size); 1942 1943 static uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1944 size = 0; 1945 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1946 int val = lookupFwkName(FLASH_MODES_MAP, 
1947 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1948 gCamCapability[cameraId]->supported_flash_modes[i]); 1949 if (val != NAME_NOT_FOUND) { 1950 avail_flash_modes[size] = (uint8_t)val; 1951 size++; 1952 } 1953 } 1954 staticInfo.update(ANDROID_FLASH_MODE, 1955 avail_flash_modes, 1956 size); 1957 1958 /*so far fwk seems to support only 2 aec modes on and off*/ 1959 static const uint8_t avail_ae_modes[] = { 1960 ANDROID_CONTROL_AE_MODE_OFF, 1961 ANDROID_CONTROL_AE_MODE_ON 1962 }; 1963 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1964 avail_ae_modes, 1965 sizeof(avail_ae_modes)); 1966 1967 gStaticMetadata = staticInfo.release(); 1968 return rc; 1969} 1970 1971/*=========================================================================== 1972 * FUNCTION : makeTable 1973 * 1974 * DESCRIPTION: make a table of sizes 1975 * 1976 * PARAMETERS : 1977 * 1978 * 1979 * 1980 * RETURN : int32_t type of status 1981 * NO_ERROR -- success 1982 * none-zero failure code 1983 *==========================================================================*/ 1984void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 1985 int32_t* sizeTable) 1986{ 1987 int j = 0; 1988 for (int i = 0; i < size; i++) { 1989 sizeTable[j] = dimTable[i].width; 1990 sizeTable[j+1] = dimTable[i].height; 1991 j+=2; 1992 } 1993} 1994 1995/*=========================================================================== 1996 * FUNCTION : makeFPSTable 1997 * 1998 * DESCRIPTION: make a table of fps ranges 1999 * 2000 * PARAMETERS : 2001 * 2002 * 2003 * 2004 * RETURN : int32_t type of status 2005 * NO_ERROR -- success 2006 * none-zero failure code 2007 *==========================================================================*/ 2008void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 2009 float* fpsRangesTable) 2010{ 2011 int j = 0; 2012 for (int i = 0; i < size; i++) { 2013 fpsRangesTable[j] = fpsTable[i].min_fps; 2014 fpsRangesTable[j+1] = 
fpsTable[i].max_fps; 2015 j+=2; 2016 } 2017} 2018/*=========================================================================== 2019 * FUNCTION : getPreviewHalPixelFormat 2020 * 2021 * DESCRIPTION: convert the format to type recognized by framework 2022 * 2023 * PARAMETERS : format : the format from backend 2024 * 2025 ** RETURN : format recognized by framework 2026 * 2027 *==========================================================================*/ 2028int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2029{ 2030 int32_t halPixelFormat; 2031 2032 switch (format) { 2033 case CAM_FORMAT_YUV_420_NV12: 2034 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 2035 break; 2036 case CAM_FORMAT_YUV_420_NV21: 2037 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2038 break; 2039 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2040 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2041 break; 2042 case CAM_FORMAT_YUV_420_YV12: 2043 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2044 break; 2045 case CAM_FORMAT_YUV_422_NV16: 2046 case CAM_FORMAT_YUV_422_NV61: 2047 default: 2048 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2049 break; 2050 } 2051 return halPixelFormat; 2052} 2053 2054/*=========================================================================== 2055 * FUNCTION : AddSetParmEntryToBatch 2056 * 2057 * DESCRIPTION: add set parameter entry into batch 2058 * 2059 * PARAMETERS : 2060 * @p_table : ptr to parameter buffer 2061 * @paramType : parameter type 2062 * @paramLength : length of parameter value 2063 * @paramValue : ptr to parameter value 2064 * 2065 * RETURN : int32_t type of status 2066 * NO_ERROR -- success 2067 * none-zero failure code 2068 *==========================================================================*/ 2069int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2070 cam_intf_parm_type_t paramType, 2071 uint32_t paramLength, 2072 void *paramValue) 2073{ 2074 int position = paramType; 2075 int current, next; 2076 2077 
/************************************************************************* 2078 * Code to take care of linking next flags * 2079 *************************************************************************/ 2080 current = GET_FIRST_PARAM_ID(p_table); 2081 if (position == current){ 2082 //DO NOTHING 2083 } else if (position < current){ 2084 SET_NEXT_PARAM_ID(position, p_table, current); 2085 SET_FIRST_PARAM_ID(p_table, position); 2086 } else { 2087 /* Search for the position in the linked list where we need to slot in*/ 2088 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2089 current = GET_NEXT_PARAM_ID(current, p_table); 2090 2091 /*If node already exists no need to alter linking*/ 2092 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2093 next = GET_NEXT_PARAM_ID(current, p_table); 2094 SET_NEXT_PARAM_ID(current, p_table, position); 2095 SET_NEXT_PARAM_ID(position, p_table, next); 2096 } 2097 } 2098 2099 /************************************************************************* 2100 * Copy contents into entry * 2101 *************************************************************************/ 2102 2103 if (paramLength > sizeof(parm_type_t)) { 2104 ALOGE("%s:Size of input larger than max entry size",__func__); 2105 return BAD_VALUE; 2106 } 2107 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2108 return NO_ERROR; 2109} 2110 2111/*=========================================================================== 2112 * FUNCTION : lookupFwkName 2113 * 2114 * DESCRIPTION: In case the enum is not same in fwk and backend 2115 * make sure the parameter is correctly propogated 2116 * 2117 * PARAMETERS : 2118 * @arr : map between the two enums 2119 * @len : len of the map 2120 * @hal_name : name of the hal_parm to map 2121 * 2122 * RETURN : int type of status 2123 * fwk_name -- success 2124 * none-zero failure code 2125 *==========================================================================*/ 2126int8_t QCamera3HardwareInterface::lookupFwkName(const 
QCameraMap arr[], 2127 int len, int hal_name) 2128{ 2129 2130 for (int i = 0; i < len; i++) { 2131 if (arr[i].hal_name == hal_name) 2132 return arr[i].fwk_name; 2133 } 2134 ALOGE("%s: Cannot find matching framework type", __func__); 2135 return NAME_NOT_FOUND; 2136} 2137 2138/*=========================================================================== 2139 * FUNCTION : lookupHalName 2140 * 2141 * DESCRIPTION: In case the enum is not same in fwk and backend 2142 * make sure the parameter is correctly propogated 2143 * 2144 * PARAMETERS : 2145 * @arr : map between the two enums 2146 * @len : len of the map 2147 * @fwk_name : name of the hal_parm to map 2148 * 2149 * RETURN : int32_t type of status 2150 * hal_name -- success 2151 * none-zero failure code 2152 *==========================================================================*/ 2153int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2154 int len, int fwk_name) 2155{ 2156 for (int i = 0; i < len; i++) { 2157 if (arr[i].fwk_name == fwk_name) 2158 return arr[i].hal_name; 2159 } 2160 ALOGE("%s: Cannot find matching hal type", __func__); 2161 return NAME_NOT_FOUND; 2162} 2163 2164/*=========================================================================== 2165 * FUNCTION : getCapabilities 2166 * 2167 * DESCRIPTION: query camera capabilities 2168 * 2169 * PARAMETERS : 2170 * @cameraId : camera Id 2171 * @info : camera info struct to be filled in with camera capabilities 2172 * 2173 * RETURN : int32_t type of status 2174 * NO_ERROR -- success 2175 * none-zero failure code 2176 *==========================================================================*/ 2177int QCamera3HardwareInterface::getCamInfo(int cameraId, 2178 struct camera_info *info) 2179{ 2180 int rc = 0; 2181 2182 if (NULL == gCamCapability[cameraId]) { 2183 rc = initCapabilities(cameraId); 2184 if (rc < 0) { 2185 //pthread_mutex_unlock(&g_camlock); 2186 return rc; 2187 } 2188 } 2189 2190 if (NULL == gStaticMetadata) { 2191 rc = 
initStaticMetadata(cameraId); 2192 if (rc < 0) { 2193 return rc; 2194 } 2195 } 2196 2197 switch(gCamCapability[cameraId]->position) { 2198 case CAM_POSITION_BACK: 2199 info->facing = CAMERA_FACING_BACK; 2200 break; 2201 2202 case CAM_POSITION_FRONT: 2203 info->facing = CAMERA_FACING_FRONT; 2204 break; 2205 2206 default: 2207 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2208 rc = -1; 2209 break; 2210 } 2211 2212 2213 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2214 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2215 info->static_camera_characteristics = gStaticMetadata; 2216 2217 return rc; 2218} 2219 2220/*=========================================================================== 2221 * FUNCTION : translateMetadata 2222 * 2223 * DESCRIPTION: translate the metadata into camera_metadata_t 2224 * 2225 * PARAMETERS : type of the request 2226 * 2227 * 2228 * RETURN : success: camera_metadata_t* 2229 * failure: NULL 2230 * 2231 *==========================================================================*/ 2232camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2233{ 2234 pthread_mutex_lock(&mMutex); 2235 2236 if (mDefaultMetadata[type] != NULL) { 2237 pthread_mutex_unlock(&mMutex); 2238 return mDefaultMetadata[type]; 2239 } 2240 //first time we are handling this request 2241 //fill up the metadata structure using the wrapper class 2242 CameraMetadata settings; 2243 //translate from cam_capability_t to camera_metadata_tag_t 2244 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2245 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2246 2247 /*control*/ 2248 2249 uint8_t controlIntent = 0; 2250 switch (type) { 2251 case CAMERA3_TEMPLATE_PREVIEW: 2252 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2253 break; 2254 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2255 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2256 break; 2257 case 
CAMERA3_TEMPLATE_VIDEO_RECORD: 2258 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2259 break; 2260 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2261 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2262 break; 2263 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2264 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2265 break; 2266 default: 2267 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2268 break; 2269 } 2270 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2271 2272 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2273 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2274 2275 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2276 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2277 2278 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2279 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2280 2281 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2282 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2283 2284 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2285 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2286 2287 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2288 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2289 2290 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 
2291 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2292 2293 /*flash*/ 2294 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2295 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2296 2297 2298 /* lens */ 2299 static const float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2300 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2301 2302 if (gCamCapability[mCameraId]->filter_densities_count) { 2303 static const float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2304 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2305 gCamCapability[mCameraId]->filter_densities_count); 2306 } 2307 2308 /* TODO: Enable focus lengths once supported*/ 2309 /*if (gCamCapability[mCameraId]->focal_lengths_count) { 2310 static const float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0]; 2311 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2312 }*/ 2313 2314 mDefaultMetadata[type] = settings.release(); 2315 2316 pthread_mutex_unlock(&mMutex); 2317 return mDefaultMetadata[type]; 2318} 2319 2320/*=========================================================================== 2321 * FUNCTION : setFrameParameters 2322 * 2323 * DESCRIPTION: set parameters per frame as requested in the metadata from 2324 * framework 2325 * 2326 * PARAMETERS : 2327 * @settings : frame settings information from framework 2328 * 2329 * 2330 * RETURN : success: NO_ERROR 2331 * failure: 2332 *==========================================================================*/ 2333int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2334 const camera_metadata_t *settings) 2335{ 2336 /*translate from camera_metadata_t type to parm_type_t*/ 2337 int rc = 0; 2338 if (settings == NULL && mFirstRequest) { 2339 /*settings cannot be null for the first request*/ 2340 return BAD_VALUE; 2341 } 2342 2343 int32_t hal_version = CAM_HAL_V3; 2344 2345 memset(mParameters, 0, 
sizeof(parm_buffer_t)); 2346 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2347 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2348 sizeof(hal_version), &hal_version); 2349 2350 /*we need to update the frame number in the parameters*/ 2351 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2352 sizeof(frame_id), &frame_id); 2353 if (rc < 0) { 2354 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2355 return BAD_VALUE; 2356 } 2357 2358 if(settings != NULL){ 2359 rc = translateMetadataToParameters(settings); 2360 } 2361 /*set the parameters to backend*/ 2362 ALOGE("%s: %d", __func__, __LINE__); 2363 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2364 return rc; 2365} 2366 2367/*=========================================================================== 2368 * FUNCTION : translateMetadataToParameters 2369 * 2370 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2371 * 2372 * 2373 * PARAMETERS : 2374 * @settings : frame settings information from framework 2375 * 2376 * 2377 * RETURN : success: NO_ERROR 2378 * failure: 2379 *==========================================================================*/ 2380int QCamera3HardwareInterface::translateMetadataToParameters 2381 (const camera_metadata_t *settings) 2382{ 2383 int rc = 0; 2384 CameraMetadata frame_settings; 2385 frame_settings = settings; 2386 2387 2388 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2389 int32_t antibandingMode = 2390 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2391 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2392 sizeof(antibandingMode), &antibandingMode); 2393 } 2394 2395 /*int32_t expCompensation = frame_settings.find().data.i32[0]; 2396 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2397 sizeof(expCompensation), &expCompensation);*/ 2398 if 
(frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2399 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2400 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2401 sizeof(aeLock), &aeLock); 2402 } 2403 2404 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2405 cam_fps_range_t fps_range; 2406 fps_range.min_fps = 2407 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2408 fps_range.max_fps = 2409 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2410 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2411 sizeof(fps_range), &fps_range); 2412 } 2413 2414 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2415 uint8_t focusMode = 2416 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2417 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2418 sizeof(focusMode), &focusMode); 2419 } 2420 2421 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2422 uint8_t awbLock = 2423 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2424 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2425 sizeof(awbLock), &awbLock); 2426 } 2427 2428 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2429 uint8_t fwk_whiteLevel = 2430 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2431 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2432 sizeof(WHITE_BALANCE_MODES_MAP), 2433 fwk_whiteLevel); 2434 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2435 sizeof(whiteLevel), &whiteLevel); 2436 } 2437 2438 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2439 uint8_t fwk_effectMode = 2440 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2441 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2442 sizeof(EFFECT_MODES_MAP), 2443 fwk_effectMode); 2444 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2445 sizeof(effectMode), &effectMode); 2446 } 2447 2448 if 
(frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2449 uint8_t fwk_aeMode = 2450 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2451 uint8_t aeMode = lookupHalName(AUTO_EXPOSURE_MAP, 2452 sizeof(AUTO_EXPOSURE_MAP), 2453 fwk_aeMode); 2454 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2455 sizeof(aeMode), &aeMode); 2456 } 2457 2458 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2459 int32_t metaFrameNumber = 2460 frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2461 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2462 sizeof(metaFrameNumber), &metaFrameNumber); 2463 } 2464 2465 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2466 uint8_t colorCorrectMode = 2467 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2468 rc = 2469 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2470 sizeof(colorCorrectMode), &colorCorrectMode); 2471 } 2472 2473 uint8_t aecTrigger = CAM_AEC_TRIGGER_IDLE; 2474 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) { 2475 aecTrigger = 2476 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2477 } 2478 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2479 sizeof(aecTrigger), &aecTrigger); 2480 2481 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER)) { 2482 uint8_t afTrigger = 2483 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2484 rc = AddSetParmEntryToBatch(mParameters, 2485 CAM_INTF_META_AF_TRIGGER, sizeof(afTrigger), &afTrigger); 2486 } 2487 2488 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2489 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2490 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2491 sizeof(metaMode), &metaMode); 2492 } 2493 2494 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2495 int32_t demosaic = 2496 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2497 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2498 sizeof(demosaic), &demosaic); 2499 } 2500 2501 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2502 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2503 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2504 sizeof(edgeMode), &edgeMode); 2505 } 2506 2507 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2508 int32_t edgeStrength = 2509 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2510 rc = AddSetParmEntryToBatch(mParameters, 2511 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2512 } 2513 2514 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2515 uint8_t flashPower = 2516 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2517 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2518 sizeof(flashPower), &flashPower); 2519 } 2520 2521 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2522 int64_t flashFiringTime = 2523 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2524 rc = AddSetParmEntryToBatch(mParameters, 2525 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2526 } 2527 2528 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2529 uint8_t geometricMode = 2530 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2531 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2532 sizeof(geometricMode), &geometricMode); 2533 } 2534 2535 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2536 uint8_t geometricStrength = 2537 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2538 rc = AddSetParmEntryToBatch(mParameters, 2539 CAM_INTF_META_GEOMETRIC_STRENGTH, 2540 sizeof(geometricStrength), &geometricStrength); 2541 } 2542 2543 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2544 uint8_t hotPixelMode = 2545 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2546 rc = AddSetParmEntryToBatch(mParameters, 
CAM_INTF_META_HOTPIXEL_MODE, 2547 sizeof(hotPixelMode), &hotPixelMode); 2548 } 2549 2550 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2551 float lensAperture = 2552 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0]; 2553 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2554 sizeof(lensAperture), &lensAperture); 2555 } 2556 2557 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2558 float filterDensity = 2559 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2560 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2561 sizeof(filterDensity), &filterDensity); 2562 } 2563 2564 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2565 float focalLength = 2566 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2567 rc = AddSetParmEntryToBatch(mParameters, 2568 CAM_INTF_META_LENS_FOCAL_LENGTH, 2569 sizeof(focalLength), &focalLength); 2570 } 2571 2572 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2573 float focalDistance = 2574 frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2575 rc = AddSetParmEntryToBatch(mParameters, 2576 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2577 sizeof(focalDistance), &focalDistance); 2578 } 2579 2580 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2581 uint8_t optStabMode = 2582 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2583 rc = AddSetParmEntryToBatch(mParameters, 2584 CAM_INTF_META_LENS_OPT_STAB_MODE, 2585 sizeof(optStabMode), &optStabMode); 2586 } 2587 2588 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2589 uint8_t noiseRedMode = 2590 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2591 rc = AddSetParmEntryToBatch(mParameters, 2592 CAM_INTF_META_NOISE_REDUCTION_MODE, 2593 sizeof(noiseRedMode), &noiseRedMode); 2594 } 2595 2596 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2597 uint8_t noiseRedStrength = 2598 
frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2599 rc = AddSetParmEntryToBatch(mParameters, 2600 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2601 sizeof(noiseRedStrength), &noiseRedStrength); 2602 } 2603 2604 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2605 cam_crop_region_t scalerCropRegion; 2606 scalerCropRegion.left = 2607 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2608 scalerCropRegion.top = 2609 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2610 scalerCropRegion.width = 2611 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2612 rc = AddSetParmEntryToBatch(mParameters, 2613 CAM_INTF_META_SCALER_CROP_REGION, 2614 sizeof(scalerCropRegion), &scalerCropRegion); 2615 } 2616 2617 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2618 int64_t sensorExpTime = 2619 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2620 rc = AddSetParmEntryToBatch(mParameters, 2621 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2622 sizeof(sensorExpTime), &sensorExpTime); 2623 } 2624 2625 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2626 int64_t sensorFrameDuration = 2627 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2628 rc = AddSetParmEntryToBatch(mParameters, 2629 CAM_INTF_META_SENSOR_FRAME_DURATION, 2630 sizeof(sensorFrameDuration), &sensorFrameDuration); 2631 } 2632 2633 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2634 int32_t sensorSensitivity = 2635 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2636 rc = AddSetParmEntryToBatch(mParameters, 2637 CAM_INTF_META_SENSOR_SENSITIVITY, 2638 sizeof(sensorSensitivity), &sensorSensitivity); 2639 } 2640 2641 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2642 int32_t shadingMode = 2643 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2644 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2645 sizeof(shadingMode), &shadingMode); 2646 } 2647 2648 if 
(frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2649 uint8_t shadingStrength = 2650 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2651 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2652 sizeof(shadingStrength), &shadingStrength); 2653 } 2654 2655 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2656 uint8_t facedetectMode = 2657 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2658 rc = AddSetParmEntryToBatch(mParameters, 2659 CAM_INTF_META_STATS_FACEDETECT_MODE, 2660 sizeof(facedetectMode), &facedetectMode); 2661 } 2662 2663 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2664 uint8_t histogramMode = 2665 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2666 rc = AddSetParmEntryToBatch(mParameters, 2667 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2668 sizeof(histogramMode), &histogramMode); 2669 } 2670 2671 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2672 uint8_t sharpnessMapMode = 2673 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2674 rc = AddSetParmEntryToBatch(mParameters, 2675 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2676 sizeof(sharpnessMapMode), &sharpnessMapMode); 2677 } 2678 2679 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2680 uint8_t tonemapMode = 2681 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2682 rc = AddSetParmEntryToBatch(mParameters, 2683 CAM_INTF_META_TONEMAP_MODE, 2684 sizeof(tonemapMode), &tonemapMode); 2685 } 2686 2687 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2688 uint8_t captureIntent = 2689 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2690 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2691 sizeof(captureIntent), &captureIntent); 2692 } 2693 2694 return rc; 2695} 2696 2697/*=========================================================================== 2698 * FUNCTION : getJpegSettings 2699 * 2700 * DESCRIPTION: 
save the jpeg settings in the HAL 2701 * 2702 * 2703 * PARAMETERS : 2704 * @settings : frame settings information from framework 2705 * 2706 * 2707 * RETURN : success: NO_ERROR 2708 * failure: 2709 *==========================================================================*/ 2710int QCamera3HardwareInterface::getJpegSettings 2711 (const camera_metadata_t *settings) 2712{ 2713 if (mJpegSettings) { 2714 free(mJpegSettings); 2715 mJpegSettings = NULL; 2716 } 2717 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2718 CameraMetadata jpeg_settings; 2719 jpeg_settings = settings; 2720 2721 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2722 mJpegSettings->jpeg_orientation = 2723 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2724 } else { 2725 mJpegSettings->jpeg_orientation = 0; 2726 } 2727 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2728 mJpegSettings->jpeg_quality = 2729 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2730 } else { 2731 mJpegSettings->jpeg_quality = 85; 2732 } 2733 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2734 mJpegSettings->thumbnail_size.width = 2735 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2736 mJpegSettings->thumbnail_size.height = 2737 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2738 mJpegSettings->thumbnail_size.width = 320; 2739 mJpegSettings->thumbnail_size.height = 240; 2740 } else { 2741 mJpegSettings->thumbnail_size.width = 640; 2742 mJpegSettings->thumbnail_size.height = 480; 2743 } 2744 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2745 for (int i = 0; i < 3; i++) { 2746 mJpegSettings->gps_coordinates[i] = 2747 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2748 } 2749 } 2750 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2751 mJpegSettings->gps_timestamp = 2752 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2753 } 2754 2755 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2756 
mJpegSettings->gps_processing_method = 2757 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0]; 2758 } 2759 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2760 mJpegSettings->sensor_sensitivity = 2761 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2762 } 2763 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2764 mJpegSettings->lens_focal_length = 2765 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2766 } 2767 return 0; 2768} 2769 2770/*=========================================================================== 2771 * FUNCTION : captureResultCb 2772 * 2773 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 2774 * 2775 * PARAMETERS : 2776 * @frame : frame information from mm-camera-interface 2777 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 2778 * @userdata: userdata 2779 * 2780 * RETURN : NONE 2781 *==========================================================================*/ 2782void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 2783 camera3_stream_buffer_t *buffer, 2784 uint32_t frame_number, void *userdata) 2785{ 2786 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 2787 if (hw == NULL) { 2788 ALOGE("%s: Invalid hw %p", __func__, hw); 2789 return; 2790 } 2791 2792 hw->captureResultCb(metadata, buffer, frame_number); 2793 return; 2794} 2795 2796/*=========================================================================== 2797 * FUNCTION : initialize 2798 * 2799 * DESCRIPTION: Pass framework callback pointers to HAL 2800 * 2801 * PARAMETERS : 2802 * 2803 * 2804 * RETURN : Success : 0 2805 * Failure: -ENODEV 2806 *==========================================================================*/ 2807 2808int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 2809 const camera3_callback_ops_t *callback_ops) 2810{ 2811 ALOGE("%s: E", __func__); 2812 QCamera3HardwareInterface *hw = 
2813 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2814 if (!hw) { 2815 ALOGE("%s: NULL camera device", __func__); 2816 return -ENODEV; 2817 } 2818 2819 int rc = hw->initialize(callback_ops); 2820 ALOGE("%s: X", __func__); 2821 return rc; 2822} 2823 2824/*=========================================================================== 2825 * FUNCTION : configure_streams 2826 * 2827 * DESCRIPTION: 2828 * 2829 * PARAMETERS : 2830 * 2831 * 2832 * RETURN : Success: 0 2833 * Failure: -EINVAL (if stream configuration is invalid) 2834 * -ENODEV (fatal error) 2835 *==========================================================================*/ 2836 2837int QCamera3HardwareInterface::configure_streams( 2838 const struct camera3_device *device, 2839 camera3_stream_configuration_t *stream_list) 2840{ 2841 ALOGE("%s: E", __func__); 2842 QCamera3HardwareInterface *hw = 2843 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2844 if (!hw) { 2845 ALOGE("%s: NULL camera device", __func__); 2846 return -ENODEV; 2847 } 2848 int rc = hw->configureStreams(stream_list); 2849 ALOGE("%s: X", __func__); 2850 return rc; 2851} 2852 2853/*=========================================================================== 2854 * FUNCTION : register_stream_buffers 2855 * 2856 * DESCRIPTION: Register stream buffers with the device 2857 * 2858 * PARAMETERS : 2859 * 2860 * RETURN : 2861 *==========================================================================*/ 2862int QCamera3HardwareInterface::register_stream_buffers( 2863 const struct camera3_device *device, 2864 const camera3_stream_buffer_set_t *buffer_set) 2865{ 2866 ALOGE("%s: E", __func__); 2867 QCamera3HardwareInterface *hw = 2868 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2869 if (!hw) { 2870 ALOGE("%s: NULL camera device", __func__); 2871 return -ENODEV; 2872 } 2873 int rc = hw->registerStreamBuffers(buffer_set); 2874 ALOGE("%s: X", __func__); 2875 return rc; 2876} 2877 
2878/*=========================================================================== 2879 * FUNCTION : construct_default_request_settings 2880 * 2881 * DESCRIPTION: Configure a settings buffer to meet the required use case 2882 * 2883 * PARAMETERS : 2884 * 2885 * 2886 * RETURN : Success: Return valid metadata 2887 * Failure: Return NULL 2888 *==========================================================================*/ 2889const camera_metadata_t* QCamera3HardwareInterface:: 2890 construct_default_request_settings(const struct camera3_device *device, 2891 int type) 2892{ 2893 2894 ALOGE("%s: E", __func__); 2895 camera_metadata_t* fwk_metadata = NULL; 2896 QCamera3HardwareInterface *hw = 2897 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2898 if (!hw) { 2899 ALOGE("%s: NULL camera device", __func__); 2900 return NULL; 2901 } 2902 2903 fwk_metadata = hw->translateCapabilityToMetadata(type); 2904 2905 ALOGE("%s: X", __func__); 2906 return fwk_metadata; 2907} 2908 2909/*=========================================================================== 2910 * FUNCTION : process_capture_request 2911 * 2912 * DESCRIPTION: 2913 * 2914 * PARAMETERS : 2915 * 2916 * 2917 * RETURN : 2918 *==========================================================================*/ 2919int QCamera3HardwareInterface::process_capture_request( 2920 const struct camera3_device *device, 2921 camera3_capture_request_t *request) 2922{ 2923 ALOGE("%s: E", __func__); 2924 QCamera3HardwareInterface *hw = 2925 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2926 if (!hw) { 2927 ALOGE("%s: NULL camera device", __func__); 2928 return -EINVAL; 2929 } 2930 2931 int rc = hw->processCaptureRequest(request); 2932 ALOGE("%s: X", __func__); 2933 return rc; 2934} 2935 2936/*=========================================================================== 2937 * FUNCTION : get_metadata_vendor_tag_ops 2938 * 2939 * DESCRIPTION: 2940 * 2941 * PARAMETERS : 2942 * 2943 * 2944 * RETURN : 2945 
*==========================================================================*/ 2946 2947void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 2948 const struct camera3_device *device, 2949 vendor_tag_query_ops_t* ops) 2950{ 2951 ALOGE("%s: E", __func__); 2952 QCamera3HardwareInterface *hw = 2953 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2954 if (!hw) { 2955 ALOGE("%s: NULL camera device", __func__); 2956 return; 2957 } 2958 2959 hw->getMetadataVendorTagOps(ops); 2960 ALOGE("%s: X", __func__); 2961 return; 2962} 2963 2964/*=========================================================================== 2965 * FUNCTION : dump 2966 * 2967 * DESCRIPTION: 2968 * 2969 * PARAMETERS : 2970 * 2971 * 2972 * RETURN : 2973 *==========================================================================*/ 2974 2975void QCamera3HardwareInterface::dump( 2976 const struct camera3_device *device, int fd) 2977{ 2978 ALOGE("%s: E", __func__); 2979 QCamera3HardwareInterface *hw = 2980 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2981 if (!hw) { 2982 ALOGE("%s: NULL camera device", __func__); 2983 return; 2984 } 2985 2986 hw->dump(fd); 2987 ALOGE("%s: X", __func__); 2988 return; 2989} 2990 2991/*=========================================================================== 2992 * FUNCTION : close_camera_device 2993 * 2994 * DESCRIPTION: 2995 * 2996 * PARAMETERS : 2997 * 2998 * 2999 * RETURN : 3000 *==========================================================================*/ 3001int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3002{ 3003 ALOGE("%s: E", __func__); 3004 int ret = NO_ERROR; 3005 QCamera3HardwareInterface *hw = 3006 reinterpret_cast<QCamera3HardwareInterface *>( 3007 reinterpret_cast<camera3_device_t *>(device)->priv); 3008 if (!hw) { 3009 ALOGE("NULL camera device"); 3010 return BAD_VALUE; 3011 } 3012 delete hw; 3013 ALOGE("%s: X", __func__); 3014 return ret; 3015} 3016 3017}; //end namespace qcamera 3018