// QCamera3HWI.cpp revision 47a3010f28bcbf34695ad53608b98be07b4618a6
/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 *       copyright notice, this list of conditions and the following
 *       disclaimer in the documentation and/or other materials provided
 *       with the distribution.
 *     * Neither the name of The Linux Foundation nor the names of its
 *       contributors may be used to endorse or promote products derived
 *       from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27* 28*/ 29 30#define LOG_TAG "QCamera3HWI" 31 32#include <cutils/properties.h> 33#include <hardware/camera3.h> 34#include <camera/CameraMetadata.h> 35#include <stdlib.h> 36#include <utils/Log.h> 37#include <utils/Errors.h> 38#include <ui/Fence.h> 39#include <gralloc_priv.h> 40#include "QCamera3HWI.h" 41#include "QCamera3Mem.h" 42#include "QCamera3Channel.h" 43#include "QCamera3PostProc.h" 44 45using namespace android; 46 47namespace qcamera { 48#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 49cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS]; 50parm_buffer_t *prevSettings; 51const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS]; 52 53const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = { 54 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF }, 55 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO }, 56 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE }, 57 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE }, 58 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA }, 59 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE }, 60 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD }, 61 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD }, 62 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA } 63}; 64 65const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = { 66 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF }, 67 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO }, 68 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT }, 69 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT }, 70 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT}, 71 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT }, 72 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT }, 73 { 
ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT }, 74 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE } 75}; 76 77const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = { 78 { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED, CAM_SCENE_MODE_OFF }, 79 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION }, 80 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT }, 81 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE }, 82 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT }, 83 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT }, 84 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE }, 85 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH }, 86 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW }, 87 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET }, 88 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE }, 89 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS }, 90 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS }, 91 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY }, 92 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT }, 93 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE} 94}; 95 96const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = { 97 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED }, 98 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO }, 99 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO }, 100 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF }, 101 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE }, 102 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO } 103}; 104 105const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = { 106 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF }, 
107 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ }, 108 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ }, 109 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO } 110}; 111 112const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = { 113 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF }, 114 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF }, 115 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO}, 116 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON }, 117 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO} 118}; 119 120const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = { 121 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF }, 122 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON }, 123 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH} 124}; 125 126 127camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = { 128 initialize: QCamera3HardwareInterface::initialize, 129 configure_streams: QCamera3HardwareInterface::configure_streams, 130 register_stream_buffers: QCamera3HardwareInterface::register_stream_buffers, 131 construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings, 132 process_capture_request: QCamera3HardwareInterface::process_capture_request, 133 get_metadata_vendor_tag_ops: QCamera3HardwareInterface::get_metadata_vendor_tag_ops, 134 dump: QCamera3HardwareInterface::dump, 135}; 136 137 138/*=========================================================================== 139 * FUNCTION : QCamera3HardwareInterface 140 * 141 * DESCRIPTION: constructor of QCamera3HardwareInterface 142 * 143 * PARAMETERS : 144 * @cameraId : camera ID 145 * 146 * RETURN : none 147 *==========================================================================*/ 148QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 149 : mCameraId(cameraId), 150 
mCameraHandle(NULL), 151 mCameraOpened(false), 152 mCallbackOps(NULL), 153 mInputStream(NULL), 154 mMetadataChannel(NULL), 155 mFirstRequest(false), 156 mParamHeap(NULL), 157 mParameters(NULL), 158 mJpegSettings(NULL) 159{ 160 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 161 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 162 mCameraDevice.common.close = close_camera_device; 163 mCameraDevice.ops = &mCameraOps; 164 mCameraDevice.priv = this; 165 gCamCapability[cameraId]->version = CAM_HAL_V3; 166 167 pthread_mutex_init(&mRequestLock, NULL); 168 pthread_cond_init(&mRequestCond, NULL); 169 mPendingRequest = 0; 170 171 pthread_mutex_init(&mMutex, NULL); 172 pthread_mutex_init(&mCaptureResultLock, NULL); 173 174 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 175 mDefaultMetadata[i] = NULL; 176} 177 178/*=========================================================================== 179 * FUNCTION : ~QCamera3HardwareInterface 180 * 181 * DESCRIPTION: destructor of QCamera3HardwareInterface 182 * 183 * PARAMETERS : none 184 * 185 * RETURN : none 186 *==========================================================================*/ 187QCamera3HardwareInterface::~QCamera3HardwareInterface() 188{ 189 ALOGV("%s: E", __func__); 190 /* Clean up all channels */ 191 mMetadataChannel->stop(); 192 delete mMetadataChannel; 193 mMetadataChannel = NULL; 194 /* We need to stop all streams before deleting any stream */ 195 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 196 it != mStreamInfo.end(); it++) { 197 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 198 channel->stop(); 199 } 200 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 201 it != mStreamInfo.end(); it++) { 202 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 203 delete channel; 204 free (*it); 205 } 206 207 if (mJpegSettings != NULL) { 208 free(mJpegSettings); 209 mJpegSettings = NULL; 210 } 211 deinitParameters(); 212 closeCamera(); 213 214 
for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 215 if (mDefaultMetadata[i]) 216 free_camera_metadata(mDefaultMetadata[i]); 217 218 pthread_mutex_destroy(&mRequestLock); 219 pthread_cond_destroy(&mRequestCond); 220 221 pthread_mutex_destroy(&mMutex); 222 pthread_mutex_destroy(&mCaptureResultLock); 223 ALOGV("%s: X", __func__); 224} 225 226/*=========================================================================== 227 * FUNCTION : openCamera 228 * 229 * DESCRIPTION: open camera 230 * 231 * PARAMETERS : 232 * @hw_device : double ptr for camera device struct 233 * 234 * RETURN : int32_t type of status 235 * NO_ERROR -- success 236 * none-zero failure code 237 *==========================================================================*/ 238int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 239{ 240 //int rc = NO_ERROR; 241 int rc = 0; 242 if (mCameraOpened) { 243 *hw_device = NULL; 244 return PERMISSION_DENIED; 245 } 246 247 rc = openCamera(); 248 if (rc == 0) 249 *hw_device = &mCameraDevice.common; 250 else 251 *hw_device = NULL; 252 return rc; 253} 254 255/*=========================================================================== 256 * FUNCTION : openCamera 257 * 258 * DESCRIPTION: open camera 259 * 260 * PARAMETERS : none 261 * 262 * RETURN : int32_t type of status 263 * NO_ERROR -- success 264 * none-zero failure code 265 *==========================================================================*/ 266int QCamera3HardwareInterface::openCamera() 267{ 268 if (mCameraHandle) { 269 ALOGE("Failure: Camera already opened"); 270 return ALREADY_EXISTS; 271 } 272 mCameraHandle = camera_open(mCameraId); 273 if (!mCameraHandle) { 274 ALOGE("camera_open failed."); 275 return UNKNOWN_ERROR; 276 } 277 278 mCameraOpened = true; 279 280 return NO_ERROR; 281} 282 283/*=========================================================================== 284 * FUNCTION : closeCamera 285 * 286 * DESCRIPTION: close camera 287 * 288 * PARAMETERS : none 289 * 290 * 
RETURN : int32_t type of status 291 * NO_ERROR -- success 292 * none-zero failure code 293 *==========================================================================*/ 294int QCamera3HardwareInterface::closeCamera() 295{ 296 int rc = NO_ERROR; 297 298 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 299 mCameraHandle = NULL; 300 mCameraOpened = false; 301 302 return rc; 303} 304 305/*=========================================================================== 306 * FUNCTION : initialize 307 * 308 * DESCRIPTION: Initialize frameworks callback functions 309 * 310 * PARAMETERS : 311 * @callback_ops : callback function to frameworks 312 * 313 * RETURN : 314 * 315 *==========================================================================*/ 316int QCamera3HardwareInterface::initialize( 317 const struct camera3_callback_ops *callback_ops) 318{ 319 int rc; 320 321 pthread_mutex_lock(&mMutex); 322 323 rc = initParameters(); 324 if (rc < 0) { 325 ALOGE("%s: initParamters failed %d", __func__, rc); 326 goto err1; 327 } 328 //Create metadata channel and initialize it 329 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 330 mCameraHandle->ops, captureResultCb, 331 &gCamCapability[mCameraId]->padding_info, this); 332 if (mMetadataChannel == NULL) { 333 ALOGE("%s: failed to allocate metadata channel", __func__); 334 rc = -ENOMEM; 335 goto err2; 336 } 337 rc = mMetadataChannel->initialize(); 338 if (rc < 0) { 339 ALOGE("%s: metadata channel initialization failed", __func__); 340 goto err3; 341 } 342 343 mCallbackOps = callback_ops; 344 345 pthread_mutex_unlock(&mMutex); 346 return 0; 347 348err3: 349 delete mMetadataChannel; 350 mMetadataChannel = NULL; 351err2: 352 deinitParameters(); 353err1: 354 pthread_mutex_unlock(&mMutex); 355 return rc; 356} 357 358/*=========================================================================== 359 * FUNCTION : configureStreams 360 * 361 * DESCRIPTION: Reset HAL camera device processing pipeline 
and set up new input 362 * and output streams. 363 * 364 * PARAMETERS : 365 * @stream_list : streams to be configured 366 * 367 * RETURN : 368 * 369 *==========================================================================*/ 370int QCamera3HardwareInterface::configureStreams( 371 camera3_stream_configuration_t *streamList) 372{ 373 int rc = 0; 374 pthread_mutex_lock(&mMutex); 375 376 // Sanity check stream_list 377 if (streamList == NULL) { 378 ALOGE("%s: NULL stream configuration", __func__); 379 pthread_mutex_unlock(&mMutex); 380 return BAD_VALUE; 381 } 382 383 if (streamList->streams == NULL) { 384 ALOGE("%s: NULL stream list", __func__); 385 pthread_mutex_unlock(&mMutex); 386 return BAD_VALUE; 387 } 388 389 if (streamList->num_streams < 1) { 390 ALOGE("%s: Bad number of streams requested: %d", __func__, 391 streamList->num_streams); 392 pthread_mutex_unlock(&mMutex); 393 return BAD_VALUE; 394 } 395 396 camera3_stream_t *inputStream = NULL; 397 /* first invalidate all the steams in the mStreamList 398 * if they appear again, they will be validated */ 399 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 400 it != mStreamInfo.end(); it++) { 401 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 402 channel->stop(); 403 (*it)->status = INVALID; 404 } 405 406 for (size_t i = 0; i < streamList->num_streams; i++) { 407 camera3_stream_t *newStream = streamList->streams[i]; 408 ALOGV("%s: newStream type = %d, stream format = %d", 409 __func__, newStream->stream_type, newStream->format); 410 //if the stream is in the mStreamList validate it 411 bool stream_exists = false; 412 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 413 it != mStreamInfo.end(); it++) { 414 if ((*it)->stream == newStream) { 415 QCamera3Channel *channel = 416 (QCamera3Channel*)(*it)->stream->priv; 417 stream_exists = true; 418 (*it)->status = RECONFIGURE; 419 /*delete the channel object associated with the stream because 420 we need to reconfigure*/ 421 delete 
channel; 422 (*it)->stream->priv = NULL; 423 } 424 } 425 if (!stream_exists) { 426 //new stream 427 stream_info_t* stream_info; 428 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 429 stream_info->stream = newStream; 430 stream_info->status = VALID; 431 stream_info->registered = 0; 432 mStreamInfo.push_back(stream_info); 433 } 434 if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 435 if (inputStream != NULL) { 436 ALOGE("%s: Multiple input streams requested!", __func__); 437 pthread_mutex_unlock(&mMutex); 438 return BAD_VALUE; 439 } 440 inputStream = newStream; 441 } 442 } 443 mInputStream = inputStream; 444 445 /*clean up invalid streams*/ 446 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 447 it != mStreamInfo.end();) { 448 if(((*it)->status) == INVALID){ 449 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 450 delete channel; 451 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 452 free(*it); 453 it = mStreamInfo.erase(it); 454 } else { 455 it++; 456 } 457 } 458 459 //mMetadataChannel->stop(); 460 461 /* Allocate channel objects for the requested streams */ 462 for (size_t i = 0; i < streamList->num_streams; i++) { 463 camera3_stream_t *newStream = streamList->streams[i]; 464 if (newStream->priv == NULL) { 465 //New stream, construct channel 466 switch (newStream->stream_type) { 467 case CAMERA3_STREAM_INPUT: 468 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 469 break; 470 case CAMERA3_STREAM_BIDIRECTIONAL: 471 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 472 GRALLOC_USAGE_HW_CAMERA_WRITE; 473 break; 474 case CAMERA3_STREAM_OUTPUT: 475 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 476 break; 477 default: 478 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 479 break; 480 } 481 482 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 483 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 484 QCamera3Channel *channel; 485 switch (newStream->format) { 486 case 
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 487 case HAL_PIXEL_FORMAT_YCbCr_420_888: 488 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 489 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 490 mCameraHandle->ops, captureResultCb, 491 &gCamCapability[mCameraId]->padding_info, this, newStream); 492 if (channel == NULL) { 493 ALOGE("%s: allocation of channel failed", __func__); 494 pthread_mutex_unlock(&mMutex); 495 return -ENOMEM; 496 } 497 498 newStream->priv = channel; 499 break; 500 case HAL_PIXEL_FORMAT_BLOB: 501 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 502 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 503 mCameraHandle->ops, captureResultCb, 504 &gCamCapability[mCameraId]->padding_info, this, newStream); 505 if (channel == NULL) { 506 ALOGE("%s: allocation of channel failed", __func__); 507 pthread_mutex_unlock(&mMutex); 508 return -ENOMEM; 509 } 510 newStream->priv = channel; 511 break; 512 513 //TODO: Add support for app consumed format? 
514 default: 515 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 516 break; 517 } 518 } 519 } else { 520 // Channel already exists for this stream 521 // Do nothing for now 522 } 523 } 524 /*For the streams to be reconfigured we need to register the buffers 525 since the framework wont*/ 526 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 527 it != mStreamInfo.end(); it++) { 528 if ((*it)->status == RECONFIGURE) { 529 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 530 /*only register buffers for streams that have already been 531 registered*/ 532 if ((*it)->registered) { 533 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 534 (*it)->buffer_set.buffers); 535 if (rc != NO_ERROR) { 536 ALOGE("%s: Failed to register the buffers of old stream,\ 537 rc = %d", __func__, rc); 538 } 539 ALOGD("%s: channel %p has %d buffers", 540 __func__, channel, (*it)->buffer_set.num_buffers); 541 } 542 } 543 544 ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream); 545 if (index == NAME_NOT_FOUND) { 546 mPendingBuffersMap.add((*it)->stream, 0); 547 } else { 548 mPendingBuffersMap.editValueAt(index) = 0; 549 } 550 } 551 552 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 553 mPendingRequestsList.clear(); 554 555 //settings/parameters don't carry over for new configureStreams 556 memset(mParameters, 0, sizeof(parm_buffer_t)); 557 mFirstRequest = true; 558 559 pthread_mutex_unlock(&mMutex); 560 return rc; 561} 562 563/*=========================================================================== 564 * FUNCTION : validateCaptureRequest 565 * 566 * DESCRIPTION: validate a capture request from camera service 567 * 568 * PARAMETERS : 569 * @request : request from framework to process 570 * 571 * RETURN : 572 * 573 *==========================================================================*/ 574int QCamera3HardwareInterface::validateCaptureRequest( 575 camera3_capture_request_t *request) 576{ 577 ssize_t idx 
= 0; 578 const camera3_stream_buffer_t *b; 579 CameraMetadata meta; 580 581 /* Sanity check the request */ 582 if (request == NULL) { 583 ALOGE("%s: NULL capture request", __func__); 584 return BAD_VALUE; 585 } 586 587 uint32_t frameNumber = request->frame_number; 588 if (request->input_buffer != NULL && 589 request->input_buffer->stream != mInputStream) { 590 ALOGE("%s: Request %d: Input buffer not from input stream!", 591 __FUNCTION__, frameNumber); 592 return BAD_VALUE; 593 } 594 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 595 ALOGE("%s: Request %d: No output buffers provided!", 596 __FUNCTION__, frameNumber); 597 return BAD_VALUE; 598 } 599 if (request->input_buffer != NULL) { 600 //TODO 601 ALOGE("%s: Not supporting input buffer yet", __func__); 602 return BAD_VALUE; 603 } 604 605 // Validate all buffers 606 b = request->output_buffers; 607 do { 608 QCamera3Channel *channel = 609 static_cast<QCamera3Channel*>(b->stream->priv); 610 if (channel == NULL) { 611 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 612 __func__, frameNumber, idx); 613 return BAD_VALUE; 614 } 615 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 616 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 617 __func__, frameNumber, idx); 618 return BAD_VALUE; 619 } 620 if (b->release_fence != -1) { 621 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 622 __func__, frameNumber, idx); 623 return BAD_VALUE; 624 } 625 if (b->buffer == NULL) { 626 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 627 __func__, frameNumber, idx); 628 return BAD_VALUE; 629 } 630 idx++; 631 b = request->output_buffers + idx; 632 } while (idx < (ssize_t)request->num_output_buffers); 633 634 return NO_ERROR; 635} 636 637/*=========================================================================== 638 * FUNCTION : registerStreamBuffers 639 * 640 * DESCRIPTION: Register buffers for a given stream with the HAL device. 
641 * 642 * PARAMETERS : 643 * @stream_list : streams to be configured 644 * 645 * RETURN : 646 * 647 *==========================================================================*/ 648int QCamera3HardwareInterface::registerStreamBuffers( 649 const camera3_stream_buffer_set_t *buffer_set) 650{ 651 int rc = 0; 652 653 pthread_mutex_lock(&mMutex); 654 655 if (buffer_set == NULL) { 656 ALOGE("%s: Invalid buffer_set parameter.", __func__); 657 pthread_mutex_unlock(&mMutex); 658 return -EINVAL; 659 } 660 if (buffer_set->stream == NULL) { 661 ALOGE("%s: Invalid stream parameter.", __func__); 662 pthread_mutex_unlock(&mMutex); 663 return -EINVAL; 664 } 665 if (buffer_set->num_buffers < 1) { 666 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 667 pthread_mutex_unlock(&mMutex); 668 return -EINVAL; 669 } 670 if (buffer_set->buffers == NULL) { 671 ALOGE("%s: Invalid buffers parameter.", __func__); 672 pthread_mutex_unlock(&mMutex); 673 return -EINVAL; 674 } 675 676 camera3_stream_t *stream = buffer_set->stream; 677 QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 678 679 //set the buffer_set in the mStreamInfo array 680 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 681 it != mStreamInfo.end(); it++) { 682 if ((*it)->stream == stream) { 683 uint32_t numBuffers = buffer_set->num_buffers; 684 (*it)->buffer_set.stream = buffer_set->stream; 685 (*it)->buffer_set.num_buffers = numBuffers; 686 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 687 if ((*it)->buffer_set.buffers == NULL) { 688 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 689 pthread_mutex_unlock(&mMutex); 690 return -ENOMEM; 691 } 692 for (size_t j = 0; j < numBuffers; j++){ 693 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 694 } 695 (*it)->registered = 1; 696 } 697 } 698 699 if (stream->stream_type != CAMERA3_STREAM_OUTPUT) { 700 ALOGE("%s: not yet support non output type stream", __func__); 701 pthread_mutex_unlock(&mMutex); 702 
return -EINVAL; 703 } 704 rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers); 705 if (rc < 0) { 706 ALOGE("%s: registerBUffers for stream %p failed", __func__, stream); 707 pthread_mutex_unlock(&mMutex); 708 return -ENODEV; 709 } 710 711 pthread_mutex_unlock(&mMutex); 712 return NO_ERROR; 713} 714 715/*=========================================================================== 716 * FUNCTION : processCaptureRequest 717 * 718 * DESCRIPTION: process a capture request from camera service 719 * 720 * PARAMETERS : 721 * @request : request from framework to process 722 * 723 * RETURN : 724 * 725 *==========================================================================*/ 726int QCamera3HardwareInterface::processCaptureRequest( 727 camera3_capture_request_t *request) 728{ 729 int rc = NO_ERROR; 730 CameraMetadata meta; 731 732 pthread_mutex_lock(&mMutex); 733 734 rc = validateCaptureRequest(request); 735 if (rc != NO_ERROR) { 736 ALOGE("%s: incoming request is not valid", __func__); 737 pthread_mutex_unlock(&mMutex); 738 return rc; 739 } 740 741 uint32_t frameNumber = request->frame_number; 742 743 rc = setFrameParameters(request->frame_number, request->settings); 744 if (rc < 0) { 745 ALOGE("%s: fail to set frame parameters", __func__); 746 pthread_mutex_unlock(&mMutex); 747 return rc; 748 } 749 750 ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__, 751 request->num_output_buffers); 752 // Acquire all request buffers first 753 for (size_t i = 0; i < request->num_output_buffers; i++) { 754 const camera3_stream_buffer_t& output = request->output_buffers[i]; 755 sp<Fence> acquireFence = new Fence(output.acquire_fence); 756 757 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 758 //Call function to store local copy of jpeg data for encode params. 
759 rc = getJpegSettings(request->settings); 760 if (rc < 0) { 761 ALOGE("%s: failed to get jpeg parameters", __func__); 762 pthread_mutex_unlock(&mMutex); 763 return rc; 764 } 765 } 766 767 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 768 if (rc != OK) { 769 ALOGE("%s: fence wait failed %d", __func__, rc); 770 pthread_mutex_unlock(&mMutex); 771 return rc; 772 } 773 } 774 775 /* Update pending request list and pending buffers map */ 776 pthread_mutex_lock(&mRequestLock); 777 PendingRequestInfo pendingRequest; 778 pendingRequest.frame_number = frameNumber; 779 pendingRequest.num_buffers = request->num_output_buffers; 780 for (size_t i = 0; i < request->num_output_buffers; i++) { 781 RequestedBufferInfo requestedBuf; 782 requestedBuf.stream = request->output_buffers[i].stream; 783 requestedBuf.buffer = NULL; 784 pendingRequest.buffers.push_back(requestedBuf); 785 786 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 787 } 788 mPendingRequestsList.push_back(pendingRequest); 789 pthread_mutex_unlock(&mRequestLock); 790 791 // Notify metadata channel we receive a request 792 mMetadataChannel->request(NULL, frameNumber); 793 794 // Call request on other streams 795 for (size_t i = 0; i < request->num_output_buffers; i++) { 796 const camera3_stream_buffer_t& output = request->output_buffers[i]; 797 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 798 799 if (channel == NULL) { 800 ALOGE("%s: invalid channel pointer for stream", __func__); 801 continue; 802 } 803 804 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 805 rc = channel->request(output.buffer, frameNumber, mJpegSettings); 806 } else { 807 ALOGI("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber); 808 rc = channel->request(output.buffer, frameNumber); 809 } 810 if (rc < 0) 811 ALOGE("%s: request failed", __func__); 812 } 813 814 mFirstRequest = false; 815 816 //Block on conditional variable 817 pthread_mutex_lock(&mRequestLock); 818 
mPendingRequest = 1; 819 while (mPendingRequest == 1) { 820 pthread_cond_wait(&mRequestCond, &mRequestLock); 821 } 822 pthread_mutex_unlock(&mRequestLock); 823 824 pthread_mutex_unlock(&mMutex); 825 return rc; 826} 827 828/*=========================================================================== 829 * FUNCTION : getMetadataVendorTagOps 830 * 831 * DESCRIPTION: 832 * 833 * PARAMETERS : 834 * 835 * 836 * RETURN : 837 *==========================================================================*/ 838void QCamera3HardwareInterface::getMetadataVendorTagOps( 839 vendor_tag_query_ops_t* /*ops*/) 840{ 841 /* Enable locks when we eventually add Vendor Tags */ 842 /* 843 pthread_mutex_lock(&mMutex); 844 845 pthread_mutex_unlock(&mMutex); 846 */ 847 return; 848} 849 850/*=========================================================================== 851 * FUNCTION : dump 852 * 853 * DESCRIPTION: 854 * 855 * PARAMETERS : 856 * 857 * 858 * RETURN : 859 *==========================================================================*/ 860void QCamera3HardwareInterface::dump(int /*fd*/) 861{ 862 /*Enable lock when we implement this function*/ 863 /* 864 pthread_mutex_lock(&mMutex); 865 866 pthread_mutex_unlock(&mMutex); 867 */ 868 return; 869} 870 871/*=========================================================================== 872 * FUNCTION : captureResultCb 873 * 874 * DESCRIPTION: Callback handler for all capture result 875 * (streams, as well as metadata) 876 * 877 * PARAMETERS : 878 * @metadata : metadata information 879 * @buffer : actual gralloc buffer to be returned to frameworks. 880 * NULL if metadata. 
881 * 882 * RETURN : NONE 883 *==========================================================================*/ 884void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf, 885 camera3_stream_buffer_t *buffer, uint32_t frame_number) 886{ 887 pthread_mutex_lock(&mRequestLock); 888 889 if (metadata_buf) { 890 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer; 891 int32_t frame_number_valid = *(int32_t *) 892 POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata); 893 uint32_t frame_number = *(uint32_t *) 894 POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata); 895 const struct timeval *tv = (const struct timeval *) 896 POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata); 897 nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC + 898 tv->tv_usec * NSEC_PER_USEC; 899 900 if (!frame_number_valid) { 901 ALOGD("%s: Not a valid frame number, used as SOF only", __func__); 902 mMetadataChannel->bufDone(metadata_buf); 903 goto done_metadata; 904 } 905 ALOGD("%s: valid frame_number = %d, capture_time = %lld", __func__, 906 frame_number, capture_time); 907 908 // Go through the pending requests info and send shutter/results to frameworks 909 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 910 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) { 911 camera3_capture_result_t result; 912 camera3_notify_msg_t notify_msg; 913 ALOGD("%s: frame_number in the list is %d", __func__, i->frame_number); 914 915 // Flush out all entries with less or equal frame numbers. 916 917 //TODO: Make sure shutter timestamp really reflects shutter timestamp. 918 //Right now it's the same as metadata timestamp 919 920 //TODO: When there is metadata drop, how do we derive the timestamp of 921 //dropped frames? 
For now, we fake the dropped timestamp by substracting 922 //from the reported timestamp 923 nsecs_t current_capture_time = capture_time - 924 (frame_number - i->frame_number) * NSEC_PER_33MSEC; 925 926 // Send shutter notify to frameworks 927 notify_msg.type = CAMERA3_MSG_SHUTTER; 928 notify_msg.message.shutter.frame_number = i->frame_number; 929 notify_msg.message.shutter.timestamp = current_capture_time; 930 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 931 ALOGD("%s: notify frame_number = %d, capture_time = %lld", __func__, 932 i->frame_number, capture_time); 933 934 // Send empty metadata with already filled buffers for dropped metadata 935 // and send valid metadata with already filled buffers for current metadata 936 if (i->frame_number < frame_number) { 937 CameraMetadata emptyMetadata(1, 0); 938 emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP, 939 ¤t_capture_time, 1); 940 result.result = emptyMetadata.release(); 941 } else { 942 result.result = translateCbMetadataToResultMetadata(metadata, 943 current_capture_time); 944 // Return metadata buffer 945 mMetadataChannel->bufDone(metadata_buf); 946 } 947 if (!result.result) { 948 ALOGE("%s: metadata is NULL", __func__); 949 } 950 result.frame_number = i->frame_number; 951 result.num_output_buffers = 0; 952 result.output_buffers = NULL; 953 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 954 j != i->buffers.end(); j++) { 955 if (j->buffer) { 956 result.num_output_buffers++; 957 } 958 } 959 960 if (result.num_output_buffers > 0) { 961 camera3_stream_buffer_t *result_buffers = 962 new camera3_stream_buffer_t[result.num_output_buffers]; 963 if (!result_buffers) { 964 ALOGE("%s: Fatal error: out of memory", __func__); 965 } 966 size_t result_buffers_idx = 0; 967 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 968 j != i->buffers.end(); j++) { 969 if (j->buffer) { 970 result_buffers[result_buffers_idx++] = *(j->buffer); 971 free(j->buffer); 972 
mPendingBuffersMap.editValueFor(j->stream)--; 973 } 974 } 975 result.output_buffers = result_buffers; 976 977 mCallbackOps->process_capture_result(mCallbackOps, &result); 978 ALOGD("%s: meta frame_number = %d, capture_time = %lld", 979 __func__, result.frame_number, current_capture_time); 980 free_camera_metadata((camera_metadata_t *)result.result); 981 delete[] result_buffers; 982 } else { 983 mCallbackOps->process_capture_result(mCallbackOps, &result); 984 ALOGD("%s: meta frame_number = %d, capture_time = %lld", 985 __func__, result.frame_number, current_capture_time); 986 free_camera_metadata((camera_metadata_t *)result.result); 987 } 988 // erase the element from the list 989 i = mPendingRequestsList.erase(i); 990 } 991 992 993done_metadata: 994 bool max_buffers_dequeued = false; 995 for (size_t i = 0; i < mPendingBuffersMap.size(); i++) { 996 const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i); 997 uint32_t queued_buffers = mPendingBuffersMap.valueAt(i); 998 if (queued_buffers == stream->max_buffers) { 999 max_buffers_dequeued = true; 1000 break; 1001 } 1002 } 1003 if (!max_buffers_dequeued) { 1004 // Unblock process_capture_request 1005 mPendingRequest = 0; 1006 pthread_cond_signal(&mRequestCond); 1007 } 1008 } else { 1009 // If the frame number doesn't exist in the pending request list, 1010 // directly send the buffer to the frameworks, and update pending buffers map 1011 // Otherwise, book-keep the buffer. 
1012 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1013 while (i != mPendingRequestsList.end() && i->frame_number != frame_number) 1014 i++; 1015 if (i == mPendingRequestsList.end()) { 1016 // Verify all pending requests frame_numbers are greater 1017 for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin(); 1018 j != mPendingRequestsList.end(); j++) { 1019 if (j->frame_number < frame_number) { 1020 ALOGE("%s: Error: pending frame number %d is smaller than %d", 1021 __func__, j->frame_number, frame_number); 1022 } 1023 } 1024 camera3_capture_result_t result; 1025 result.result = NULL; 1026 result.frame_number = frame_number; 1027 result.num_output_buffers = 1; 1028 result.output_buffers = buffer; 1029 ALOGD("%s: result frame_number = %d, buffer = %p", 1030 __func__, frame_number, buffer); 1031 mPendingBuffersMap.editValueFor(buffer->stream)--; 1032 mCallbackOps->process_capture_result(mCallbackOps, &result); 1033 } else { 1034 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin(); 1035 j != i->buffers.end(); j++) { 1036 if (j->stream == buffer->stream) { 1037 if (j->buffer != NULL) { 1038 ALOGE("%s: Error: buffer is already set", __func__); 1039 } else { 1040 j->buffer = (camera3_stream_buffer_t *)malloc( 1041 sizeof(camera3_stream_buffer_t)); 1042 *(j->buffer) = *buffer; 1043 ALOGD("%s: cache buffer %p at result frame_number %d", 1044 __func__, buffer, frame_number); 1045 } 1046 } 1047 } 1048 } 1049 } 1050 1051 pthread_mutex_unlock(&mRequestLock); 1052 return; 1053} 1054 1055/*=========================================================================== 1056 * FUNCTION : translateCbMetadataToResultMetadata 1057 * 1058 * DESCRIPTION: 1059 * 1060 * PARAMETERS : 1061 * @metadata : metadata information from callback 1062 * 1063 * RETURN : camera_metadata_t* 1064 * metadata in a format specified by fwk 1065 *==========================================================================*/ 1066camera_metadata_t* 
1067QCamera3HardwareInterface::translateCbMetadataToResultMetadata 1068 (metadata_buffer_t *metadata, nsecs_t timestamp) 1069{ 1070 CameraMetadata camMetadata; 1071 camera_metadata_t* resultMetadata; 1072 1073 1074 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 1075 1076 /*CAM_INTF_META_HISTOGRAM - TODO*/ 1077 /*cam_hist_stats_t *histogram = 1078 (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM, 1079 metadata);*/ 1080 1081 /*face detection*/ 1082 cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *) 1083 POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata); 1084 uint8_t numFaces = faceDetectionInfo->num_faces_detected; 1085 int32_t faceIds[numFaces]; 1086 uint8_t faceScores[numFaces]; 1087 int32_t faceRectangles[numFaces * 4]; 1088 int32_t faceLandmarks[numFaces * 6]; 1089 int j = 0, k = 0; 1090 for (int i = 0; i < numFaces; i++) { 1091 faceIds[i] = faceDetectionInfo->faces[i].face_id; 1092 faceScores[i] = faceDetectionInfo->faces[i].score; 1093 convertToRegions(faceDetectionInfo->faces[i].face_boundary, 1094 faceRectangles+j, -1); 1095 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 1096 j+= 4; 1097 k+= 6; 1098 } 1099 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 1100 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 1101 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, 1102 faceRectangles, numFaces*4); 1103 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1104 faceLandmarks, numFaces*6); 1105 1106 1107 /*autofocus - TODO*/ 1108 /*cam_auto_focus_data_t *afData =(cam_auto_focus_data_t *) 1109 POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/ 1110 1111 uint8_t *color_correct_mode = 1112 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1113 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1114 1115 int32_t *ae_precapture_id = 1116 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1117 
camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1118 1119 /*aec regions*/ 1120 cam_area_t *hAeRegions = 1121 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1122 int32_t aeRegions[5]; 1123 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1124 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1125 1126 uint8_t *ae_state = 1127 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1128 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1129 1130 uint8_t *focusMode = 1131 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1132 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1133 1134 /*af regions*/ 1135 cam_area_t *hAfRegions = 1136 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1137 int32_t afRegions[5]; 1138 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1139 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1140 1141 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1142 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1143 1144 int32_t *afTriggerId = 1145 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1146 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1147 1148 uint8_t *whiteBalance = 1149 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1150 camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1); 1151 1152 /*awb regions*/ 1153 cam_area_t *hAwbRegions = 1154 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1155 int32_t awbRegions[5]; 1156 convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1157 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1158 1159 uint8_t *whiteBalanceState = 1160 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1161 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1162 1163 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, 
metadata); 1164 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1165 1166 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata); 1167 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1168 1169 uint8_t *flashPower = 1170 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1171 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1172 1173 int64_t *flashFiringTime = 1174 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1175 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1176 1177 /*int32_t *ledMode = 1178 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1179 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1180 1181 uint8_t *flashState = 1182 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1183 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1184 1185 uint8_t *hotPixelMode = 1186 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1187 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1188 1189 float *lensAperture = 1190 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1191 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1192 1193 float *filterDensity = 1194 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1195 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1196 1197 float *focalLength = 1198 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1199 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1200 1201 float *focusDistance = 1202 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1203 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1204 1205 float *focusRange = 1206 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1207 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1208 1209 uint8_t *opticalStab = 1210 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1211 
camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1212 1213 /*int32_t *focusState = 1214 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1215 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1216 1217 uint8_t *noiseRedMode = 1218 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1219 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1220 1221 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1222 1223 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1224 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1225 int32_t scalerCropRegion[3]; 1226 scalerCropRegion[0] = hScalerCropRegion->left; 1227 scalerCropRegion[1] = hScalerCropRegion->top; 1228 scalerCropRegion[2] = hScalerCropRegion->width; 1229 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 3); 1230 1231 int64_t *sensorExpTime = 1232 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1233 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1234 1235 int64_t *sensorFameDuration = 1236 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1237 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1238 1239 int32_t *sensorSensitivity = 1240 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1241 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1242 1243 uint8_t *shadingMode = 1244 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1245 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1246 1247 uint8_t *faceDetectMode = 1248 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1249 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1); 1250 1251 uint8_t *histogramMode = 1252 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1253 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1254 
1255 uint8_t *sharpnessMapMode = 1256 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1257 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1258 sharpnessMapMode, 1); 1259 1260 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1261 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1262 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1263 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1264 (int32_t*)sharpnessMap->sharpness, 1265 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1266 1267 resultMetadata = camMetadata.release(); 1268 return resultMetadata; 1269} 1270 1271/*=========================================================================== 1272 * FUNCTION : convertToRegions 1273 * 1274 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1275 * 1276 * PARAMETERS : 1277 * @rect : cam_rect_t struct to convert 1278 * @region : int32_t destination array 1279 * @weight : if we are converting from cam_area_t, weight is valid 1280 * else weight = -1 1281 * 1282 *==========================================================================*/ 1283void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){ 1284 region[0] = rect.left; 1285 region[1] = rect.top; 1286 region[2] = rect.left + rect.width; 1287 region[3] = rect.top + rect.height; 1288 if (weight > -1) { 1289 region[4] = weight; 1290 } 1291} 1292 1293/*=========================================================================== 1294 * FUNCTION : convertFromRegions 1295 * 1296 * DESCRIPTION: helper method to convert from array to cam_rect_t 1297 * 1298 * PARAMETERS : 1299 * @rect : cam_rect_t struct to convert 1300 * @region : int32_t destination array 1301 * @weight : if we are converting from cam_area_t, weight is valid 1302 * else weight = -1 1303 * 1304 *==========================================================================*/ 1305void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1306 
const camera_metadata_t *settings, 1307 uint32_t tag){ 1308 CameraMetadata frame_settings; 1309 frame_settings = settings; 1310 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1311 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1312 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1313 int32_t y_max = frame_settings.find(tag).data.i32[3]; 1314 roi->weight = frame_settings.find(tag).data.i32[4]; 1315 roi->rect.left = x_min; 1316 roi->rect.top = y_min; 1317 roi->rect.width = x_max - x_min; 1318 roi->rect.height = y_max - y_min; 1319} 1320 1321/*=========================================================================== 1322 * FUNCTION : convertLandmarks 1323 * 1324 * DESCRIPTION: helper method to extract the landmarks from face detection info 1325 * 1326 * PARAMETERS : 1327 * @face : cam_rect_t struct to convert 1328 * @landmarks : int32_t destination array 1329 * 1330 * 1331 *==========================================================================*/ 1332void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1333{ 1334 landmarks[0] = face.left_eye_center.x; 1335 landmarks[1] = face.left_eye_center.y; 1336 landmarks[2] = face.right_eye_center.y; 1337 landmarks[3] = face.right_eye_center.y; 1338 landmarks[4] = face.mouth_center.x; 1339 landmarks[5] = face.mouth_center.y; 1340} 1341 1342#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1343/*=========================================================================== 1344 * FUNCTION : initCapabilities 1345 * 1346 * DESCRIPTION: initialize camera capabilities in static data struct 1347 * 1348 * PARAMETERS : 1349 * @cameraId : camera Id 1350 * 1351 * RETURN : int32_t type of status 1352 * NO_ERROR -- success 1353 * none-zero failure code 1354 *==========================================================================*/ 1355int QCamera3HardwareInterface::initCapabilities(int cameraId) 1356{ 1357 int rc = 0; 1358 mm_camera_vtbl_t *cameraHandle = NULL; 
1359 QCamera3HeapMemory *capabilityHeap = NULL; 1360 1361 cameraHandle = camera_open(cameraId); 1362 if (!cameraHandle) { 1363 ALOGE("%s: camera_open failed", __func__); 1364 rc = -1; 1365 goto open_failed; 1366 } 1367 1368 capabilityHeap = new QCamera3HeapMemory(); 1369 if (capabilityHeap == NULL) { 1370 ALOGE("%s: creation of capabilityHeap failed", __func__); 1371 goto heap_creation_failed; 1372 } 1373 /* Allocate memory for capability buffer */ 1374 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1375 if(rc != OK) { 1376 ALOGE("%s: No memory for cappability", __func__); 1377 goto allocate_failed; 1378 } 1379 1380 /* Map memory for capability buffer */ 1381 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1382 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1383 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1384 capabilityHeap->getFd(0), 1385 sizeof(cam_capability_t)); 1386 if(rc < 0) { 1387 ALOGE("%s: failed to map capability buffer", __func__); 1388 goto map_failed; 1389 } 1390 1391 /* Query Capability */ 1392 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1393 if(rc < 0) { 1394 ALOGE("%s: failed to query capability",__func__); 1395 goto query_failed; 1396 } 1397 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1398 if (!gCamCapability[cameraId]) { 1399 ALOGE("%s: out of memory", __func__); 1400 goto query_failed; 1401 } 1402 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1403 sizeof(cam_capability_t)); 1404 rc = 0; 1405 1406query_failed: 1407 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1408 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1409map_failed: 1410 capabilityHeap->deallocate(); 1411allocate_failed: 1412 delete capabilityHeap; 1413heap_creation_failed: 1414 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1415 cameraHandle = NULL; 1416open_failed: 1417 return rc; 1418} 1419 
1420/*=========================================================================== 1421 * FUNCTION : initParameters 1422 * 1423 * DESCRIPTION: initialize camera parameters 1424 * 1425 * PARAMETERS : 1426 * 1427 * RETURN : int32_t type of status 1428 * NO_ERROR -- success 1429 * none-zero failure code 1430 *==========================================================================*/ 1431int QCamera3HardwareInterface::initParameters() 1432{ 1433 int rc = 0; 1434 1435 //Allocate Set Param Buffer 1436 mParamHeap = new QCamera3HeapMemory(); 1437 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1438 if(rc != OK) { 1439 rc = NO_MEMORY; 1440 ALOGE("Failed to allocate SETPARM Heap memory"); 1441 delete mParamHeap; 1442 mParamHeap = NULL; 1443 return rc; 1444 } 1445 1446 //Map memory for parameters buffer 1447 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1448 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1449 mParamHeap->getFd(0), 1450 sizeof(parm_buffer_t)); 1451 if(rc < 0) { 1452 ALOGE("%s:failed to map SETPARM buffer",__func__); 1453 rc = FAILED_TRANSACTION; 1454 mParamHeap->deallocate(); 1455 delete mParamHeap; 1456 mParamHeap = NULL; 1457 return rc; 1458 } 1459 1460 mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0); 1461 return rc; 1462} 1463 1464/*=========================================================================== 1465 * FUNCTION : deinitParameters 1466 * 1467 * DESCRIPTION: de-initialize camera parameters 1468 * 1469 * PARAMETERS : 1470 * 1471 * RETURN : NONE 1472 *==========================================================================*/ 1473void QCamera3HardwareInterface::deinitParameters() 1474{ 1475 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 1476 CAM_MAPPING_BUF_TYPE_PARM_BUF); 1477 1478 mParamHeap->deallocate(); 1479 delete mParamHeap; 1480 mParamHeap = NULL; 1481 1482 mParameters = NULL; 1483} 1484 1485/*=========================================================================== 1486 * FUNCTION : calcMaxJpegSize 1487 * 
1488 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 1489 * 1490 * PARAMETERS : 1491 * 1492 * RETURN : max_jpeg_size 1493 *==========================================================================*/ 1494int QCamera3HardwareInterface::calcMaxJpegSize() 1495{ 1496 int32_t max_jpeg_size = 0; 1497 int temp_width, temp_height; 1498 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 1499 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1500 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1501 if (temp_width * temp_height > max_jpeg_size ) { 1502 max_jpeg_size = temp_width * temp_height; 1503 } 1504 } 1505 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1506 return max_jpeg_size; 1507} 1508 1509/*=========================================================================== 1510 * FUNCTION : initStaticMetadata 1511 * 1512 * DESCRIPTION: initialize the static metadata 1513 * 1514 * PARAMETERS : 1515 * @cameraId : camera Id 1516 * 1517 * RETURN : int32_t type of status 1518 * 0 -- success 1519 * non-zero failure code 1520 *==========================================================================*/ 1521int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1522{ 1523 int rc = 0; 1524 CameraMetadata staticInfo; 1525 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1526 /*HAL 3 only*/ 1527 /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1528 &gCamCapability[cameraId]->min_focus_distance, 1); */ 1529 1530 /*hard coded for now but this should come from sensor*/ 1531 float min_focus_distance; 1532 if(facingBack){ 1533 min_focus_distance = 10; 1534 } else { 1535 min_focus_distance = 0; 1536 } 1537 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1538 &min_focus_distance, 1); 1539 1540 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1541 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1542 1543 /*should 
be using focal lengths but sensor doesn't provide that info now*/ 1544 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1545 &gCamCapability[cameraId]->focal_length, 1546 1); 1547 1548 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1549 gCamCapability[cameraId]->apertures, 1550 gCamCapability[cameraId]->apertures_count); 1551 1552 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1553 gCamCapability[cameraId]->filter_densities, 1554 gCamCapability[cameraId]->filter_densities_count); 1555 1556 1557 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1558 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1559 gCamCapability[cameraId]->optical_stab_modes_count); 1560 1561 staticInfo.update(ANDROID_LENS_POSITION, 1562 gCamCapability[cameraId]->lens_position, 1563 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1564 1565 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1566 gCamCapability[cameraId]->lens_shading_map_size.height}; 1567 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1568 lens_shading_map_size, 1569 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1570 1571 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1572 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1573 1574 int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1575 gCamCapability[cameraId]->geo_correction_map_size.height}; 1576 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1577 geo_correction_map_size, 1578 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1579 1580 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1581 gCamCapability[cameraId]->geo_correction_map, 1582 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1583 1584 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1585 
gCamCapability[cameraId]->sensor_physical_size, 2); 1586 1587 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1588 gCamCapability[cameraId]->exposure_time_range, 2); 1589 1590 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1591 &gCamCapability[cameraId]->max_frame_duration, 1); 1592 1593 1594 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1595 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1596 1597 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1598 gCamCapability[cameraId]->pixel_array_size.height}; 1599 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1600 pixel_array_size, 2); 1601 1602 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width, 1603 gCamCapability[cameraId]->active_array_size.height}; 1604 1605 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1606 active_array_size, 2); 1607 1608 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1609 &gCamCapability[cameraId]->white_level, 1); 1610 1611 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1612 gCamCapability[cameraId]->black_level_pattern, 4); 1613 1614 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1615 &gCamCapability[cameraId]->flash_charge_duration, 1); 1616 1617 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1618 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1619 1620 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1621 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1622 /*hardcode 0 for now*/ 1623 int32_t max_face_count = 0; 1624 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1625 &max_face_count, 1); 1626 1627 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1628 &gCamCapability[cameraId]->histogram_size, 1); 1629 1630 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1631 &gCamCapability[cameraId]->max_histogram_count, 1); 1632 1633 int32_t sharpness_map_size[] = 
{gCamCapability[cameraId]->sharpness_map_size.width, 1634 gCamCapability[cameraId]->sharpness_map_size.height}; 1635 1636 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1637 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1638 1639 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1640 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1641 1642 1643 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1644 &gCamCapability[cameraId]->raw_min_duration, 1645 1); 1646 1647 int32_t scalar_formats[CAM_FORMAT_MAX]; 1648 int scalar_formats_count = gCamCapability[cameraId]->supported_scalar_format_cnt; 1649 for (int i = 0; i < scalar_formats_count; i++) { 1650 scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]); 1651 } 1652 scalar_formats[scalar_formats_count] = HAL_PIXEL_FORMAT_YCbCr_420_888; 1653 scalar_formats_count++; 1654 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1655 scalar_formats, 1656 scalar_formats_count); 1657 1658 int32_t available_processed_sizes[CAM_FORMAT_MAX * 2]; 1659 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1660 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1661 available_processed_sizes); 1662 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1663 available_processed_sizes, 1664 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1665 1666 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 1667 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1668 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1669 available_fps_ranges); 1670 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1671 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1672 1673 camera_metadata_rational exposureCompensationStep = { 1674 gCamCapability[cameraId]->exp_compensation_step.numerator, 1675 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1676 
staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1677 &exposureCompensationStep, 1); 1678 1679 /*TO DO*/ 1680 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1681 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1682 availableVstabModes, sizeof(availableVstabModes)); 1683 1684 /*HAL 1 and HAL 3 common*/ 1685 float maxZoom = 10; 1686 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1687 &maxZoom, 1); 1688 1689 int32_t max3aRegions = 1; 1690 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1691 &max3aRegions, 1); 1692 1693 uint8_t availableFaceDetectModes[] = { 1694 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1695 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1696 availableFaceDetectModes, 1697 sizeof(availableFaceDetectModes)); 1698 1699 int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1700 gCamCapability[cameraId]->raw_dim.height}; 1701 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1702 raw_size, 1703 sizeof(raw_size)/sizeof(uint32_t)); 1704 1705 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1706 gCamCapability[cameraId]->exposure_compensation_max}; 1707 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1708 exposureCompensationRange, 1709 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1710 1711 uint8_t lensFacing = (facingBack) ? 
1712 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1713 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1714 1715 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 1716 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1717 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1718 available_jpeg_sizes); 1719 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1720 available_jpeg_sizes, 1721 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1722 1723 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1724 available_jpeg_sizes, 1725 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1726 1727 int32_t max_jpeg_size = 0; 1728 int temp_width, temp_height; 1729 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1730 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1731 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1732 if (temp_width * temp_height > max_jpeg_size ) { 1733 max_jpeg_size = temp_width * temp_height; 1734 } 1735 } 1736 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1737 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1738 &max_jpeg_size, 1); 1739 1740 uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1741 int32_t size = 0; 1742 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1743 int val = lookupFwkName(EFFECT_MODES_MAP, 1744 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1745 gCamCapability[cameraId]->supported_effects[i]); 1746 if (val != NAME_NOT_FOUND) { 1747 avail_effects[size] = (uint8_t)val; 1748 size++; 1749 } 1750 } 1751 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1752 avail_effects, 1753 size); 1754 1755 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1756 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 1757 int32_t supported_scene_modes_cnt = 0; 1758 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1759 int val = lookupFwkName(SCENE_MODES_MAP, 1760 
sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1761 gCamCapability[cameraId]->supported_scene_modes[i]); 1762 if (val != NAME_NOT_FOUND) { 1763 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 1764 supported_indexes[supported_scene_modes_cnt] = i; 1765 supported_scene_modes_cnt++; 1766 } 1767 } 1768 1769 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1770 avail_scene_modes, 1771 supported_scene_modes_cnt); 1772 1773 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 1774 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 1775 supported_scene_modes_cnt, 1776 scene_mode_overrides, 1777 supported_indexes); 1778 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 1779 scene_mode_overrides, 1780 supported_scene_modes_cnt*3); 1781 1782 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1783 size = 0; 1784 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1785 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1786 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1787 gCamCapability[cameraId]->supported_antibandings[i]); 1788 if (val != NAME_NOT_FOUND) { 1789 avail_antibanding_modes[size] = (uint8_t)val; 1790 size++; 1791 } 1792 1793 } 1794 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1795 avail_antibanding_modes, 1796 size); 1797 1798 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1799 size = 0; 1800 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1801 int val = lookupFwkName(FOCUS_MODES_MAP, 1802 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1803 gCamCapability[cameraId]->supported_focus_modes[i]); 1804 if (val != NAME_NOT_FOUND) { 1805 avail_af_modes[size] = (uint8_t)val; 1806 size++; 1807 } 1808 } 1809 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1810 avail_af_modes, 1811 size); 1812 1813 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1814 size = 0; 1815 for (int i = 0; i < 
gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1816 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1817 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1818 gCamCapability[cameraId]->supported_white_balances[i]); 1819 if (val != NAME_NOT_FOUND) { 1820 avail_awb_modes[size] = (uint8_t)val; 1821 size++; 1822 } 1823 } 1824 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1825 avail_awb_modes, 1826 size); 1827 1828 uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1829 size = 0; 1830 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1831 int val = lookupFwkName(FLASH_MODES_MAP, 1832 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1833 gCamCapability[cameraId]->supported_flash_modes[i]); 1834 if (val != NAME_NOT_FOUND) { 1835 avail_flash_modes[size] = (uint8_t)val; 1836 size++; 1837 } 1838 } 1839 static uint8_t flashAvailable = 0; 1840 if (size > 1) { 1841 //flash is supported 1842 flashAvailable = 1; 1843 } 1844 staticInfo.update(ANDROID_FLASH_MODE, 1845 avail_flash_modes, 1846 size); 1847 1848 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1849 &flashAvailable, 1); 1850 1851 uint8_t avail_ae_modes[5]; 1852 size = 0; 1853 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) { 1854 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i]; 1855 size++; 1856 } 1857 if (flashAvailable) { 1858 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; 1859 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; 1860 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; 1861 } 1862 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1863 avail_ae_modes, 1864 size); 1865 1866 gStaticMetadata[cameraId] = staticInfo.release(); 1867 return rc; 1868} 1869 1870/*=========================================================================== 1871 * FUNCTION : makeTable 1872 * 1873 * DESCRIPTION: make a table of sizes 1874 * 1875 * PARAMETERS 
: 1876 * 1877 * 1878 *==========================================================================*/ 1879void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 1880 int32_t* sizeTable) 1881{ 1882 int j = 0; 1883 for (int i = 0; i < size; i++) { 1884 sizeTable[j] = dimTable[i].width; 1885 sizeTable[j+1] = dimTable[i].height; 1886 j+=2; 1887 } 1888} 1889 1890/*=========================================================================== 1891 * FUNCTION : makeFPSTable 1892 * 1893 * DESCRIPTION: make a table of fps ranges 1894 * 1895 * PARAMETERS : 1896 * 1897 *==========================================================================*/ 1898void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 1899 int32_t* fpsRangesTable) 1900{ 1901 int j = 0; 1902 for (int i = 0; i < size; i++) { 1903 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 1904 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 1905 j+=2; 1906 } 1907} 1908 1909/*=========================================================================== 1910 * FUNCTION : makeOverridesList 1911 * 1912 * DESCRIPTION: make a list of scene mode overrides 1913 * 1914 * PARAMETERS : 1915 * 1916 * 1917 *==========================================================================*/ 1918void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable, 1919 uint8_t size, uint8_t* overridesList, 1920 uint8_t* supported_indexes) 1921{ 1922 /*daemon will give a list of overrides for all scene modes. 
1923 However we should send the fwk only the overrides for the scene modes 1924 supported by the framework*/ 1925 int j = 0, index = 0; 1926 for (int i = 0; i < size; i++) { 1927 index = supported_indexes[i]; 1928 overridesList[j] = (int32_t)overridesTable[index].ae_mode; 1929 overridesList[j+1] = (int32_t)overridesTable[index].awb_mode; 1930 overridesList[j+2] = (int32_t)overridesTable[index].af_mode; 1931 j+=3; 1932 } 1933} 1934 1935/*=========================================================================== 1936 * FUNCTION : getPreviewHalPixelFormat 1937 * 1938 * DESCRIPTION: convert the format to type recognized by framework 1939 * 1940 * PARAMETERS : format : the format from backend 1941 * 1942 ** RETURN : format recognized by framework 1943 * 1944 *==========================================================================*/ 1945int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 1946{ 1947 int32_t halPixelFormat; 1948 1949 switch (format) { 1950 case CAM_FORMAT_YUV_420_NV12: 1951 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 1952 break; 1953 case CAM_FORMAT_YUV_420_NV21: 1954 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 1955 break; 1956 case CAM_FORMAT_YUV_420_NV21_ADRENO: 1957 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 1958 break; 1959 case CAM_FORMAT_YUV_420_YV12: 1960 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 1961 break; 1962 case CAM_FORMAT_YUV_422_NV16: 1963 case CAM_FORMAT_YUV_422_NV61: 1964 default: 1965 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 1966 break; 1967 } 1968 return halPixelFormat; 1969} 1970 1971/*=========================================================================== 1972 * FUNCTION : AddSetParmEntryToBatch 1973 * 1974 * DESCRIPTION: add set parameter entry into batch 1975 * 1976 * PARAMETERS : 1977 * @p_table : ptr to parameter buffer 1978 * @paramType : parameter type 1979 * @paramLength : length of parameter value 1980 * @paramValue : ptr to parameter value 1981 * 1982 * RETURN : int32_t type of 
status 1983 * NO_ERROR -- success 1984 * none-zero failure code 1985 *==========================================================================*/ 1986int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 1987 cam_intf_parm_type_t paramType, 1988 uint32_t paramLength, 1989 void *paramValue) 1990{ 1991 int position = paramType; 1992 int current, next; 1993 1994 /************************************************************************* 1995 * Code to take care of linking next flags * 1996 *************************************************************************/ 1997 current = GET_FIRST_PARAM_ID(p_table); 1998 if (position == current){ 1999 //DO NOTHING 2000 } else if (position < current){ 2001 SET_NEXT_PARAM_ID(position, p_table, current); 2002 SET_FIRST_PARAM_ID(p_table, position); 2003 } else { 2004 /* Search for the position in the linked list where we need to slot in*/ 2005 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2006 current = GET_NEXT_PARAM_ID(current, p_table); 2007 2008 /*If node already exists no need to alter linking*/ 2009 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2010 next = GET_NEXT_PARAM_ID(current, p_table); 2011 SET_NEXT_PARAM_ID(current, p_table, position); 2012 SET_NEXT_PARAM_ID(position, p_table, next); 2013 } 2014 } 2015 2016 /************************************************************************* 2017 * Copy contents into entry * 2018 *************************************************************************/ 2019 2020 if (paramLength > sizeof(parm_type_t)) { 2021 ALOGE("%s:Size of input larger than max entry size",__func__); 2022 return BAD_VALUE; 2023 } 2024 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2025 return NO_ERROR; 2026} 2027 2028/*=========================================================================== 2029 * FUNCTION : lookupFwkName 2030 * 2031 * DESCRIPTION: In case the enum is not same in fwk and backend 2032 * make sure the parameter is correctly 
propogated 2033 * 2034 * PARAMETERS : 2035 * @arr : map between the two enums 2036 * @len : len of the map 2037 * @hal_name : name of the hal_parm to map 2038 * 2039 * RETURN : int type of status 2040 * fwk_name -- success 2041 * none-zero failure code 2042 *==========================================================================*/ 2043int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[], 2044 int len, int hal_name) 2045{ 2046 2047 for (int i = 0; i < len; i++) { 2048 if (arr[i].hal_name == hal_name) 2049 return arr[i].fwk_name; 2050 } 2051 2052 /* Not able to find matching framework type is not necessarily 2053 * an error case. This happens when mm-camera supports more attributes 2054 * than the frameworks do */ 2055 ALOGD("%s: Cannot find matching framework type", __func__); 2056 return NAME_NOT_FOUND; 2057} 2058 2059/*=========================================================================== 2060 * FUNCTION : lookupHalName 2061 * 2062 * DESCRIPTION: In case the enum is not same in fwk and backend 2063 * make sure the parameter is correctly propogated 2064 * 2065 * PARAMETERS : 2066 * @arr : map between the two enums 2067 * @len : len of the map 2068 * @fwk_name : name of the hal_parm to map 2069 * 2070 * RETURN : int32_t type of status 2071 * hal_name -- success 2072 * none-zero failure code 2073 *==========================================================================*/ 2074int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2075 int len, int fwk_name) 2076{ 2077 for (int i = 0; i < len; i++) { 2078 if (arr[i].fwk_name == fwk_name) 2079 return arr[i].hal_name; 2080 } 2081 ALOGE("%s: Cannot find matching hal type", __func__); 2082 return NAME_NOT_FOUND; 2083} 2084 2085/*=========================================================================== 2086 * FUNCTION : getCapabilities 2087 * 2088 * DESCRIPTION: query camera capabilities 2089 * 2090 * PARAMETERS : 2091 * @cameraId : camera Id 2092 * @info : camera info struct 
to be filled in with camera capabilities 2093 * 2094 * RETURN : int32_t type of status 2095 * NO_ERROR -- success 2096 * none-zero failure code 2097 *==========================================================================*/ 2098int QCamera3HardwareInterface::getCamInfo(int cameraId, 2099 struct camera_info *info) 2100{ 2101 int rc = 0; 2102 2103 if (NULL == gCamCapability[cameraId]) { 2104 rc = initCapabilities(cameraId); 2105 if (rc < 0) { 2106 //pthread_mutex_unlock(&g_camlock); 2107 return rc; 2108 } 2109 } 2110 2111 if (NULL == gStaticMetadata[cameraId]) { 2112 rc = initStaticMetadata(cameraId); 2113 if (rc < 0) { 2114 return rc; 2115 } 2116 } 2117 2118 switch(gCamCapability[cameraId]->position) { 2119 case CAM_POSITION_BACK: 2120 info->facing = CAMERA_FACING_BACK; 2121 break; 2122 2123 case CAM_POSITION_FRONT: 2124 info->facing = CAMERA_FACING_FRONT; 2125 break; 2126 2127 default: 2128 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2129 rc = -1; 2130 break; 2131 } 2132 2133 2134 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2135 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2136 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2137 2138 return rc; 2139} 2140 2141/*=========================================================================== 2142 * FUNCTION : translateMetadata 2143 * 2144 * DESCRIPTION: translate the metadata into camera_metadata_t 2145 * 2146 * PARAMETERS : type of the request 2147 * 2148 * 2149 * RETURN : success: camera_metadata_t* 2150 * failure: NULL 2151 * 2152 *==========================================================================*/ 2153camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2154{ 2155 pthread_mutex_lock(&mMutex); 2156 2157 if (mDefaultMetadata[type] != NULL) { 2158 pthread_mutex_unlock(&mMutex); 2159 return mDefaultMetadata[type]; 2160 } 2161 //first time we are handling this request 2162 //fill up the metadata 
structure using the wrapper class 2163 CameraMetadata settings; 2164 //translate from cam_capability_t to camera_metadata_tag_t 2165 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2166 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2167 2168 /*control*/ 2169 2170 uint8_t controlIntent = 0; 2171 switch (type) { 2172 case CAMERA3_TEMPLATE_PREVIEW: 2173 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2174 break; 2175 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2176 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2177 break; 2178 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2179 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2180 break; 2181 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2182 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2183 break; 2184 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2185 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2186 break; 2187 default: 2188 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2189 break; 2190 } 2191 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2192 2193 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2194 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2195 2196 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2197 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2198 2199 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2200 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2201 2202 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2203 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2204 2205 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2206 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2207 2208 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2209 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2210 2211 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 
//similar to AUTO? 2212 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2213 2214 static uint8_t focusMode; 2215 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2216 ALOGE("%s: Setting focus mode to auto", __func__); 2217 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2218 } else { 2219 ALOGE("%s: Setting focus mode to off", __func__); 2220 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2221 } 2222 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2223 2224 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2225 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2226 2227 /*flash*/ 2228 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2229 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2230 2231 2232 /* lens */ 2233 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2234 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2235 2236 if (gCamCapability[mCameraId]->filter_densities_count) { 2237 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2238 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2239 gCamCapability[mCameraId]->filter_densities_count); 2240 } 2241 2242 /* TODO: Enable focus lengths once supported*/ 2243 /*if (gCamCapability[mCameraId]->focal_lengths_count) { 2244 float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0]; 2245 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2246 }*/ 2247 2248 mDefaultMetadata[type] = settings.release(); 2249 2250 pthread_mutex_unlock(&mMutex); 2251 return mDefaultMetadata[type]; 2252} 2253 2254/*=========================================================================== 2255 * FUNCTION : setFrameParameters 2256 * 2257 * DESCRIPTION: set parameters per frame as requested in the metadata from 2258 * framework 2259 * 2260 * PARAMETERS : 2261 * @settings : frame settings information from framework 2262 * 2263 * 2264 * RETURN : success: NO_ERROR 2265 * failure: 2266 
*==========================================================================*/ 2267int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2268 const camera_metadata_t *settings) 2269{ 2270 /*translate from camera_metadata_t type to parm_type_t*/ 2271 int rc = 0; 2272 if (settings == NULL && mFirstRequest) { 2273 /*settings cannot be null for the first request*/ 2274 return BAD_VALUE; 2275 } 2276 2277 int32_t hal_version = CAM_HAL_V3; 2278 2279 memset(mParameters, 0, sizeof(parm_buffer_t)); 2280 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2281 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2282 sizeof(hal_version), &hal_version); 2283 2284 /*we need to update the frame number in the parameters*/ 2285 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2286 sizeof(frame_id), &frame_id); 2287 if (rc < 0) { 2288 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2289 return BAD_VALUE; 2290 } 2291 2292 if(settings != NULL){ 2293 rc = translateMetadataToParameters(settings); 2294 } 2295 /*set the parameters to backend*/ 2296 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2297 return rc; 2298} 2299 2300/*=========================================================================== 2301 * FUNCTION : translateMetadataToParameters 2302 * 2303 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2304 * 2305 * 2306 * PARAMETERS : 2307 * @settings : frame settings information from framework 2308 * 2309 * 2310 * RETURN : success: NO_ERROR 2311 * failure: 2312 *==========================================================================*/ 2313int QCamera3HardwareInterface::translateMetadataToParameters 2314 (const camera_metadata_t *settings) 2315{ 2316 int rc = 0; 2317 CameraMetadata frame_settings; 2318 frame_settings = settings; 2319 2320 2321 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2322 int32_t antibandingMode = 2323 
frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2324 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2325 sizeof(antibandingMode), &antibandingMode); 2326 } 2327 2328 /*int32_t expCompensation = frame_settings.find().data.i32[0]; 2329 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2330 sizeof(expCompensation), &expCompensation);*/ 2331 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2332 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2333 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2334 sizeof(aeLock), &aeLock); 2335 } 2336 2337 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2338 cam_fps_range_t fps_range; 2339 fps_range.min_fps = 2340 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2341 fps_range.max_fps = 2342 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2343 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2344 sizeof(fps_range), &fps_range); 2345 } 2346 2347 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2348 uint8_t fwk_focusMode = 2349 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2350 uint8_t focusMode = lookupHalName(FOCUS_MODES_MAP, 2351 sizeof(FOCUS_MODES_MAP), 2352 fwk_focusMode); 2353 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2354 sizeof(focusMode), &focusMode); 2355 } 2356 2357 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2358 uint8_t awbLock = 2359 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2360 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2361 sizeof(awbLock), &awbLock); 2362 } 2363 2364 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2365 uint8_t fwk_whiteLevel = 2366 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2367 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2368 sizeof(WHITE_BALANCE_MODES_MAP), 2369 fwk_whiteLevel); 
2370 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2371 sizeof(whiteLevel), &whiteLevel); 2372 } 2373 2374 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2375 uint8_t fwk_effectMode = 2376 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2377 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2378 sizeof(EFFECT_MODES_MAP), 2379 fwk_effectMode); 2380 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2381 sizeof(effectMode), &effectMode); 2382 } 2383 2384 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2385 uint8_t fwk_aeMode = 2386 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2387 uint8_t aeMode; 2388 int32_t redeye; 2389 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 2390 aeMode = CAM_AE_MODE_OFF; 2391 } else { 2392 aeMode = CAM_AE_MODE_ON; 2393 } 2394 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 2395 redeye = 1; 2396 } else { 2397 redeye = 0; 2398 } 2399 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 2400 sizeof(AE_FLASH_MODE_MAP), 2401 aeMode); 2402 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2403 sizeof(aeMode), &aeMode); 2404 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2405 sizeof(flashMode), &flashMode); 2406 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 2407 sizeof(redeye), &redeye); 2408 } 2409 2410 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2411 int32_t metaFrameNumber = 2412 frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2413 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2414 sizeof(metaFrameNumber), &metaFrameNumber); 2415 } 2416 2417 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2418 uint8_t colorCorrectMode = 2419 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2420 rc = 2421 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2422 sizeof(colorCorrectMode), 
&colorCorrectMode); 2423 } 2424 cam_trigger_t aecTrigger; 2425 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 2426 aecTrigger.trigger_id = -1; 2427 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 2428 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 2429 aecTrigger.trigger = 2430 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2431 aecTrigger.trigger_id = 2432 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 2433 } 2434 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2435 sizeof(aecTrigger), &aecTrigger); 2436 2437 /*af_trigger must come with a trigger id*/ 2438 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 2439 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 2440 cam_trigger_t af_trigger; 2441 af_trigger.trigger = 2442 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2443 af_trigger.trigger_id = 2444 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 2445 rc = AddSetParmEntryToBatch(mParameters, 2446 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 2447 } 2448 2449 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2450 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2451 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2452 sizeof(metaMode), &metaMode); 2453 } 2454 2455 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2456 int32_t demosaic = 2457 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2458 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2459 sizeof(demosaic), &demosaic); 2460 } 2461 2462 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2463 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2464 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2465 sizeof(edgeMode), &edgeMode); 2466 } 2467 2468 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2469 int32_t edgeStrength = 2470 
frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2471 rc = AddSetParmEntryToBatch(mParameters, 2472 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2473 } 2474 2475 if (frame_settings.exists(ANDROID_FLASH_MODE)) { 2476 uint8_t flashMode = 2477 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]; 2478 rc = AddSetParmEntryToBatch(mParameters, 2479 CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode); 2480 } 2481 2482 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2483 uint8_t flashPower = 2484 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2485 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2486 sizeof(flashPower), &flashPower); 2487 } 2488 2489 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2490 int64_t flashFiringTime = 2491 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2492 rc = AddSetParmEntryToBatch(mParameters, 2493 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2494 } 2495 2496 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2497 uint8_t geometricMode = 2498 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2499 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2500 sizeof(geometricMode), &geometricMode); 2501 } 2502 2503 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2504 uint8_t geometricStrength = 2505 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2506 rc = AddSetParmEntryToBatch(mParameters, 2507 CAM_INTF_META_GEOMETRIC_STRENGTH, 2508 sizeof(geometricStrength), &geometricStrength); 2509 } 2510 2511 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2512 uint8_t hotPixelMode = 2513 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2514 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 2515 sizeof(hotPixelMode), &hotPixelMode); 2516 } 2517 2518 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2519 float lensAperture = 2520 frame_settings.find( 
ANDROID_LENS_APERTURE).data.f[0]; 2521 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2522 sizeof(lensAperture), &lensAperture); 2523 } 2524 2525 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2526 float filterDensity = 2527 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2528 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2529 sizeof(filterDensity), &filterDensity); 2530 } 2531 2532 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2533 float focalLength = 2534 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2535 rc = AddSetParmEntryToBatch(mParameters, 2536 CAM_INTF_META_LENS_FOCAL_LENGTH, 2537 sizeof(focalLength), &focalLength); 2538 } 2539 2540 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2541 float focalDistance = 2542 frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2543 rc = AddSetParmEntryToBatch(mParameters, 2544 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2545 sizeof(focalDistance), &focalDistance); 2546 } 2547 2548 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2549 uint8_t optStabMode = 2550 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2551 rc = AddSetParmEntryToBatch(mParameters, 2552 CAM_INTF_META_LENS_OPT_STAB_MODE, 2553 sizeof(optStabMode), &optStabMode); 2554 } 2555 2556 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2557 uint8_t noiseRedMode = 2558 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2559 rc = AddSetParmEntryToBatch(mParameters, 2560 CAM_INTF_META_NOISE_REDUCTION_MODE, 2561 sizeof(noiseRedMode), &noiseRedMode); 2562 } 2563 2564 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2565 uint8_t noiseRedStrength = 2566 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2567 rc = AddSetParmEntryToBatch(mParameters, 2568 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2569 sizeof(noiseRedStrength), &noiseRedStrength); 2570 } 2571 2572 if 
(frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2573 cam_crop_region_t scalerCropRegion; 2574 scalerCropRegion.left = 2575 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2576 scalerCropRegion.top = 2577 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2578 scalerCropRegion.width = 2579 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2580 scalerCropRegion.height = 2581 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3]; 2582 rc = AddSetParmEntryToBatch(mParameters, 2583 CAM_INTF_META_SCALER_CROP_REGION, 2584 sizeof(scalerCropRegion), &scalerCropRegion); 2585 } 2586 2587 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2588 int64_t sensorExpTime = 2589 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2590 rc = AddSetParmEntryToBatch(mParameters, 2591 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2592 sizeof(sensorExpTime), &sensorExpTime); 2593 } 2594 2595 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2596 int64_t sensorFrameDuration = 2597 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2598 rc = AddSetParmEntryToBatch(mParameters, 2599 CAM_INTF_META_SENSOR_FRAME_DURATION, 2600 sizeof(sensorFrameDuration), &sensorFrameDuration); 2601 } 2602 2603 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2604 int32_t sensorSensitivity = 2605 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2606 rc = AddSetParmEntryToBatch(mParameters, 2607 CAM_INTF_META_SENSOR_SENSITIVITY, 2608 sizeof(sensorSensitivity), &sensorSensitivity); 2609 } 2610 2611 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2612 int32_t shadingMode = 2613 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2614 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2615 sizeof(shadingMode), &shadingMode); 2616 } 2617 2618 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2619 uint8_t shadingStrength = 2620 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2621 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2622 sizeof(shadingStrength), &shadingStrength); 2623 } 2624 2625 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2626 uint8_t facedetectMode = 2627 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2628 rc = AddSetParmEntryToBatch(mParameters, 2629 CAM_INTF_META_STATS_FACEDETECT_MODE, 2630 sizeof(facedetectMode), &facedetectMode); 2631 } 2632 2633 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2634 uint8_t histogramMode = 2635 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2636 rc = AddSetParmEntryToBatch(mParameters, 2637 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2638 sizeof(histogramMode), &histogramMode); 2639 } 2640 2641 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2642 uint8_t sharpnessMapMode = 2643 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2644 rc = AddSetParmEntryToBatch(mParameters, 2645 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2646 sizeof(sharpnessMapMode), &sharpnessMapMode); 2647 } 2648 2649 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2650 uint8_t tonemapMode = 2651 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2652 rc = AddSetParmEntryToBatch(mParameters, 2653 CAM_INTF_META_TONEMAP_MODE, 2654 sizeof(tonemapMode), &tonemapMode); 2655 } 2656 2657 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2658 uint8_t captureIntent = 2659 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2660 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2661 sizeof(captureIntent), &captureIntent); 2662 } 2663 2664 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { 2665 cam_area_t roi; 2666 convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS); 2667 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI, 2668 sizeof(roi), &roi); 2669 } 2670 2671 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { 2672 
cam_area_t roi; 2673 convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS); 2674 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI, 2675 sizeof(roi), &roi); 2676 } 2677 2678 if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { 2679 cam_area_t roi; 2680 convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS); 2681 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS, 2682 sizeof(roi), &roi); 2683 } 2684 return rc; 2685} 2686 2687/*=========================================================================== 2688 * FUNCTION : getJpegSettings 2689 * 2690 * DESCRIPTION: save the jpeg settings in the HAL 2691 * 2692 * 2693 * PARAMETERS : 2694 * @settings : frame settings information from framework 2695 * 2696 * 2697 * RETURN : success: NO_ERROR 2698 * failure: 2699 *==========================================================================*/ 2700int QCamera3HardwareInterface::getJpegSettings 2701 (const camera_metadata_t *settings) 2702{ 2703 if (mJpegSettings) { 2704 free(mJpegSettings); 2705 mJpegSettings = NULL; 2706 } 2707 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2708 CameraMetadata jpeg_settings; 2709 jpeg_settings = settings; 2710 2711 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2712 mJpegSettings->jpeg_orientation = 2713 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2714 } else { 2715 mJpegSettings->jpeg_orientation = 0; 2716 } 2717 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2718 mJpegSettings->jpeg_quality = 2719 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2720 } else { 2721 mJpegSettings->jpeg_quality = 85; 2722 } 2723 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2724 mJpegSettings->thumbnail_size.width = 2725 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2726 mJpegSettings->thumbnail_size.height = 2727 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2728 mJpegSettings->thumbnail_size.width = 320; 2729 
mJpegSettings->thumbnail_size.height = 240; 2730 } else { 2731 mJpegSettings->thumbnail_size.width = 640; 2732 mJpegSettings->thumbnail_size.height = 480; 2733 } 2734 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2735 for (int i = 0; i < 3; i++) { 2736 mJpegSettings->gps_coordinates[i] = 2737 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2738 } 2739 } 2740 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2741 mJpegSettings->gps_timestamp = 2742 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2743 } 2744 2745 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2746 mJpegSettings->gps_processing_method = 2747 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0]; 2748 } 2749 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2750 mJpegSettings->sensor_sensitivity = 2751 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2752 } 2753 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2754 mJpegSettings->lens_focal_length = 2755 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2756 } 2757 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 2758 return 0; 2759} 2760 2761/*=========================================================================== 2762 * FUNCTION : captureResultCb 2763 * 2764 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 2765 * 2766 * PARAMETERS : 2767 * @frame : frame information from mm-camera-interface 2768 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 
2769 * @userdata: userdata 2770 * 2771 * RETURN : NONE 2772 *==========================================================================*/ 2773void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 2774 camera3_stream_buffer_t *buffer, 2775 uint32_t frame_number, void *userdata) 2776{ 2777 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 2778 if (hw == NULL) { 2779 ALOGE("%s: Invalid hw %p", __func__, hw); 2780 return; 2781 } 2782 2783 hw->captureResultCb(metadata, buffer, frame_number); 2784 return; 2785} 2786 2787/*=========================================================================== 2788 * FUNCTION : initialize 2789 * 2790 * DESCRIPTION: Pass framework callback pointers to HAL 2791 * 2792 * PARAMETERS : 2793 * 2794 * 2795 * RETURN : Success : 0 2796 * Failure: -ENODEV 2797 *==========================================================================*/ 2798 2799int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 2800 const camera3_callback_ops_t *callback_ops) 2801{ 2802 ALOGV("%s: E", __func__); 2803 QCamera3HardwareInterface *hw = 2804 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2805 if (!hw) { 2806 ALOGE("%s: NULL camera device", __func__); 2807 return -ENODEV; 2808 } 2809 2810 int rc = hw->initialize(callback_ops); 2811 ALOGV("%s: X", __func__); 2812 return rc; 2813} 2814 2815/*=========================================================================== 2816 * FUNCTION : configure_streams 2817 * 2818 * DESCRIPTION: 2819 * 2820 * PARAMETERS : 2821 * 2822 * 2823 * RETURN : Success: 0 2824 * Failure: -EINVAL (if stream configuration is invalid) 2825 * -ENODEV (fatal error) 2826 *==========================================================================*/ 2827 2828int QCamera3HardwareInterface::configure_streams( 2829 const struct camera3_device *device, 2830 camera3_stream_configuration_t *stream_list) 2831{ 2832 ALOGV("%s: E", __func__); 2833 QCamera3HardwareInterface 
*hw = 2834 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2835 if (!hw) { 2836 ALOGE("%s: NULL camera device", __func__); 2837 return -ENODEV; 2838 } 2839 int rc = hw->configureStreams(stream_list); 2840 ALOGV("%s: X", __func__); 2841 return rc; 2842} 2843 2844/*=========================================================================== 2845 * FUNCTION : register_stream_buffers 2846 * 2847 * DESCRIPTION: Register stream buffers with the device 2848 * 2849 * PARAMETERS : 2850 * 2851 * RETURN : 2852 *==========================================================================*/ 2853int QCamera3HardwareInterface::register_stream_buffers( 2854 const struct camera3_device *device, 2855 const camera3_stream_buffer_set_t *buffer_set) 2856{ 2857 ALOGV("%s: E", __func__); 2858 QCamera3HardwareInterface *hw = 2859 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2860 if (!hw) { 2861 ALOGE("%s: NULL camera device", __func__); 2862 return -ENODEV; 2863 } 2864 int rc = hw->registerStreamBuffers(buffer_set); 2865 ALOGV("%s: X", __func__); 2866 return rc; 2867} 2868 2869/*=========================================================================== 2870 * FUNCTION : construct_default_request_settings 2871 * 2872 * DESCRIPTION: Configure a settings buffer to meet the required use case 2873 * 2874 * PARAMETERS : 2875 * 2876 * 2877 * RETURN : Success: Return valid metadata 2878 * Failure: Return NULL 2879 *==========================================================================*/ 2880const camera_metadata_t* QCamera3HardwareInterface:: 2881 construct_default_request_settings(const struct camera3_device *device, 2882 int type) 2883{ 2884 2885 ALOGV("%s: E", __func__); 2886 camera_metadata_t* fwk_metadata = NULL; 2887 QCamera3HardwareInterface *hw = 2888 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2889 if (!hw) { 2890 ALOGE("%s: NULL camera device", __func__); 2891 return NULL; 2892 } 2893 2894 fwk_metadata = 
hw->translateCapabilityToMetadata(type); 2895 2896 ALOGV("%s: X", __func__); 2897 return fwk_metadata; 2898} 2899 2900/*=========================================================================== 2901 * FUNCTION : process_capture_request 2902 * 2903 * DESCRIPTION: 2904 * 2905 * PARAMETERS : 2906 * 2907 * 2908 * RETURN : 2909 *==========================================================================*/ 2910int QCamera3HardwareInterface::process_capture_request( 2911 const struct camera3_device *device, 2912 camera3_capture_request_t *request) 2913{ 2914 ALOGV("%s: E", __func__); 2915 QCamera3HardwareInterface *hw = 2916 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2917 if (!hw) { 2918 ALOGE("%s: NULL camera device", __func__); 2919 return -EINVAL; 2920 } 2921 2922 int rc = hw->processCaptureRequest(request); 2923 ALOGV("%s: X", __func__); 2924 return rc; 2925} 2926 2927/*=========================================================================== 2928 * FUNCTION : get_metadata_vendor_tag_ops 2929 * 2930 * DESCRIPTION: 2931 * 2932 * PARAMETERS : 2933 * 2934 * 2935 * RETURN : 2936 *==========================================================================*/ 2937 2938void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 2939 const struct camera3_device *device, 2940 vendor_tag_query_ops_t* ops) 2941{ 2942 ALOGV("%s: E", __func__); 2943 QCamera3HardwareInterface *hw = 2944 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2945 if (!hw) { 2946 ALOGE("%s: NULL camera device", __func__); 2947 return; 2948 } 2949 2950 hw->getMetadataVendorTagOps(ops); 2951 ALOGV("%s: X", __func__); 2952 return; 2953} 2954 2955/*=========================================================================== 2956 * FUNCTION : dump 2957 * 2958 * DESCRIPTION: 2959 * 2960 * PARAMETERS : 2961 * 2962 * 2963 * RETURN : 2964 *==========================================================================*/ 2965 2966void QCamera3HardwareInterface::dump( 2967 const struct 
camera3_device *device, int fd) 2968{ 2969 ALOGV("%s: E", __func__); 2970 QCamera3HardwareInterface *hw = 2971 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2972 if (!hw) { 2973 ALOGE("%s: NULL camera device", __func__); 2974 return; 2975 } 2976 2977 hw->dump(fd); 2978 ALOGV("%s: X", __func__); 2979 return; 2980} 2981 2982/*=========================================================================== 2983 * FUNCTION : close_camera_device 2984 * 2985 * DESCRIPTION: 2986 * 2987 * PARAMETERS : 2988 * 2989 * 2990 * RETURN : 2991 *==========================================================================*/ 2992int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 2993{ 2994 ALOGV("%s: E", __func__); 2995 int ret = NO_ERROR; 2996 QCamera3HardwareInterface *hw = 2997 reinterpret_cast<QCamera3HardwareInterface *>( 2998 reinterpret_cast<camera3_device_t *>(device)->priv); 2999 if (!hw) { 3000 ALOGE("NULL camera device"); 3001 return BAD_VALUE; 3002 } 3003 delete hw; 3004 ALOGV("%s: X", __func__); 3005 return ret; 3006} 3007 3008}; //end namespace qcamera 3009