QCamera3HWI.cpp revision 08ad023e7517b0bb44b3e636abfea3ef3bba0aff
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*   * Redistributions of source code must retain the above copyright
*     notice, this list of conditions and the following disclaimer.
*   * Redistributions in binary form must reproduce the above
*     copyright notice, this list of conditions and the following
*     disclaimer in the documentation and/or other materials provided
*     with the distribution.
*   * Neither the name of The Linux Foundation nor the names of its
*     contributors may be used to endorse or promote products derived
*     from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"

#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdlib.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"

using namespace android;

namespace qcamera {

// Shorthand for fetching the mapped pointer of buffer INDEX from a camera
// memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Per-sensor capability tables; indexed by camera id.
// NOTE(review): presumably populated by the module layer before any
// QCamera3HardwareInterface is constructed -- confirm against the caller.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
parm_buffer_t *prevSettings;
// Static camera characteristics handed to the framework, per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// The QCameraMap tables below translate framework (android.control.*) enum
// values into the vendor (CAM_*) enums used by the mm-camera interface.

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED,    CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO maps onto the vendor's "antishake" scene mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    // AF OFF is translated to fixed focus on the vendor side.
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// The AE mode determines the flash policy: plain OFF/ON AE modes keep the
// flash off; the *_FLASH modes enable auto or forced flash.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_ON },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH}
};

// Flat list of (width, height) pairs of supported JPEG thumbnail sizes.
// NOTE(review): the trailing 0,0 pair presumably means "no thumbnail" --
// confirm against the static-metadata consumer.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};

// camera3_device_ops vtable handed to the framework; each entry forwards to
// the corresponding static trampoline on QCamera3HardwareInterface.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
};


/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
*==========================================================================*/ 150QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 151 : mCameraId(cameraId), 152 mCameraHandle(NULL), 153 mCameraOpened(false), 154 mCallbackOps(NULL), 155 mInputStream(NULL), 156 mMetadataChannel(NULL), 157 mFirstRequest(false), 158 mParamHeap(NULL), 159 mParameters(NULL), 160 mJpegSettings(NULL) 161{ 162 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 163 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 164 mCameraDevice.common.close = close_camera_device; 165 mCameraDevice.ops = &mCameraOps; 166 mCameraDevice.priv = this; 167 gCamCapability[cameraId]->version = CAM_HAL_V3; 168 169 pthread_mutex_init(&mRequestLock, NULL); 170 pthread_cond_init(&mRequestCond, NULL); 171 mPendingRequest = 0; 172 173 pthread_mutex_init(&mMutex, NULL); 174 pthread_mutex_init(&mCaptureResultLock, NULL); 175 176 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 177 mDefaultMetadata[i] = NULL; 178} 179 180/*=========================================================================== 181 * FUNCTION : ~QCamera3HardwareInterface 182 * 183 * DESCRIPTION: destructor of QCamera3HardwareInterface 184 * 185 * PARAMETERS : none 186 * 187 * RETURN : none 188 *==========================================================================*/ 189QCamera3HardwareInterface::~QCamera3HardwareInterface() 190{ 191 ALOGV("%s: E", __func__); 192 /* Clean up all channels */ 193 mMetadataChannel->stop(); 194 delete mMetadataChannel; 195 mMetadataChannel = NULL; 196 /* We need to stop all streams before deleting any stream */ 197 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 198 it != mStreamInfo.end(); it++) { 199 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 200 channel->stop(); 201 } 202 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 203 it != mStreamInfo.end(); it++) { 204 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 205 
delete channel; 206 free (*it); 207 } 208 209 if (mJpegSettings != NULL) { 210 free(mJpegSettings); 211 mJpegSettings = NULL; 212 } 213 deinitParameters(); 214 closeCamera(); 215 216 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 217 if (mDefaultMetadata[i]) 218 free_camera_metadata(mDefaultMetadata[i]); 219 220 pthread_mutex_destroy(&mRequestLock); 221 pthread_cond_destroy(&mRequestCond); 222 223 pthread_mutex_destroy(&mMutex); 224 pthread_mutex_destroy(&mCaptureResultLock); 225 ALOGV("%s: X", __func__); 226} 227 228/*=========================================================================== 229 * FUNCTION : openCamera 230 * 231 * DESCRIPTION: open camera 232 * 233 * PARAMETERS : 234 * @hw_device : double ptr for camera device struct 235 * 236 * RETURN : int32_t type of status 237 * NO_ERROR -- success 238 * none-zero failure code 239 *==========================================================================*/ 240int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 241{ 242 //int rc = NO_ERROR; 243 int rc = 0; 244 if (mCameraOpened) { 245 *hw_device = NULL; 246 return PERMISSION_DENIED; 247 } 248 249 rc = openCamera(); 250 if (rc == 0) 251 *hw_device = &mCameraDevice.common; 252 else 253 *hw_device = NULL; 254 return rc; 255} 256 257/*=========================================================================== 258 * FUNCTION : openCamera 259 * 260 * DESCRIPTION: open camera 261 * 262 * PARAMETERS : none 263 * 264 * RETURN : int32_t type of status 265 * NO_ERROR -- success 266 * none-zero failure code 267 *==========================================================================*/ 268int QCamera3HardwareInterface::openCamera() 269{ 270 if (mCameraHandle) { 271 ALOGE("Failure: Camera already opened"); 272 return ALREADY_EXISTS; 273 } 274 mCameraHandle = camera_open(mCameraId); 275 if (!mCameraHandle) { 276 ALOGE("camera_open failed."); 277 return UNKNOWN_ERROR; 278 } 279 280 mCameraOpened = true; 281 282 return NO_ERROR; 283} 284 
285/*=========================================================================== 286 * FUNCTION : closeCamera 287 * 288 * DESCRIPTION: close camera 289 * 290 * PARAMETERS : none 291 * 292 * RETURN : int32_t type of status 293 * NO_ERROR -- success 294 * none-zero failure code 295 *==========================================================================*/ 296int QCamera3HardwareInterface::closeCamera() 297{ 298 int rc = NO_ERROR; 299 300 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 301 mCameraHandle = NULL; 302 mCameraOpened = false; 303 304 return rc; 305} 306 307/*=========================================================================== 308 * FUNCTION : initialize 309 * 310 * DESCRIPTION: Initialize frameworks callback functions 311 * 312 * PARAMETERS : 313 * @callback_ops : callback function to frameworks 314 * 315 * RETURN : 316 * 317 *==========================================================================*/ 318int QCamera3HardwareInterface::initialize( 319 const struct camera3_callback_ops *callback_ops) 320{ 321 int rc; 322 323 pthread_mutex_lock(&mMutex); 324 325 rc = initParameters(); 326 if (rc < 0) { 327 ALOGE("%s: initParamters failed %d", __func__, rc); 328 goto err1; 329 } 330 //Create metadata channel and initialize it 331 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 332 mCameraHandle->ops, captureResultCb, 333 &gCamCapability[mCameraId]->padding_info, this); 334 if (mMetadataChannel == NULL) { 335 ALOGE("%s: failed to allocate metadata channel", __func__); 336 rc = -ENOMEM; 337 goto err2; 338 } 339 rc = mMetadataChannel->initialize(); 340 if (rc < 0) { 341 ALOGE("%s: metadata channel initialization failed", __func__); 342 goto err3; 343 } 344 345 mCallbackOps = callback_ops; 346 347 pthread_mutex_unlock(&mMutex); 348 return 0; 349 350err3: 351 delete mMetadataChannel; 352 mMetadataChannel = NULL; 353err2: 354 deinitParameters(); 355err1: 356 pthread_mutex_unlock(&mMutex); 357 return rc; 
358} 359 360/*=========================================================================== 361 * FUNCTION : configureStreams 362 * 363 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 364 * and output streams. 365 * 366 * PARAMETERS : 367 * @stream_list : streams to be configured 368 * 369 * RETURN : 370 * 371 *==========================================================================*/ 372int QCamera3HardwareInterface::configureStreams( 373 camera3_stream_configuration_t *streamList) 374{ 375 int rc = 0; 376 pthread_mutex_lock(&mMutex); 377 378 // Sanity check stream_list 379 if (streamList == NULL) { 380 ALOGE("%s: NULL stream configuration", __func__); 381 pthread_mutex_unlock(&mMutex); 382 return BAD_VALUE; 383 } 384 385 if (streamList->streams == NULL) { 386 ALOGE("%s: NULL stream list", __func__); 387 pthread_mutex_unlock(&mMutex); 388 return BAD_VALUE; 389 } 390 391 if (streamList->num_streams < 1) { 392 ALOGE("%s: Bad number of streams requested: %d", __func__, 393 streamList->num_streams); 394 pthread_mutex_unlock(&mMutex); 395 return BAD_VALUE; 396 } 397 398 camera3_stream_t *inputStream = NULL; 399 /* first invalidate all the steams in the mStreamList 400 * if they appear again, they will be validated */ 401 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 402 it != mStreamInfo.end(); it++) { 403 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 404 channel->stop(); 405 (*it)->status = INVALID; 406 } 407 408 for (size_t i = 0; i < streamList->num_streams; i++) { 409 camera3_stream_t *newStream = streamList->streams[i]; 410 ALOGV("%s: newStream type = %d, stream format = %d", 411 __func__, newStream->stream_type, newStream->format); 412 //if the stream is in the mStreamList validate it 413 bool stream_exists = false; 414 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 415 it != mStreamInfo.end(); it++) { 416 if ((*it)->stream == newStream) { 417 QCamera3Channel *channel = 418 
(QCamera3Channel*)(*it)->stream->priv; 419 stream_exists = true; 420 (*it)->status = RECONFIGURE; 421 /*delete the channel object associated with the stream because 422 we need to reconfigure*/ 423 delete channel; 424 (*it)->stream->priv = NULL; 425 } 426 } 427 if (!stream_exists) { 428 //new stream 429 stream_info_t* stream_info; 430 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 431 stream_info->stream = newStream; 432 stream_info->status = VALID; 433 stream_info->registered = 0; 434 mStreamInfo.push_back(stream_info); 435 } 436 if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 437 if (inputStream != NULL) { 438 ALOGE("%s: Multiple input streams requested!", __func__); 439 pthread_mutex_unlock(&mMutex); 440 return BAD_VALUE; 441 } 442 inputStream = newStream; 443 } 444 } 445 mInputStream = inputStream; 446 447 /*clean up invalid streams*/ 448 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 449 it != mStreamInfo.end();) { 450 if(((*it)->status) == INVALID){ 451 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 452 delete channel; 453 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 454 free(*it); 455 it = mStreamInfo.erase(it); 456 } else { 457 it++; 458 } 459 } 460 461 //mMetadataChannel->stop(); 462 463 /* Allocate channel objects for the requested streams */ 464 for (size_t i = 0; i < streamList->num_streams; i++) { 465 camera3_stream_t *newStream = streamList->streams[i]; 466 if (newStream->priv == NULL) { 467 //New stream, construct channel 468 switch (newStream->stream_type) { 469 case CAMERA3_STREAM_INPUT: 470 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 471 break; 472 case CAMERA3_STREAM_BIDIRECTIONAL: 473 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 474 GRALLOC_USAGE_HW_CAMERA_WRITE; 475 break; 476 case CAMERA3_STREAM_OUTPUT: 477 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 478 break; 479 default: 480 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 481 break; 482 } 
483 484 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 485 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 486 QCamera3Channel *channel; 487 switch (newStream->format) { 488 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 489 case HAL_PIXEL_FORMAT_YCbCr_420_888: 490 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 491 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 492 mCameraHandle->ops, captureResultCb, 493 &gCamCapability[mCameraId]->padding_info, this, newStream); 494 if (channel == NULL) { 495 ALOGE("%s: allocation of channel failed", __func__); 496 pthread_mutex_unlock(&mMutex); 497 return -ENOMEM; 498 } 499 500 newStream->priv = channel; 501 break; 502 case HAL_PIXEL_FORMAT_BLOB: 503 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 504 channel = new QCamera3PicChannel(mCameraHandle->camera_handle, 505 mCameraHandle->ops, captureResultCb, 506 &gCamCapability[mCameraId]->padding_info, this, newStream); 507 if (channel == NULL) { 508 ALOGE("%s: allocation of channel failed", __func__); 509 pthread_mutex_unlock(&mMutex); 510 return -ENOMEM; 511 } 512 newStream->priv = channel; 513 break; 514 515 //TODO: Add support for app consumed format? 
516 default: 517 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 518 break; 519 } 520 } 521 } else { 522 // Channel already exists for this stream 523 // Do nothing for now 524 } 525 } 526 /*For the streams to be reconfigured we need to register the buffers 527 since the framework wont*/ 528 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 529 it != mStreamInfo.end(); it++) { 530 if ((*it)->status == RECONFIGURE) { 531 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 532 /*only register buffers for streams that have already been 533 registered*/ 534 if ((*it)->registered) { 535 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 536 (*it)->buffer_set.buffers); 537 if (rc != NO_ERROR) { 538 ALOGE("%s: Failed to register the buffers of old stream,\ 539 rc = %d", __func__, rc); 540 } 541 ALOGD("%s: channel %p has %d buffers", 542 __func__, channel, (*it)->buffer_set.num_buffers); 543 } 544 } 545 546 ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream); 547 if (index == NAME_NOT_FOUND) { 548 mPendingBuffersMap.add((*it)->stream, 0); 549 } else { 550 mPendingBuffersMap.editValueAt(index) = 0; 551 } 552 } 553 554 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 555 mPendingRequestsList.clear(); 556 557 //settings/parameters don't carry over for new configureStreams 558 memset(mParameters, 0, sizeof(parm_buffer_t)); 559 mFirstRequest = true; 560 561 pthread_mutex_unlock(&mMutex); 562 return rc; 563} 564 565/*=========================================================================== 566 * FUNCTION : validateCaptureRequest 567 * 568 * DESCRIPTION: validate a capture request from camera service 569 * 570 * PARAMETERS : 571 * @request : request from framework to process 572 * 573 * RETURN : 574 * 575 *==========================================================================*/ 576int QCamera3HardwareInterface::validateCaptureRequest( 577 camera3_capture_request_t *request) 578{ 579 ssize_t idx 
= 0; 580 const camera3_stream_buffer_t *b; 581 CameraMetadata meta; 582 583 /* Sanity check the request */ 584 if (request == NULL) { 585 ALOGE("%s: NULL capture request", __func__); 586 return BAD_VALUE; 587 } 588 589 uint32_t frameNumber = request->frame_number; 590 if (request->input_buffer != NULL && 591 request->input_buffer->stream != mInputStream) { 592 ALOGE("%s: Request %d: Input buffer not from input stream!", 593 __FUNCTION__, frameNumber); 594 return BAD_VALUE; 595 } 596 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 597 ALOGE("%s: Request %d: No output buffers provided!", 598 __FUNCTION__, frameNumber); 599 return BAD_VALUE; 600 } 601 if (request->input_buffer != NULL) { 602 //TODO 603 ALOGE("%s: Not supporting input buffer yet", __func__); 604 return BAD_VALUE; 605 } 606 607 // Validate all buffers 608 b = request->output_buffers; 609 do { 610 QCamera3Channel *channel = 611 static_cast<QCamera3Channel*>(b->stream->priv); 612 if (channel == NULL) { 613 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 614 __func__, frameNumber, idx); 615 return BAD_VALUE; 616 } 617 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 618 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 619 __func__, frameNumber, idx); 620 return BAD_VALUE; 621 } 622 if (b->release_fence != -1) { 623 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 624 __func__, frameNumber, idx); 625 return BAD_VALUE; 626 } 627 if (b->buffer == NULL) { 628 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 629 __func__, frameNumber, idx); 630 return BAD_VALUE; 631 } 632 idx++; 633 b = request->output_buffers + idx; 634 } while (idx < (ssize_t)request->num_output_buffers); 635 636 return NO_ERROR; 637} 638 639/*=========================================================================== 640 * FUNCTION : registerStreamBuffers 641 * 642 * DESCRIPTION: Register buffers for a given stream with the HAL device. 
643 * 644 * PARAMETERS : 645 * @stream_list : streams to be configured 646 * 647 * RETURN : 648 * 649 *==========================================================================*/ 650int QCamera3HardwareInterface::registerStreamBuffers( 651 const camera3_stream_buffer_set_t *buffer_set) 652{ 653 int rc = 0; 654 655 pthread_mutex_lock(&mMutex); 656 657 if (buffer_set == NULL) { 658 ALOGE("%s: Invalid buffer_set parameter.", __func__); 659 pthread_mutex_unlock(&mMutex); 660 return -EINVAL; 661 } 662 if (buffer_set->stream == NULL) { 663 ALOGE("%s: Invalid stream parameter.", __func__); 664 pthread_mutex_unlock(&mMutex); 665 return -EINVAL; 666 } 667 if (buffer_set->num_buffers < 1) { 668 ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers); 669 pthread_mutex_unlock(&mMutex); 670 return -EINVAL; 671 } 672 if (buffer_set->buffers == NULL) { 673 ALOGE("%s: Invalid buffers parameter.", __func__); 674 pthread_mutex_unlock(&mMutex); 675 return -EINVAL; 676 } 677 678 camera3_stream_t *stream = buffer_set->stream; 679 QCamera3Channel *channel = (QCamera3Channel *)stream->priv; 680 681 //set the buffer_set in the mStreamInfo array 682 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 683 it != mStreamInfo.end(); it++) { 684 if ((*it)->stream == stream) { 685 uint32_t numBuffers = buffer_set->num_buffers; 686 (*it)->buffer_set.stream = buffer_set->stream; 687 (*it)->buffer_set.num_buffers = numBuffers; 688 (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers]; 689 if ((*it)->buffer_set.buffers == NULL) { 690 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__); 691 pthread_mutex_unlock(&mMutex); 692 return -ENOMEM; 693 } 694 for (size_t j = 0; j < numBuffers; j++){ 695 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j]; 696 } 697 (*it)->registered = 1; 698 } 699 } 700 701 if (stream->stream_type != CAMERA3_STREAM_OUTPUT) { 702 ALOGE("%s: not yet support non output type stream", __func__); 703 pthread_mutex_unlock(&mMutex); 704 
return -EINVAL; 705 } 706 rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers); 707 if (rc < 0) { 708 ALOGE("%s: registerBUffers for stream %p failed", __func__, stream); 709 pthread_mutex_unlock(&mMutex); 710 return -ENODEV; 711 } 712 713 pthread_mutex_unlock(&mMutex); 714 return NO_ERROR; 715} 716 717/*=========================================================================== 718 * FUNCTION : processCaptureRequest 719 * 720 * DESCRIPTION: process a capture request from camera service 721 * 722 * PARAMETERS : 723 * @request : request from framework to process 724 * 725 * RETURN : 726 * 727 *==========================================================================*/ 728int QCamera3HardwareInterface::processCaptureRequest( 729 camera3_capture_request_t *request) 730{ 731 int rc = NO_ERROR; 732 CameraMetadata meta; 733 734 pthread_mutex_lock(&mMutex); 735 736 rc = validateCaptureRequest(request); 737 if (rc != NO_ERROR) { 738 ALOGE("%s: incoming request is not valid", __func__); 739 pthread_mutex_unlock(&mMutex); 740 return rc; 741 } 742 743 uint32_t frameNumber = request->frame_number; 744 745 rc = setFrameParameters(request->frame_number, request->settings); 746 if (rc < 0) { 747 ALOGE("%s: fail to set frame parameters", __func__); 748 pthread_mutex_unlock(&mMutex); 749 return rc; 750 } 751 752 ALOGV("%s: %d, num_output_buffers = %d", __func__, __LINE__, 753 request->num_output_buffers); 754 // Acquire all request buffers first 755 for (size_t i = 0; i < request->num_output_buffers; i++) { 756 const camera3_stream_buffer_t& output = request->output_buffers[i]; 757 sp<Fence> acquireFence = new Fence(output.acquire_fence); 758 759 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 760 //Call function to store local copy of jpeg data for encode params. 
761 rc = getJpegSettings(request->settings); 762 if (rc < 0) { 763 ALOGE("%s: failed to get jpeg parameters", __func__); 764 pthread_mutex_unlock(&mMutex); 765 return rc; 766 } 767 } 768 769 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 770 if (rc != OK) { 771 ALOGE("%s: fence wait failed %d", __func__, rc); 772 pthread_mutex_unlock(&mMutex); 773 return rc; 774 } 775 } 776 777 /* Update pending request list and pending buffers map */ 778 pthread_mutex_lock(&mRequestLock); 779 PendingRequestInfo pendingRequest; 780 pendingRequest.frame_number = frameNumber; 781 pendingRequest.num_buffers = request->num_output_buffers; 782 for (size_t i = 0; i < request->num_output_buffers; i++) { 783 RequestedBufferInfo requestedBuf; 784 requestedBuf.stream = request->output_buffers[i].stream; 785 requestedBuf.buffer = NULL; 786 pendingRequest.buffers.push_back(requestedBuf); 787 788 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 789 } 790 mPendingRequestsList.push_back(pendingRequest); 791 pthread_mutex_unlock(&mRequestLock); 792 793 // Notify metadata channel we receive a request 794 mMetadataChannel->request(NULL, frameNumber); 795 796 // Call request on other streams 797 for (size_t i = 0; i < request->num_output_buffers; i++) { 798 const camera3_stream_buffer_t& output = request->output_buffers[i]; 799 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 800 801 if (channel == NULL) { 802 ALOGE("%s: invalid channel pointer for stream", __func__); 803 continue; 804 } 805 806 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 807 rc = channel->request(output.buffer, frameNumber, mJpegSettings); 808 } else { 809 ALOGI("%s: %d, request with buffer %p, frame_number %d", __func__, __LINE__, output.buffer, frameNumber); 810 rc = channel->request(output.buffer, frameNumber); 811 } 812 if (rc < 0) 813 ALOGE("%s: request failed", __func__); 814 } 815 816 mFirstRequest = false; 817 818 //Block on conditional variable 819 pthread_mutex_lock(&mRequestLock); 820 
mPendingRequest = 1; 821 while (mPendingRequest == 1) { 822 pthread_cond_wait(&mRequestCond, &mRequestLock); 823 } 824 pthread_mutex_unlock(&mRequestLock); 825 826 pthread_mutex_unlock(&mMutex); 827 return rc; 828} 829 830/*=========================================================================== 831 * FUNCTION : getMetadataVendorTagOps 832 * 833 * DESCRIPTION: 834 * 835 * PARAMETERS : 836 * 837 * 838 * RETURN : 839 *==========================================================================*/ 840void QCamera3HardwareInterface::getMetadataVendorTagOps( 841 vendor_tag_query_ops_t* /*ops*/) 842{ 843 /* Enable locks when we eventually add Vendor Tags */ 844 /* 845 pthread_mutex_lock(&mMutex); 846 847 pthread_mutex_unlock(&mMutex); 848 */ 849 return; 850} 851 852/*=========================================================================== 853 * FUNCTION : dump 854 * 855 * DESCRIPTION: 856 * 857 * PARAMETERS : 858 * 859 * 860 * RETURN : 861 *==========================================================================*/ 862void QCamera3HardwareInterface::dump(int /*fd*/) 863{ 864 /*Enable lock when we implement this function*/ 865 /* 866 pthread_mutex_lock(&mMutex); 867 868 pthread_mutex_unlock(&mMutex); 869 */ 870 return; 871} 872 873/*=========================================================================== 874 * FUNCTION : captureResultCb 875 * 876 * DESCRIPTION: Callback handler for all capture result 877 * (streams, as well as metadata) 878 * 879 * PARAMETERS : 880 * @metadata : metadata information 881 * @buffer : actual gralloc buffer to be returned to frameworks. 882 * NULL if metadata. 
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // Entire callback runs under mRequestLock: it touches the pending
    // request list/buffer map shared with processCaptureRequest.
    pthread_mutex_lock(&mRequestLock);

    if (metadata_buf) {
        // ---- Metadata path ----
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        // Intentionally shadows the frame_number parameter: on this path the
        // authoritative frame number comes from the metadata itself.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGD("%s: Not a valid frame number, used as SOF only", __func__);
            // Return the metadata buffer to the channel and just run the
            // unblock logic below.
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGD("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
                i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGD("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by subtracting
            //from the reported timestamp (one 33ms frame interval per dropped frame).
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGD("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                // Dropped frame: synthesize metadata holding only the faked
                // timestamp.
                CameraMetadata emptyMetadata(1, 0);
                emptyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                result.result = emptyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time);
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
            }
            // NOTE(review): result.result may still be NULL here and is then
            // passed to process_capture_result -- confirm the framework
            // tolerates a NULL result on this HAL version.
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count stream buffers that have already been cached for this
            // request by the buffer path below.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                // NOTE(review): operator new throws rather than returning
                // NULL, so this check is dead code; on the (hypothetical)
                // NULL it would still be dereferenced below.
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Move the cached buffer (malloc'd in the buffer
                        // path) into the result array and release it.
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                // Metadata-only result; buffers will follow later through
                // the buffer path.
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGD("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Unblock processCaptureRequest unless some stream already has its
        // maximum number of buffers outstanding.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // ---- Buffer path ----
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number)
            i++;
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            // Metadata for this frame was already delivered: return the
            // buffer immediately as a buffer-only result.
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGD("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Request still pending: cache a copy of the buffer so the
            // metadata path can send it together with the result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGD("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }

    pthread_mutex_unlock(&mRequestLock);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
1069QCamera3HardwareInterface::translateCbMetadataToResultMetadata 1070 (metadata_buffer_t *metadata, nsecs_t timestamp) 1071{ 1072 CameraMetadata camMetadata; 1073 camera_metadata_t* resultMetadata; 1074 1075 1076 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 1077 1078 /*CAM_INTF_META_HISTOGRAM - TODO*/ 1079 /*cam_hist_stats_t *histogram = 1080 (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM, 1081 metadata);*/ 1082 1083 /*face detection*/ 1084 cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *) 1085 POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata); 1086 uint8_t numFaces = faceDetectionInfo->num_faces_detected; 1087 int32_t faceIds[numFaces]; 1088 uint8_t faceScores[numFaces]; 1089 int32_t faceRectangles[numFaces * 4]; 1090 int32_t faceLandmarks[numFaces * 6]; 1091 int j = 0, k = 0; 1092 for (int i = 0; i < numFaces; i++) { 1093 faceIds[i] = faceDetectionInfo->faces[i].face_id; 1094 faceScores[i] = faceDetectionInfo->faces[i].score; 1095 convertToRegions(faceDetectionInfo->faces[i].face_boundary, 1096 faceRectangles+j, -1); 1097 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 1098 j+= 4; 1099 k+= 6; 1100 } 1101 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 1102 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 1103 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, 1104 faceRectangles, numFaces*4); 1105 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1106 faceLandmarks, numFaces*6); 1107 1108 1109 /*autofocus - TODO*/ 1110 /*cam_auto_focus_data_t *afData =(cam_auto_focus_data_t *) 1111 POINTER_OF(CAM_INTF_META_AUTOFOCUS_DATA,metadata);*/ 1112 1113 uint8_t *color_correct_mode = 1114 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1115 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1116 1117 int32_t *ae_precapture_id = 1118 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1119 
camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1120 1121 /*aec regions*/ 1122 cam_area_t *hAeRegions = 1123 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1124 int32_t aeRegions[5]; 1125 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1126 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1127 1128 uint8_t *ae_state = 1129 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1130 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1131 1132 uint8_t *focusMode = 1133 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1134 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1135 1136 /*af regions*/ 1137 cam_area_t *hAfRegions = 1138 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1139 int32_t afRegions[5]; 1140 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1141 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1142 1143 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1144 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1145 1146 int32_t *afTriggerId = 1147 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1148 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1149 1150 uint8_t *whiteBalance = 1151 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1152 camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1); 1153 1154 /*awb regions*/ 1155 cam_area_t *hAwbRegions = 1156 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1157 int32_t awbRegions[5]; 1158 convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1159 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1160 1161 uint8_t *whiteBalanceState = 1162 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1163 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1164 1165 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, 
metadata); 1166 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1167 1168 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE, metadata); 1169 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1170 1171 uint8_t *flashPower = 1172 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1173 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1174 1175 int64_t *flashFiringTime = 1176 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1177 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1178 1179 /*int32_t *ledMode = 1180 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1181 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1182 1183 uint8_t *flashState = 1184 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1185 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1186 1187 uint8_t *hotPixelMode = 1188 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1189 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1190 1191 float *lensAperture = 1192 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1193 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1194 1195 float *filterDensity = 1196 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1197 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1198 1199 float *focalLength = 1200 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1201 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1202 1203 float *focusDistance = 1204 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1205 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1206 1207 float *focusRange = 1208 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1209 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1210 1211 uint8_t *opticalStab = 1212 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1213 
camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1214 1215 /*int32_t *focusState = 1216 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1217 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1218 1219 uint8_t *noiseRedMode = 1220 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1221 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1222 1223 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1224 1225 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1226 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1227 int32_t scalerCropRegion[3]; 1228 scalerCropRegion[0] = hScalerCropRegion->left; 1229 scalerCropRegion[1] = hScalerCropRegion->top; 1230 scalerCropRegion[2] = hScalerCropRegion->width; 1231 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 3); 1232 1233 int64_t *sensorExpTime = 1234 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1235 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1236 1237 int64_t *sensorFameDuration = 1238 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1239 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1240 1241 int32_t *sensorSensitivity = 1242 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1243 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1244 1245 uint8_t *shadingMode = 1246 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1247 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1248 1249 uint8_t *faceDetectMode = 1250 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1251 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1); 1252 1253 uint8_t *histogramMode = 1254 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1255 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1256 
1257 uint8_t *sharpnessMapMode = 1258 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1259 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1260 sharpnessMapMode, 1); 1261 1262 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1263 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1264 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1265 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1266 (int32_t*)sharpnessMap->sharpness, 1267 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1268 1269 resultMetadata = camMetadata.release(); 1270 return resultMetadata; 1271} 1272 1273/*=========================================================================== 1274 * FUNCTION : convertToRegions 1275 * 1276 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1277 * 1278 * PARAMETERS : 1279 * @rect : cam_rect_t struct to convert 1280 * @region : int32_t destination array 1281 * @weight : if we are converting from cam_area_t, weight is valid 1282 * else weight = -1 1283 * 1284 *==========================================================================*/ 1285void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){ 1286 region[0] = rect.left; 1287 region[1] = rect.top; 1288 region[2] = rect.left + rect.width; 1289 region[3] = rect.top + rect.height; 1290 if (weight > -1) { 1291 region[4] = weight; 1292 } 1293} 1294 1295/*=========================================================================== 1296 * FUNCTION : convertFromRegions 1297 * 1298 * DESCRIPTION: helper method to convert from array to cam_rect_t 1299 * 1300 * PARAMETERS : 1301 * @rect : cam_rect_t struct to convert 1302 * @region : int32_t destination array 1303 * @weight : if we are converting from cam_area_t, weight is valid 1304 * else weight = -1 1305 * 1306 *==========================================================================*/ 1307void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1308 
const camera_metadata_t *settings, 1309 uint32_t tag){ 1310 CameraMetadata frame_settings; 1311 frame_settings = settings; 1312 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1313 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1314 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1315 int32_t y_max = frame_settings.find(tag).data.i32[3]; 1316 roi->weight = frame_settings.find(tag).data.i32[4]; 1317 roi->rect.left = x_min; 1318 roi->rect.top = y_min; 1319 roi->rect.width = x_max - x_min; 1320 roi->rect.height = y_max - y_min; 1321} 1322 1323/*=========================================================================== 1324 * FUNCTION : convertLandmarks 1325 * 1326 * DESCRIPTION: helper method to extract the landmarks from face detection info 1327 * 1328 * PARAMETERS : 1329 * @face : cam_rect_t struct to convert 1330 * @landmarks : int32_t destination array 1331 * 1332 * 1333 *==========================================================================*/ 1334void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1335{ 1336 landmarks[0] = face.left_eye_center.x; 1337 landmarks[1] = face.left_eye_center.y; 1338 landmarks[2] = face.right_eye_center.y; 1339 landmarks[3] = face.right_eye_center.y; 1340 landmarks[4] = face.mouth_center.x; 1341 landmarks[5] = face.mouth_center.y; 1342} 1343 1344#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1345/*=========================================================================== 1346 * FUNCTION : initCapabilities 1347 * 1348 * DESCRIPTION: initialize camera capabilities in static data struct 1349 * 1350 * PARAMETERS : 1351 * @cameraId : camera Id 1352 * 1353 * RETURN : int32_t type of status 1354 * NO_ERROR -- success 1355 * none-zero failure code 1356 *==========================================================================*/ 1357int QCamera3HardwareInterface::initCapabilities(int cameraId) 1358{ 1359 int rc = 0; 1360 mm_camera_vtbl_t *cameraHandle = NULL; 
1361 QCamera3HeapMemory *capabilityHeap = NULL; 1362 1363 cameraHandle = camera_open(cameraId); 1364 if (!cameraHandle) { 1365 ALOGE("%s: camera_open failed", __func__); 1366 rc = -1; 1367 goto open_failed; 1368 } 1369 1370 capabilityHeap = new QCamera3HeapMemory(); 1371 if (capabilityHeap == NULL) { 1372 ALOGE("%s: creation of capabilityHeap failed", __func__); 1373 goto heap_creation_failed; 1374 } 1375 /* Allocate memory for capability buffer */ 1376 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1377 if(rc != OK) { 1378 ALOGE("%s: No memory for cappability", __func__); 1379 goto allocate_failed; 1380 } 1381 1382 /* Map memory for capability buffer */ 1383 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1384 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1385 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1386 capabilityHeap->getFd(0), 1387 sizeof(cam_capability_t)); 1388 if(rc < 0) { 1389 ALOGE("%s: failed to map capability buffer", __func__); 1390 goto map_failed; 1391 } 1392 1393 /* Query Capability */ 1394 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1395 if(rc < 0) { 1396 ALOGE("%s: failed to query capability",__func__); 1397 goto query_failed; 1398 } 1399 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1400 if (!gCamCapability[cameraId]) { 1401 ALOGE("%s: out of memory", __func__); 1402 goto query_failed; 1403 } 1404 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1405 sizeof(cam_capability_t)); 1406 rc = 0; 1407 1408query_failed: 1409 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1410 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1411map_failed: 1412 capabilityHeap->deallocate(); 1413allocate_failed: 1414 delete capabilityHeap; 1415heap_creation_failed: 1416 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1417 cameraHandle = NULL; 1418open_failed: 1419 return rc; 1420} 1421 
1422/*=========================================================================== 1423 * FUNCTION : initParameters 1424 * 1425 * DESCRIPTION: initialize camera parameters 1426 * 1427 * PARAMETERS : 1428 * 1429 * RETURN : int32_t type of status 1430 * NO_ERROR -- success 1431 * none-zero failure code 1432 *==========================================================================*/ 1433int QCamera3HardwareInterface::initParameters() 1434{ 1435 int rc = 0; 1436 1437 //Allocate Set Param Buffer 1438 mParamHeap = new QCamera3HeapMemory(); 1439 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1440 if(rc != OK) { 1441 rc = NO_MEMORY; 1442 ALOGE("Failed to allocate SETPARM Heap memory"); 1443 delete mParamHeap; 1444 mParamHeap = NULL; 1445 return rc; 1446 } 1447 1448 //Map memory for parameters buffer 1449 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1450 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1451 mParamHeap->getFd(0), 1452 sizeof(parm_buffer_t)); 1453 if(rc < 0) { 1454 ALOGE("%s:failed to map SETPARM buffer",__func__); 1455 rc = FAILED_TRANSACTION; 1456 mParamHeap->deallocate(); 1457 delete mParamHeap; 1458 mParamHeap = NULL; 1459 return rc; 1460 } 1461 1462 mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0); 1463 return rc; 1464} 1465 1466/*=========================================================================== 1467 * FUNCTION : deinitParameters 1468 * 1469 * DESCRIPTION: de-initialize camera parameters 1470 * 1471 * PARAMETERS : 1472 * 1473 * RETURN : NONE 1474 *==========================================================================*/ 1475void QCamera3HardwareInterface::deinitParameters() 1476{ 1477 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 1478 CAM_MAPPING_BUF_TYPE_PARM_BUF); 1479 1480 mParamHeap->deallocate(); 1481 delete mParamHeap; 1482 mParamHeap = NULL; 1483 1484 mParameters = NULL; 1485} 1486 1487/*=========================================================================== 1488 * FUNCTION : calcMaxJpegSize 1489 * 
1490 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 1491 * 1492 * PARAMETERS : 1493 * 1494 * RETURN : max_jpeg_size 1495 *==========================================================================*/ 1496int QCamera3HardwareInterface::calcMaxJpegSize() 1497{ 1498 int32_t max_jpeg_size = 0; 1499 int temp_width, temp_height; 1500 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 1501 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1502 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1503 if (temp_width * temp_height > max_jpeg_size ) { 1504 max_jpeg_size = temp_width * temp_height; 1505 } 1506 } 1507 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1508 return max_jpeg_size; 1509} 1510 1511/*=========================================================================== 1512 * FUNCTION : initStaticMetadata 1513 * 1514 * DESCRIPTION: initialize the static metadata 1515 * 1516 * PARAMETERS : 1517 * @cameraId : camera Id 1518 * 1519 * RETURN : int32_t type of status 1520 * 0 -- success 1521 * non-zero failure code 1522 *==========================================================================*/ 1523int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1524{ 1525 int rc = 0; 1526 CameraMetadata staticInfo; 1527 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1528 /*HAL 3 only*/ 1529 /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1530 &gCamCapability[cameraId]->min_focus_distance, 1); */ 1531 1532 /*hard coded for now but this should come from sensor*/ 1533 float min_focus_distance; 1534 if(facingBack){ 1535 min_focus_distance = 10; 1536 } else { 1537 min_focus_distance = 0; 1538 } 1539 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1540 &min_focus_distance, 1); 1541 1542 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1543 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1544 1545 /*should 
be using focal lengths but sensor doesn't provide that info now*/ 1546 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1547 &gCamCapability[cameraId]->focal_length, 1548 1); 1549 1550 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1551 gCamCapability[cameraId]->apertures, 1552 gCamCapability[cameraId]->apertures_count); 1553 1554 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1555 gCamCapability[cameraId]->filter_densities, 1556 gCamCapability[cameraId]->filter_densities_count); 1557 1558 1559 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1560 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1561 gCamCapability[cameraId]->optical_stab_modes_count); 1562 1563 staticInfo.update(ANDROID_LENS_POSITION, 1564 gCamCapability[cameraId]->lens_position, 1565 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1566 1567 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1568 gCamCapability[cameraId]->lens_shading_map_size.height}; 1569 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1570 lens_shading_map_size, 1571 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1572 1573 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP, gCamCapability[cameraId]->lens_shading_map, 1574 sizeof(gCamCapability[cameraId]->lens_shading_map)/ sizeof(float)); 1575 1576 int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1577 gCamCapability[cameraId]->geo_correction_map_size.height}; 1578 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1579 geo_correction_map_size, 1580 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1581 1582 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1583 gCamCapability[cameraId]->geo_correction_map, 1584 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1585 1586 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1587 
gCamCapability[cameraId]->sensor_physical_size, 2); 1588 1589 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1590 gCamCapability[cameraId]->exposure_time_range, 2); 1591 1592 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1593 &gCamCapability[cameraId]->max_frame_duration, 1); 1594 1595 1596 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1597 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1598 1599 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1600 gCamCapability[cameraId]->pixel_array_size.height}; 1601 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1602 pixel_array_size, 2); 1603 1604 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.width, 1605 gCamCapability[cameraId]->active_array_size.height}; 1606 1607 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1608 active_array_size, 2); 1609 1610 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1611 &gCamCapability[cameraId]->white_level, 1); 1612 1613 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1614 gCamCapability[cameraId]->black_level_pattern, 4); 1615 1616 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1617 &gCamCapability[cameraId]->flash_charge_duration, 1); 1618 1619 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1620 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1621 1622 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1623 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1624 /*hardcode 0 for now*/ 1625 int32_t max_face_count = 0; 1626 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1627 &max_face_count, 1); 1628 1629 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1630 &gCamCapability[cameraId]->histogram_size, 1); 1631 1632 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1633 &gCamCapability[cameraId]->max_histogram_count, 1); 1634 1635 int32_t sharpness_map_size[] = 
{gCamCapability[cameraId]->sharpness_map_size.width, 1636 gCamCapability[cameraId]->sharpness_map_size.height}; 1637 1638 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1639 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1640 1641 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1642 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1643 1644 1645 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1646 &gCamCapability[cameraId]->raw_min_duration, 1647 1); 1648 1649 int32_t scalar_formats[CAM_FORMAT_MAX]; 1650 int scalar_formats_count = gCamCapability[cameraId]->supported_scalar_format_cnt; 1651 for (int i = 0; i < scalar_formats_count; i++) { 1652 scalar_formats[i] = getScalarFormat(gCamCapability[cameraId]->supported_scalar_fmts[i]); 1653 } 1654 scalar_formats[scalar_formats_count] = HAL_PIXEL_FORMAT_YCbCr_420_888; 1655 scalar_formats_count++; 1656 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1657 scalar_formats, 1658 scalar_formats_count); 1659 1660 int32_t available_processed_sizes[CAM_FORMAT_MAX * 2]; 1661 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1662 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1663 available_processed_sizes); 1664 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1665 available_processed_sizes, 1666 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1667 1668 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 1669 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1670 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1671 available_fps_ranges); 1672 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1673 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1674 1675 camera_metadata_rational exposureCompensationStep = { 1676 gCamCapability[cameraId]->exp_compensation_step.numerator, 1677 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1678 
staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1679 &exposureCompensationStep, 1); 1680 1681 /*TO DO*/ 1682 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1683 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1684 availableVstabModes, sizeof(availableVstabModes)); 1685 1686 /*HAL 1 and HAL 3 common*/ 1687 float maxZoom = 10; 1688 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1689 &maxZoom, 1); 1690 1691 int32_t max3aRegions = 1; 1692 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1693 &max3aRegions, 1); 1694 1695 uint8_t availableFaceDetectModes[] = { 1696 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1697 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1698 availableFaceDetectModes, 1699 sizeof(availableFaceDetectModes)); 1700 1701 int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1702 gCamCapability[cameraId]->raw_dim.height}; 1703 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1704 raw_size, 1705 sizeof(raw_size)/sizeof(uint32_t)); 1706 1707 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1708 gCamCapability[cameraId]->exposure_compensation_max}; 1709 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1710 exposureCompensationRange, 1711 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1712 1713 uint8_t lensFacing = (facingBack) ? 
1714 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1715 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1716 1717 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 1718 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1719 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1720 available_jpeg_sizes); 1721 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1722 available_jpeg_sizes, 1723 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1724 1725 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1726 available_thumbnail_sizes, 1727 sizeof(available_thumbnail_sizes)/sizeof(int32_t)); 1728 1729 int32_t max_jpeg_size = 0; 1730 int temp_width, temp_height; 1731 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1732 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1733 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1734 if (temp_width * temp_height > max_jpeg_size ) { 1735 max_jpeg_size = temp_width * temp_height; 1736 } 1737 } 1738 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1739 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1740 &max_jpeg_size, 1); 1741 1742 uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1743 int32_t size = 0; 1744 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1745 int val = lookupFwkName(EFFECT_MODES_MAP, 1746 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1747 gCamCapability[cameraId]->supported_effects[i]); 1748 if (val != NAME_NOT_FOUND) { 1749 avail_effects[size] = (uint8_t)val; 1750 size++; 1751 } 1752 } 1753 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1754 avail_effects, 1755 size); 1756 1757 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1758 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 1759 int32_t supported_scene_modes_cnt = 0; 1760 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1761 int val = lookupFwkName(SCENE_MODES_MAP, 1762 
sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1763 gCamCapability[cameraId]->supported_scene_modes[i]); 1764 if (val != NAME_NOT_FOUND) { 1765 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 1766 supported_indexes[supported_scene_modes_cnt] = i; 1767 supported_scene_modes_cnt++; 1768 } 1769 } 1770 1771 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1772 avail_scene_modes, 1773 supported_scene_modes_cnt); 1774 1775 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 1776 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 1777 supported_scene_modes_cnt, 1778 scene_mode_overrides, 1779 supported_indexes); 1780 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 1781 scene_mode_overrides, 1782 supported_scene_modes_cnt*3); 1783 1784 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 1785 size = 0; 1786 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 1787 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 1788 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 1789 gCamCapability[cameraId]->supported_antibandings[i]); 1790 if (val != NAME_NOT_FOUND) { 1791 avail_antibanding_modes[size] = (uint8_t)val; 1792 size++; 1793 } 1794 1795 } 1796 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1797 avail_antibanding_modes, 1798 size); 1799 1800 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 1801 size = 0; 1802 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 1803 int val = lookupFwkName(FOCUS_MODES_MAP, 1804 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 1805 gCamCapability[cameraId]->supported_focus_modes[i]); 1806 if (val != NAME_NOT_FOUND) { 1807 avail_af_modes[size] = (uint8_t)val; 1808 size++; 1809 } 1810 } 1811 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1812 avail_af_modes, 1813 size); 1814 1815 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 1816 size = 0; 1817 for (int i = 0; i < 
gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 1818 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 1819 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 1820 gCamCapability[cameraId]->supported_white_balances[i]); 1821 if (val != NAME_NOT_FOUND) { 1822 avail_awb_modes[size] = (uint8_t)val; 1823 size++; 1824 } 1825 } 1826 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1827 avail_awb_modes, 1828 size); 1829 1830 uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 1831 size = 0; 1832 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 1833 int val = lookupFwkName(FLASH_MODES_MAP, 1834 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 1835 gCamCapability[cameraId]->supported_flash_modes[i]); 1836 if (val != NAME_NOT_FOUND) { 1837 avail_flash_modes[size] = (uint8_t)val; 1838 size++; 1839 } 1840 } 1841 static uint8_t flashAvailable = 0; 1842 if (size > 1) { 1843 //flash is supported 1844 flashAvailable = 1; 1845 } 1846 staticInfo.update(ANDROID_FLASH_MODE, 1847 avail_flash_modes, 1848 size); 1849 1850 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 1851 &flashAvailable, 1); 1852 1853 uint8_t avail_ae_modes[5]; 1854 size = 0; 1855 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) { 1856 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i]; 1857 size++; 1858 } 1859 if (flashAvailable) { 1860 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; 1861 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; 1862 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; 1863 } 1864 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1865 avail_ae_modes, 1866 size); 1867 1868 gStaticMetadata[cameraId] = staticInfo.release(); 1869 return rc; 1870} 1871 1872/*=========================================================================== 1873 * FUNCTION : makeTable 1874 * 1875 * DESCRIPTION: make a table of sizes 1876 * 1877 * PARAMETERS 
: 1878 * 1879 * 1880 *==========================================================================*/ 1881void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 1882 int32_t* sizeTable) 1883{ 1884 int j = 0; 1885 for (int i = 0; i < size; i++) { 1886 sizeTable[j] = dimTable[i].width; 1887 sizeTable[j+1] = dimTable[i].height; 1888 j+=2; 1889 } 1890} 1891 1892/*=========================================================================== 1893 * FUNCTION : makeFPSTable 1894 * 1895 * DESCRIPTION: make a table of fps ranges 1896 * 1897 * PARAMETERS : 1898 * 1899 *==========================================================================*/ 1900void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 1901 int32_t* fpsRangesTable) 1902{ 1903 int j = 0; 1904 for (int i = 0; i < size; i++) { 1905 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 1906 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 1907 j+=2; 1908 } 1909} 1910 1911/*=========================================================================== 1912 * FUNCTION : makeOverridesList 1913 * 1914 * DESCRIPTION: make a list of scene mode overrides 1915 * 1916 * PARAMETERS : 1917 * 1918 * 1919 *==========================================================================*/ 1920void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable, 1921 uint8_t size, uint8_t* overridesList, 1922 uint8_t* supported_indexes) 1923{ 1924 /*daemon will give a list of overrides for all scene modes. 
1925 However we should send the fwk only the overrides for the scene modes 1926 supported by the framework*/ 1927 int j = 0, index = 0; 1928 for (int i = 0; i < size; i++) { 1929 index = supported_indexes[i]; 1930 overridesList[j] = (int32_t)overridesTable[index].ae_mode; 1931 overridesList[j+1] = (int32_t)overridesTable[index].awb_mode; 1932 overridesList[j+2] = (int32_t)overridesTable[index].af_mode; 1933 j+=3; 1934 } 1935} 1936 1937/*=========================================================================== 1938 * FUNCTION : getPreviewHalPixelFormat 1939 * 1940 * DESCRIPTION: convert the format to type recognized by framework 1941 * 1942 * PARAMETERS : format : the format from backend 1943 * 1944 ** RETURN : format recognized by framework 1945 * 1946 *==========================================================================*/ 1947int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 1948{ 1949 int32_t halPixelFormat; 1950 1951 switch (format) { 1952 case CAM_FORMAT_YUV_420_NV12: 1953 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 1954 break; 1955 case CAM_FORMAT_YUV_420_NV21: 1956 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 1957 break; 1958 case CAM_FORMAT_YUV_420_NV21_ADRENO: 1959 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 1960 break; 1961 case CAM_FORMAT_YUV_420_YV12: 1962 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 1963 break; 1964 case CAM_FORMAT_YUV_422_NV16: 1965 case CAM_FORMAT_YUV_422_NV61: 1966 default: 1967 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 1968 break; 1969 } 1970 return halPixelFormat; 1971} 1972 1973/*=========================================================================== 1974 * FUNCTION : AddSetParmEntryToBatch 1975 * 1976 * DESCRIPTION: add set parameter entry into batch 1977 * 1978 * PARAMETERS : 1979 * @p_table : ptr to parameter buffer 1980 * @paramType : parameter type 1981 * @paramLength : length of parameter value 1982 * @paramValue : ptr to parameter value 1983 * 1984 * RETURN : int32_t type of 
status 1985 * NO_ERROR -- success 1986 * none-zero failure code 1987 *==========================================================================*/ 1988int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 1989 cam_intf_parm_type_t paramType, 1990 uint32_t paramLength, 1991 void *paramValue) 1992{ 1993 int position = paramType; 1994 int current, next; 1995 1996 /************************************************************************* 1997 * Code to take care of linking next flags * 1998 *************************************************************************/ 1999 current = GET_FIRST_PARAM_ID(p_table); 2000 if (position == current){ 2001 //DO NOTHING 2002 } else if (position < current){ 2003 SET_NEXT_PARAM_ID(position, p_table, current); 2004 SET_FIRST_PARAM_ID(p_table, position); 2005 } else { 2006 /* Search for the position in the linked list where we need to slot in*/ 2007 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2008 current = GET_NEXT_PARAM_ID(current, p_table); 2009 2010 /*If node already exists no need to alter linking*/ 2011 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2012 next = GET_NEXT_PARAM_ID(current, p_table); 2013 SET_NEXT_PARAM_ID(current, p_table, position); 2014 SET_NEXT_PARAM_ID(position, p_table, next); 2015 } 2016 } 2017 2018 /************************************************************************* 2019 * Copy contents into entry * 2020 *************************************************************************/ 2021 2022 if (paramLength > sizeof(parm_type_t)) { 2023 ALOGE("%s:Size of input larger than max entry size",__func__); 2024 return BAD_VALUE; 2025 } 2026 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2027 return NO_ERROR; 2028} 2029 2030/*=========================================================================== 2031 * FUNCTION : lookupFwkName 2032 * 2033 * DESCRIPTION: In case the enum is not same in fwk and backend 2034 * make sure the parameter is correctly 
propogated 2035 * 2036 * PARAMETERS : 2037 * @arr : map between the two enums 2038 * @len : len of the map 2039 * @hal_name : name of the hal_parm to map 2040 * 2041 * RETURN : int type of status 2042 * fwk_name -- success 2043 * none-zero failure code 2044 *==========================================================================*/ 2045int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[], 2046 int len, int hal_name) 2047{ 2048 2049 for (int i = 0; i < len; i++) { 2050 if (arr[i].hal_name == hal_name) 2051 return arr[i].fwk_name; 2052 } 2053 2054 /* Not able to find matching framework type is not necessarily 2055 * an error case. This happens when mm-camera supports more attributes 2056 * than the frameworks do */ 2057 ALOGD("%s: Cannot find matching framework type", __func__); 2058 return NAME_NOT_FOUND; 2059} 2060 2061/*=========================================================================== 2062 * FUNCTION : lookupHalName 2063 * 2064 * DESCRIPTION: In case the enum is not same in fwk and backend 2065 * make sure the parameter is correctly propogated 2066 * 2067 * PARAMETERS : 2068 * @arr : map between the two enums 2069 * @len : len of the map 2070 * @fwk_name : name of the hal_parm to map 2071 * 2072 * RETURN : int32_t type of status 2073 * hal_name -- success 2074 * none-zero failure code 2075 *==========================================================================*/ 2076int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2077 int len, int fwk_name) 2078{ 2079 for (int i = 0; i < len; i++) { 2080 if (arr[i].fwk_name == fwk_name) 2081 return arr[i].hal_name; 2082 } 2083 ALOGE("%s: Cannot find matching hal type", __func__); 2084 return NAME_NOT_FOUND; 2085} 2086 2087/*=========================================================================== 2088 * FUNCTION : getCapabilities 2089 * 2090 * DESCRIPTION: query camera capabilities 2091 * 2092 * PARAMETERS : 2093 * @cameraId : camera Id 2094 * @info : camera info struct 
to be filled in with camera capabilities 2095 * 2096 * RETURN : int32_t type of status 2097 * NO_ERROR -- success 2098 * none-zero failure code 2099 *==========================================================================*/ 2100int QCamera3HardwareInterface::getCamInfo(int cameraId, 2101 struct camera_info *info) 2102{ 2103 int rc = 0; 2104 2105 if (NULL == gCamCapability[cameraId]) { 2106 rc = initCapabilities(cameraId); 2107 if (rc < 0) { 2108 //pthread_mutex_unlock(&g_camlock); 2109 return rc; 2110 } 2111 } 2112 2113 if (NULL == gStaticMetadata[cameraId]) { 2114 rc = initStaticMetadata(cameraId); 2115 if (rc < 0) { 2116 return rc; 2117 } 2118 } 2119 2120 switch(gCamCapability[cameraId]->position) { 2121 case CAM_POSITION_BACK: 2122 info->facing = CAMERA_FACING_BACK; 2123 break; 2124 2125 case CAM_POSITION_FRONT: 2126 info->facing = CAMERA_FACING_FRONT; 2127 break; 2128 2129 default: 2130 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2131 rc = -1; 2132 break; 2133 } 2134 2135 2136 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2137 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2138 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2139 2140 return rc; 2141} 2142 2143/*=========================================================================== 2144 * FUNCTION : translateMetadata 2145 * 2146 * DESCRIPTION: translate the metadata into camera_metadata_t 2147 * 2148 * PARAMETERS : type of the request 2149 * 2150 * 2151 * RETURN : success: camera_metadata_t* 2152 * failure: NULL 2153 * 2154 *==========================================================================*/ 2155camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2156{ 2157 pthread_mutex_lock(&mMutex); 2158 2159 if (mDefaultMetadata[type] != NULL) { 2160 pthread_mutex_unlock(&mMutex); 2161 return mDefaultMetadata[type]; 2162 } 2163 //first time we are handling this request 2164 //fill up the metadata 
structure using the wrapper class 2165 CameraMetadata settings; 2166 //translate from cam_capability_t to camera_metadata_tag_t 2167 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2168 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2169 2170 /*control*/ 2171 2172 uint8_t controlIntent = 0; 2173 switch (type) { 2174 case CAMERA3_TEMPLATE_PREVIEW: 2175 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2176 break; 2177 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2178 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2179 break; 2180 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2181 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2182 break; 2183 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2184 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2185 break; 2186 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2187 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2188 break; 2189 default: 2190 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2191 break; 2192 } 2193 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2194 2195 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2196 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2197 2198 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2199 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2200 2201 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2202 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2203 2204 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2205 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2206 2207 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2208 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2209 2210 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2211 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2212 2213 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 
//similar to AUTO? 2214 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2215 2216 static uint8_t focusMode; 2217 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2218 ALOGE("%s: Setting focus mode to auto", __func__); 2219 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2220 } else { 2221 ALOGE("%s: Setting focus mode to off", __func__); 2222 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2223 } 2224 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2225 2226 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2227 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2228 2229 /*flash*/ 2230 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2231 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2232 2233 2234 /* lens */ 2235 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2236 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2237 2238 if (gCamCapability[mCameraId]->filter_densities_count) { 2239 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2240 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2241 gCamCapability[mCameraId]->filter_densities_count); 2242 } 2243 2244 /* TODO: Enable focus lengths once supported*/ 2245 /*if (gCamCapability[mCameraId]->focal_lengths_count) { 2246 float default_focal_length = gCamCapability[mCameraId]->focal_lengths[0]; 2247 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2248 }*/ 2249 2250 mDefaultMetadata[type] = settings.release(); 2251 2252 pthread_mutex_unlock(&mMutex); 2253 return mDefaultMetadata[type]; 2254} 2255 2256/*=========================================================================== 2257 * FUNCTION : setFrameParameters 2258 * 2259 * DESCRIPTION: set parameters per frame as requested in the metadata from 2260 * framework 2261 * 2262 * PARAMETERS : 2263 * @settings : frame settings information from framework 2264 * 2265 * 2266 * RETURN : success: NO_ERROR 2267 * failure: 2268 
*==========================================================================*/ 2269int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2270 const camera_metadata_t *settings) 2271{ 2272 /*translate from camera_metadata_t type to parm_type_t*/ 2273 int rc = 0; 2274 if (settings == NULL && mFirstRequest) { 2275 /*settings cannot be null for the first request*/ 2276 return BAD_VALUE; 2277 } 2278 2279 int32_t hal_version = CAM_HAL_V3; 2280 2281 memset(mParameters, 0, sizeof(parm_buffer_t)); 2282 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2283 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2284 sizeof(hal_version), &hal_version); 2285 2286 /*we need to update the frame number in the parameters*/ 2287 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2288 sizeof(frame_id), &frame_id); 2289 if (rc < 0) { 2290 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2291 return BAD_VALUE; 2292 } 2293 2294 if(settings != NULL){ 2295 rc = translateMetadataToParameters(settings); 2296 } 2297 /*set the parameters to backend*/ 2298 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2299 return rc; 2300} 2301 2302/*=========================================================================== 2303 * FUNCTION : translateMetadataToParameters 2304 * 2305 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2306 * 2307 * 2308 * PARAMETERS : 2309 * @settings : frame settings information from framework 2310 * 2311 * 2312 * RETURN : success: NO_ERROR 2313 * failure: 2314 *==========================================================================*/ 2315int QCamera3HardwareInterface::translateMetadataToParameters 2316 (const camera_metadata_t *settings) 2317{ 2318 int rc = 0; 2319 CameraMetadata frame_settings; 2320 frame_settings = settings; 2321 2322 2323 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2324 int32_t antibandingMode = 2325 
frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2326 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2327 sizeof(antibandingMode), &antibandingMode); 2328 } 2329 2330 /*int32_t expCompensation = frame_settings.find().data.i32[0]; 2331 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2332 sizeof(expCompensation), &expCompensation);*/ 2333 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2334 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2335 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2336 sizeof(aeLock), &aeLock); 2337 } 2338 2339 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2340 cam_fps_range_t fps_range; 2341 fps_range.min_fps = 2342 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2343 fps_range.max_fps = 2344 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2345 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2346 sizeof(fps_range), &fps_range); 2347 } 2348 2349 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2350 uint8_t fwk_focusMode = 2351 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2352 uint8_t focusMode = lookupHalName(FOCUS_MODES_MAP, 2353 sizeof(FOCUS_MODES_MAP), 2354 fwk_focusMode); 2355 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2356 sizeof(focusMode), &focusMode); 2357 } 2358 2359 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2360 uint8_t awbLock = 2361 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2362 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2363 sizeof(awbLock), &awbLock); 2364 } 2365 2366 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2367 uint8_t fwk_whiteLevel = 2368 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2369 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2370 sizeof(WHITE_BALANCE_MODES_MAP), 2371 fwk_whiteLevel); 
2372 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2373 sizeof(whiteLevel), &whiteLevel); 2374 } 2375 2376 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2377 uint8_t fwk_effectMode = 2378 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2379 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2380 sizeof(EFFECT_MODES_MAP), 2381 fwk_effectMode); 2382 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2383 sizeof(effectMode), &effectMode); 2384 } 2385 2386 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2387 uint8_t fwk_aeMode = 2388 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2389 uint8_t aeMode; 2390 int32_t redeye; 2391 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 2392 aeMode = CAM_AE_MODE_OFF; 2393 } else { 2394 aeMode = CAM_AE_MODE_ON; 2395 } 2396 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 2397 redeye = 1; 2398 } else { 2399 redeye = 0; 2400 } 2401 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 2402 sizeof(AE_FLASH_MODE_MAP), 2403 aeMode); 2404 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2405 sizeof(aeMode), &aeMode); 2406 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2407 sizeof(flashMode), &flashMode); 2408 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 2409 sizeof(redeye), &redeye); 2410 } 2411 2412 if (frame_settings.exists(ANDROID_REQUEST_FRAME_COUNT)) { 2413 int32_t metaFrameNumber = 2414 frame_settings.find(ANDROID_REQUEST_FRAME_COUNT).data.i32[0]; 2415 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2416 sizeof(metaFrameNumber), &metaFrameNumber); 2417 } 2418 2419 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2420 uint8_t colorCorrectMode = 2421 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2422 rc = 2423 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2424 sizeof(colorCorrectMode), 
&colorCorrectMode); 2425 } 2426 cam_trigger_t aecTrigger; 2427 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 2428 aecTrigger.trigger_id = -1; 2429 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 2430 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 2431 aecTrigger.trigger = 2432 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2433 aecTrigger.trigger_id = 2434 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 2435 } 2436 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2437 sizeof(aecTrigger), &aecTrigger); 2438 2439 /*af_trigger must come with a trigger id*/ 2440 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 2441 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 2442 cam_trigger_t af_trigger; 2443 af_trigger.trigger = 2444 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2445 af_trigger.trigger_id = 2446 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 2447 rc = AddSetParmEntryToBatch(mParameters, 2448 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 2449 } 2450 2451 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2452 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2453 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2454 sizeof(metaMode), &metaMode); 2455 } 2456 2457 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2458 int32_t demosaic = 2459 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2460 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2461 sizeof(demosaic), &demosaic); 2462 } 2463 2464 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2465 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2466 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE, 2467 sizeof(edgeMode), &edgeMode); 2468 } 2469 2470 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2471 int32_t edgeStrength = 2472 
frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2473 rc = AddSetParmEntryToBatch(mParameters, 2474 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2475 } 2476 2477 if (frame_settings.exists(ANDROID_FLASH_MODE)) { 2478 uint8_t flashMode = 2479 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]; 2480 rc = AddSetParmEntryToBatch(mParameters, 2481 CAM_INTF_META_FLASH_MODE, sizeof(flashMode), &flashMode); 2482 } 2483 2484 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2485 uint8_t flashPower = 2486 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2487 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2488 sizeof(flashPower), &flashPower); 2489 } 2490 2491 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2492 int64_t flashFiringTime = 2493 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2494 rc = AddSetParmEntryToBatch(mParameters, 2495 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2496 } 2497 2498 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2499 uint8_t geometricMode = 2500 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2501 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2502 sizeof(geometricMode), &geometricMode); 2503 } 2504 2505 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2506 uint8_t geometricStrength = 2507 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2508 rc = AddSetParmEntryToBatch(mParameters, 2509 CAM_INTF_META_GEOMETRIC_STRENGTH, 2510 sizeof(geometricStrength), &geometricStrength); 2511 } 2512 2513 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2514 uint8_t hotPixelMode = 2515 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2516 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 2517 sizeof(hotPixelMode), &hotPixelMode); 2518 } 2519 2520 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2521 float lensAperture = 2522 frame_settings.find( 
ANDROID_LENS_APERTURE).data.f[0]; 2523 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2524 sizeof(lensAperture), &lensAperture); 2525 } 2526 2527 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2528 float filterDensity = 2529 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2530 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2531 sizeof(filterDensity), &filterDensity); 2532 } 2533 2534 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2535 float focalLength = 2536 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2537 rc = AddSetParmEntryToBatch(mParameters, 2538 CAM_INTF_META_LENS_FOCAL_LENGTH, 2539 sizeof(focalLength), &focalLength); 2540 } 2541 2542 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2543 float focalDistance = 2544 frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2545 rc = AddSetParmEntryToBatch(mParameters, 2546 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2547 sizeof(focalDistance), &focalDistance); 2548 } 2549 2550 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2551 uint8_t optStabMode = 2552 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2553 rc = AddSetParmEntryToBatch(mParameters, 2554 CAM_INTF_META_LENS_OPT_STAB_MODE, 2555 sizeof(optStabMode), &optStabMode); 2556 } 2557 2558 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2559 uint8_t noiseRedMode = 2560 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2561 rc = AddSetParmEntryToBatch(mParameters, 2562 CAM_INTF_META_NOISE_REDUCTION_MODE, 2563 sizeof(noiseRedMode), &noiseRedMode); 2564 } 2565 2566 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2567 uint8_t noiseRedStrength = 2568 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2569 rc = AddSetParmEntryToBatch(mParameters, 2570 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2571 sizeof(noiseRedStrength), &noiseRedStrength); 2572 } 2573 2574 if 
(frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2575 cam_crop_region_t scalerCropRegion; 2576 scalerCropRegion.left = 2577 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2578 scalerCropRegion.top = 2579 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2580 scalerCropRegion.width = 2581 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2582 scalerCropRegion.height = 2583 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3]; 2584 rc = AddSetParmEntryToBatch(mParameters, 2585 CAM_INTF_META_SCALER_CROP_REGION, 2586 sizeof(scalerCropRegion), &scalerCropRegion); 2587 } 2588 2589 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2590 int64_t sensorExpTime = 2591 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2592 rc = AddSetParmEntryToBatch(mParameters, 2593 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2594 sizeof(sensorExpTime), &sensorExpTime); 2595 } 2596 2597 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2598 int64_t sensorFrameDuration = 2599 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2600 rc = AddSetParmEntryToBatch(mParameters, 2601 CAM_INTF_META_SENSOR_FRAME_DURATION, 2602 sizeof(sensorFrameDuration), &sensorFrameDuration); 2603 } 2604 2605 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2606 int32_t sensorSensitivity = 2607 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2608 rc = AddSetParmEntryToBatch(mParameters, 2609 CAM_INTF_META_SENSOR_SENSITIVITY, 2610 sizeof(sensorSensitivity), &sensorSensitivity); 2611 } 2612 2613 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2614 int32_t shadingMode = 2615 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2616 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2617 sizeof(shadingMode), &shadingMode); 2618 } 2619 2620 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2621 uint8_t shadingStrength = 2622 frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 2623 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 2624 sizeof(shadingStrength), &shadingStrength); 2625 } 2626 2627 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 2628 uint8_t facedetectMode = 2629 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 2630 rc = AddSetParmEntryToBatch(mParameters, 2631 CAM_INTF_META_STATS_FACEDETECT_MODE, 2632 sizeof(facedetectMode), &facedetectMode); 2633 } 2634 2635 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 2636 uint8_t histogramMode = 2637 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 2638 rc = AddSetParmEntryToBatch(mParameters, 2639 CAM_INTF_META_STATS_HISTOGRAM_MODE, 2640 sizeof(histogramMode), &histogramMode); 2641 } 2642 2643 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 2644 uint8_t sharpnessMapMode = 2645 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 2646 rc = AddSetParmEntryToBatch(mParameters, 2647 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 2648 sizeof(sharpnessMapMode), &sharpnessMapMode); 2649 } 2650 2651 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 2652 uint8_t tonemapMode = 2653 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 2654 rc = AddSetParmEntryToBatch(mParameters, 2655 CAM_INTF_META_TONEMAP_MODE, 2656 sizeof(tonemapMode), &tonemapMode); 2657 } 2658 2659 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2660 uint8_t captureIntent = 2661 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2662 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 2663 sizeof(captureIntent), &captureIntent); 2664 } 2665 2666 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { 2667 cam_area_t roi; 2668 convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS); 2669 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI, 2670 sizeof(roi), &roi); 2671 } 2672 2673 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { 2674 
cam_area_t roi; 2675 convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS); 2676 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI, 2677 sizeof(roi), &roi); 2678 } 2679 2680 if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { 2681 cam_area_t roi; 2682 convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS); 2683 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS, 2684 sizeof(roi), &roi); 2685 } 2686 return rc; 2687} 2688 2689/*=========================================================================== 2690 * FUNCTION : getJpegSettings 2691 * 2692 * DESCRIPTION: save the jpeg settings in the HAL 2693 * 2694 * 2695 * PARAMETERS : 2696 * @settings : frame settings information from framework 2697 * 2698 * 2699 * RETURN : success: NO_ERROR 2700 * failure: 2701 *==========================================================================*/ 2702int QCamera3HardwareInterface::getJpegSettings 2703 (const camera_metadata_t *settings) 2704{ 2705 if (mJpegSettings) { 2706 free(mJpegSettings); 2707 mJpegSettings = NULL; 2708 } 2709 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 2710 CameraMetadata jpeg_settings; 2711 jpeg_settings = settings; 2712 2713 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 2714 mJpegSettings->jpeg_orientation = 2715 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 2716 } else { 2717 mJpegSettings->jpeg_orientation = 0; 2718 } 2719 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 2720 mJpegSettings->jpeg_quality = 2721 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 2722 } else { 2723 mJpegSettings->jpeg_quality = 85; 2724 } 2725 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 2726 mJpegSettings->thumbnail_size.width = 2727 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 2728 mJpegSettings->thumbnail_size.height = 2729 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 2730 } else { 2731 mJpegSettings->thumbnail_size.width = 0; 
2732 mJpegSettings->thumbnail_size.height = 0; 2733 } 2734 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 2735 for (int i = 0; i < 3; i++) { 2736 mJpegSettings->gps_coordinates[i] = 2737 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 2738 } 2739 } 2740 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 2741 mJpegSettings->gps_timestamp = 2742 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 2743 } 2744 2745 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 2746 mJpegSettings->gps_processing_method = 2747 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[0]; 2748 } 2749 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2750 mJpegSettings->sensor_sensitivity = 2751 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2752 } 2753 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2754 mJpegSettings->lens_focal_length = 2755 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2756 } 2757 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 2758 return 0; 2759} 2760 2761/*=========================================================================== 2762 * FUNCTION : captureResultCb 2763 * 2764 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 2765 * 2766 * PARAMETERS : 2767 * @frame : frame information from mm-camera-interface 2768 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 
2769 * @userdata: userdata 2770 * 2771 * RETURN : NONE 2772 *==========================================================================*/ 2773void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 2774 camera3_stream_buffer_t *buffer, 2775 uint32_t frame_number, void *userdata) 2776{ 2777 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 2778 if (hw == NULL) { 2779 ALOGE("%s: Invalid hw %p", __func__, hw); 2780 return; 2781 } 2782 2783 hw->captureResultCb(metadata, buffer, frame_number); 2784 return; 2785} 2786 2787/*=========================================================================== 2788 * FUNCTION : initialize 2789 * 2790 * DESCRIPTION: Pass framework callback pointers to HAL 2791 * 2792 * PARAMETERS : 2793 * 2794 * 2795 * RETURN : Success : 0 2796 * Failure: -ENODEV 2797 *==========================================================================*/ 2798 2799int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 2800 const camera3_callback_ops_t *callback_ops) 2801{ 2802 ALOGV("%s: E", __func__); 2803 QCamera3HardwareInterface *hw = 2804 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2805 if (!hw) { 2806 ALOGE("%s: NULL camera device", __func__); 2807 return -ENODEV; 2808 } 2809 2810 int rc = hw->initialize(callback_ops); 2811 ALOGV("%s: X", __func__); 2812 return rc; 2813} 2814 2815/*=========================================================================== 2816 * FUNCTION : configure_streams 2817 * 2818 * DESCRIPTION: 2819 * 2820 * PARAMETERS : 2821 * 2822 * 2823 * RETURN : Success: 0 2824 * Failure: -EINVAL (if stream configuration is invalid) 2825 * -ENODEV (fatal error) 2826 *==========================================================================*/ 2827 2828int QCamera3HardwareInterface::configure_streams( 2829 const struct camera3_device *device, 2830 camera3_stream_configuration_t *stream_list) 2831{ 2832 ALOGV("%s: E", __func__); 2833 QCamera3HardwareInterface 
*hw = 2834 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2835 if (!hw) { 2836 ALOGE("%s: NULL camera device", __func__); 2837 return -ENODEV; 2838 } 2839 int rc = hw->configureStreams(stream_list); 2840 ALOGV("%s: X", __func__); 2841 return rc; 2842} 2843 2844/*=========================================================================== 2845 * FUNCTION : register_stream_buffers 2846 * 2847 * DESCRIPTION: Register stream buffers with the device 2848 * 2849 * PARAMETERS : 2850 * 2851 * RETURN : 2852 *==========================================================================*/ 2853int QCamera3HardwareInterface::register_stream_buffers( 2854 const struct camera3_device *device, 2855 const camera3_stream_buffer_set_t *buffer_set) 2856{ 2857 ALOGV("%s: E", __func__); 2858 QCamera3HardwareInterface *hw = 2859 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2860 if (!hw) { 2861 ALOGE("%s: NULL camera device", __func__); 2862 return -ENODEV; 2863 } 2864 int rc = hw->registerStreamBuffers(buffer_set); 2865 ALOGV("%s: X", __func__); 2866 return rc; 2867} 2868 2869/*=========================================================================== 2870 * FUNCTION : construct_default_request_settings 2871 * 2872 * DESCRIPTION: Configure a settings buffer to meet the required use case 2873 * 2874 * PARAMETERS : 2875 * 2876 * 2877 * RETURN : Success: Return valid metadata 2878 * Failure: Return NULL 2879 *==========================================================================*/ 2880const camera_metadata_t* QCamera3HardwareInterface:: 2881 construct_default_request_settings(const struct camera3_device *device, 2882 int type) 2883{ 2884 2885 ALOGV("%s: E", __func__); 2886 camera_metadata_t* fwk_metadata = NULL; 2887 QCamera3HardwareInterface *hw = 2888 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2889 if (!hw) { 2890 ALOGE("%s: NULL camera device", __func__); 2891 return NULL; 2892 } 2893 2894 fwk_metadata = 
hw->translateCapabilityToMetadata(type); 2895 2896 ALOGV("%s: X", __func__); 2897 return fwk_metadata; 2898} 2899 2900/*=========================================================================== 2901 * FUNCTION : process_capture_request 2902 * 2903 * DESCRIPTION: 2904 * 2905 * PARAMETERS : 2906 * 2907 * 2908 * RETURN : 2909 *==========================================================================*/ 2910int QCamera3HardwareInterface::process_capture_request( 2911 const struct camera3_device *device, 2912 camera3_capture_request_t *request) 2913{ 2914 ALOGV("%s: E", __func__); 2915 QCamera3HardwareInterface *hw = 2916 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2917 if (!hw) { 2918 ALOGE("%s: NULL camera device", __func__); 2919 return -EINVAL; 2920 } 2921 2922 int rc = hw->processCaptureRequest(request); 2923 ALOGV("%s: X", __func__); 2924 return rc; 2925} 2926 2927/*=========================================================================== 2928 * FUNCTION : get_metadata_vendor_tag_ops 2929 * 2930 * DESCRIPTION: 2931 * 2932 * PARAMETERS : 2933 * 2934 * 2935 * RETURN : 2936 *==========================================================================*/ 2937 2938void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 2939 const struct camera3_device *device, 2940 vendor_tag_query_ops_t* ops) 2941{ 2942 ALOGV("%s: E", __func__); 2943 QCamera3HardwareInterface *hw = 2944 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2945 if (!hw) { 2946 ALOGE("%s: NULL camera device", __func__); 2947 return; 2948 } 2949 2950 hw->getMetadataVendorTagOps(ops); 2951 ALOGV("%s: X", __func__); 2952 return; 2953} 2954 2955/*=========================================================================== 2956 * FUNCTION : dump 2957 * 2958 * DESCRIPTION: 2959 * 2960 * PARAMETERS : 2961 * 2962 * 2963 * RETURN : 2964 *==========================================================================*/ 2965 2966void QCamera3HardwareInterface::dump( 2967 const struct 
camera3_device *device, int fd) 2968{ 2969 ALOGV("%s: E", __func__); 2970 QCamera3HardwareInterface *hw = 2971 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 2972 if (!hw) { 2973 ALOGE("%s: NULL camera device", __func__); 2974 return; 2975 } 2976 2977 hw->dump(fd); 2978 ALOGV("%s: X", __func__); 2979 return; 2980} 2981 2982/*=========================================================================== 2983 * FUNCTION : close_camera_device 2984 * 2985 * DESCRIPTION: 2986 * 2987 * PARAMETERS : 2988 * 2989 * 2990 * RETURN : 2991 *==========================================================================*/ 2992int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 2993{ 2994 ALOGV("%s: E", __func__); 2995 int ret = NO_ERROR; 2996 QCamera3HardwareInterface *hw = 2997 reinterpret_cast<QCamera3HardwareInterface *>( 2998 reinterpret_cast<camera3_device_t *>(device)->priv); 2999 if (!hw) { 3000 ALOGE("NULL camera device"); 3001 return BAD_VALUE; 3002 } 3003 delete hw; 3004 ALOGV("%s: X", __func__); 3005 return ret; 3006} 3007 3008}; //end namespace qcamera 3009