// QCamera3HWI.cpp — revision 80495236085279a54afee1b2dbfcf32a0a309f42
1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27* 28*/ 29 30#define ATRACE_TAG ATRACE_TAG_CAMERA 31#define LOG_TAG "QCamera3HWI" 32//#define LOG_NDEBUG 0 33 34#define __STDC_LIMIT_MACROS 35#include <cutils/properties.h> 36#include <hardware/camera3.h> 37#include <camera/CameraMetadata.h> 38#include <stdio.h> 39#include <stdlib.h> 40#include <fcntl.h> 41#include <stdint.h> 42#include <utils/Log.h> 43#include <utils/Errors.h> 44#include <utils/Trace.h> 45#include <ui/Fence.h> 46#include <gralloc_priv.h> 47#include "QCamera3HWI.h" 48#include "QCamera3Mem.h" 49#include "QCamera3Channel.h" 50#include "QCamera3PostProc.h" 51#include "QCamera3VendorTags.h" 52 53using namespace android; 54 55namespace qcamera { 56 57#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 58 59#define EMPTY_PIPELINE_DELAY 2 60#define PARTIAL_RESULT_COUNT 2 61#define FRAME_SKIP_DELAY 0 62#define CAM_MAX_SYNC_LATENCY 4 63 64#define MAX_VALUE_8BIT ((1<<8)-1) 65#define MAX_VALUE_10BIT ((1<<10)-1) 66#define MAX_VALUE_12BIT ((1<<12)-1) 67 68#define VIDEO_4K_WIDTH 3840 69#define VIDEO_4K_HEIGHT 2160 70 71#define MAX_RAW_STREAMS 1 72#define MAX_STALLING_STREAMS 1 73#define MAX_PROCESSED_STREAMS 3 74/* Batch mode is enabled only if FPS set is equal to or greater than this */ 75#define MIN_FPS_FOR_BATCH_MODE (120) 76#define PREVIEW_FPS_FOR_HFR (30) 77#define DEFAULT_VIDEO_FPS (30.0) 78#define MAX_HFR_BATCH_SIZE (4) 79#define REGIONS_TUPLE_COUNT 5 80 81#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0])) 82 83#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\ 84 CAM_QCOM_FEATURE_CROP |\ 85 CAM_QCOM_FEATURE_ROTATION |\ 86 CAM_QCOM_FEATURE_SHARPNESS |\ 87 CAM_QCOM_FEATURE_SCALE |\ 88 CAM_QCOM_FEATURE_CAC ) 89 90cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS]; 91const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS]; 92static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER; 93volatile uint32_t gCamHal3LogLevel = 1; 94 95const QCamera3HardwareInterface::QCameraPropMap 
QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",   CAM_CDS_MODE_ON},
    {"Off",  CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};

// Android effect mode <-> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android AWB mode <-> backend white balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene mode <-> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_DISABLED,       CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android AF mode <-> backend focus mode. Note AF_MODE_OFF intentionally
// appears twice (OFF and FIXED both map from it); lookup order matters.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration mode <-> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode <-> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode <-> backend flash mode implied by that AE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android explicit flash mode <-> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect mode <-> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,  CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

// Android focus-distance calibration quality <-> backend calibration state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Android lens state <-> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

// Flat (width, height) pairs of supported JPEG thumbnail sizes;
// the leading 0x0 entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern mode <-> backend test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
248 */ 249const QCamera3HardwareInterface::QCameraMap< 250 camera_metadata_enum_android_sensor_reference_illuminant1_t, 251 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = { 252 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO}, 253 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT }, 254 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO }, 255 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A }, 256 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON }, 257 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 }, 258 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 }, 259 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 }, 260 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A}, 261 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 }, 262 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A }, 263 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 }, 264 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 }, 265 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 }, 266 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT }, 267 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO}, 268}; 269 270const QCamera3HardwareInterface::QCameraMap< 271 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = { 272 { 60, CAM_HFR_MODE_60FPS}, 273 { 90, CAM_HFR_MODE_90FPS}, 274 { 120, CAM_HFR_MODE_120FPS}, 275 { 150, CAM_HFR_MODE_150FPS}, 276 { 180, CAM_HFR_MODE_180FPS}, 277 { 210, CAM_HFR_MODE_210FPS}, 278 { 240, CAM_HFR_MODE_240FPS}, 279 { 480, CAM_HFR_MODE_480FPS}, 280}; 281 282camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = { 283 initialize: QCamera3HardwareInterface::initialize, 284 configure_streams: QCamera3HardwareInterface::configure_streams, 285 register_stream_buffers: 
NULL, 286 construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings, 287 process_capture_request: QCamera3HardwareInterface::process_capture_request, 288 get_metadata_vendor_tag_ops: NULL, 289 dump: QCamera3HardwareInterface::dump, 290 flush: QCamera3HardwareInterface::flush, 291 reserved: {0}, 292}; 293 294/*=========================================================================== 295 * FUNCTION : QCamera3HardwareInterface 296 * 297 * DESCRIPTION: constructor of QCamera3HardwareInterface 298 * 299 * PARAMETERS : 300 * @cameraId : camera ID 301 * 302 * RETURN : none 303 *==========================================================================*/ 304QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId, 305 const camera_module_callbacks_t *callbacks) 306 : mCameraId(cameraId), 307 mCameraHandle(NULL), 308 mCameraOpened(false), 309 mCameraInitialized(false), 310 mCallbackOps(NULL), 311 mMetadataChannel(NULL), 312 mPictureChannel(NULL), 313 mRawChannel(NULL), 314 mSupportChannel(NULL), 315 mAnalysisChannel(NULL), 316 mRawDumpChannel(NULL), 317 mFirstRequest(false), 318 mFirstConfiguration(true), 319 mFlush(false), 320 mParamHeap(NULL), 321 mParameters(NULL), 322 mPrevParameters(NULL), 323 m_bIsVideo(false), 324 m_bIs4KVideo(false), 325 m_bEisSupportedSize(false), 326 m_bEisEnable(false), 327 m_MobicatMask(0), 328 mMinProcessedFrameDuration(0), 329 mMinJpegFrameDuration(0), 330 mMinRawFrameDuration(0), 331 m_pPowerModule(NULL), 332 mMetaFrameCount(0U), 333 mUpdateDebugLevel(false), 334 mCallbacks(callbacks), 335 mCaptureIntent(0), 336 mBatchSize(0), 337 mToBeQueuedVidBufs(0), 338 mHFRVideoFps(DEFAULT_VIDEO_FPS), 339 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE) 340{ 341 getLogLevel(); 342 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 343 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3; 344 mCameraDevice.common.close = close_camera_device; 345 mCameraDevice.ops = &mCameraOps; 346 
mCameraDevice.priv = this; 347 gCamCapability[cameraId]->version = CAM_HAL_V3; 348 // TODO: hardcode for now until mctl add support for min_num_pp_bufs 349 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3 350 gCamCapability[cameraId]->min_num_pp_bufs = 3; 351 352 pthread_cond_init(&mRequestCond, NULL); 353 mPendingRequest = 0; 354 mCurrentRequestId = -1; 355 pthread_mutex_init(&mMutex, NULL); 356 357 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 358 mDefaultMetadata[i] = NULL; 359 360#ifdef HAS_MULTIMEDIA_HINTS 361 if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) { 362 ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID); 363 } 364#endif 365 366 char prop[PROPERTY_VALUE_MAX]; 367 property_get("persist.camera.raw.dump", prop, "0"); 368 mEnableRawDump = atoi(prop); 369 if (mEnableRawDump) 370 CDBG("%s: Raw dump from Camera HAL enabled", __func__); 371} 372 373/*=========================================================================== 374 * FUNCTION : ~QCamera3HardwareInterface 375 * 376 * DESCRIPTION: destructor of QCamera3HardwareInterface 377 * 378 * PARAMETERS : none 379 * 380 * RETURN : none 381 *==========================================================================*/ 382QCamera3HardwareInterface::~QCamera3HardwareInterface() 383{ 384 CDBG("%s: E", __func__); 385 /* We need to stop all streams before deleting any stream */ 386 387 388 if (mRawDumpChannel) { 389 mRawDumpChannel->stop(); 390 } 391 392 // NOTE: 'camera3_stream_t *' objects are already freed at 393 // this stage by the framework 394 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 395 it != mStreamInfo.end(); it++) { 396 QCamera3Channel *channel = (*it)->channel; 397 if (channel) { 398 channel->stop(); 399 } 400 } 401 if (mSupportChannel) 402 mSupportChannel->stop(); 403 404 if (mAnalysisChannel) { 405 mAnalysisChannel->stop(); 406 } 407 408 /* Turn off video hint */ 409 
updatePowerHint(m_bIsVideo, false); 410 411 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 412 it != mStreamInfo.end(); it++) { 413 QCamera3Channel *channel = (*it)->channel; 414 if (channel) 415 delete channel; 416 free (*it); 417 } 418 if (mSupportChannel) { 419 delete mSupportChannel; 420 mSupportChannel = NULL; 421 } 422 423 if (mAnalysisChannel) { 424 delete mAnalysisChannel; 425 mAnalysisChannel = NULL; 426 } 427 if (mRawDumpChannel) { 428 delete mRawDumpChannel; 429 mRawDumpChannel = NULL; 430 } 431 mPictureChannel = NULL; 432 433 /* Clean up all channels */ 434 if (mCameraInitialized) { 435 if (mMetadataChannel) { 436 mMetadataChannel->stop(); 437 delete mMetadataChannel; 438 mMetadataChannel = NULL; 439 } 440 if(!mFirstConfiguration){ 441 //send the last unconfigure 442 cam_stream_size_info_t stream_config_info; 443 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t)); 444 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS; 445 stream_config_info.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS; 446 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO, 447 stream_config_info); 448 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 449 if (rc < 0) { 450 ALOGE("%s: set_parms failed for unconfigure", __func__); 451 } 452 } 453 deinitParameters(); 454 } 455 456 if (mCameraOpened) 457 closeCamera(); 458 459 mPendingBuffersMap.mPendingBufferList.clear(); 460 mPendingReprocessResultList.clear(); 461 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 462 i != mPendingRequestsList.end(); i++) { 463 clearInputBuffer(i->input_buffer); 464 i = mPendingRequestsList.erase(i); 465 } 466 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 467 if (mDefaultMetadata[i]) 468 free_camera_metadata(mDefaultMetadata[i]); 469 470 pthread_cond_destroy(&mRequestCond); 471 472 pthread_mutex_destroy(&mMutex); 473 CDBG("%s: X", __func__); 474} 475 
476/*=========================================================================== 477 * FUNCTION : camEvtHandle 478 * 479 * DESCRIPTION: Function registered to mm-camera-interface to handle events 480 * 481 * PARAMETERS : 482 * @camera_handle : interface layer camera handle 483 * @evt : ptr to event 484 * @user_data : user data ptr 485 * 486 * RETURN : none 487 *==========================================================================*/ 488void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/, 489 mm_camera_event_t *evt, 490 void *user_data) 491{ 492 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data; 493 if (obj && evt) { 494 switch(evt->server_event_type) { 495 case CAM_EVENT_TYPE_DAEMON_DIED: 496 ALOGE("%s: Fatal, camera daemon died", __func__); 497 camera3_notify_msg_t notify_msg; 498 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t)); 499 notify_msg.type = CAMERA3_MSG_ERROR; 500 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE; 501 notify_msg.message.error.error_stream = NULL; 502 notify_msg.message.error.frame_number = 0; 503 obj->mCallbackOps->notify(obj->mCallbackOps, ¬ify_msg); 504 break; 505 506 case CAM_EVENT_TYPE_DAEMON_PULL_REQ: 507 CDBG("%s: HAL got request pull from Daemon", __func__); 508 pthread_mutex_lock(&obj->mMutex); 509 obj->mWokenUpByDaemon = true; 510 obj->unblockRequestIfNecessary(); 511 pthread_mutex_unlock(&obj->mMutex); 512 break; 513 514 default: 515 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__, 516 evt->server_event_type); 517 break; 518 } 519 } else { 520 ALOGE("%s: NULL user_data/evt", __func__); 521 } 522} 523 524/*=========================================================================== 525 * FUNCTION : openCamera 526 * 527 * DESCRIPTION: open camera 528 * 529 * PARAMETERS : 530 * @hw_device : double ptr for camera device struct 531 * 532 * RETURN : int32_t type of status 533 * NO_ERROR -- success 534 * none-zero failure code 535 
*==========================================================================*/ 536int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 537{ 538 int rc = 0; 539 if (mCameraOpened) { 540 *hw_device = NULL; 541 return PERMISSION_DENIED; 542 } 543 544 rc = openCamera(); 545 if (rc == 0) { 546 *hw_device = &mCameraDevice.common; 547 } else 548 *hw_device = NULL; 549 550 return rc; 551} 552 553/*=========================================================================== 554 * FUNCTION : openCamera 555 * 556 * DESCRIPTION: open camera 557 * 558 * PARAMETERS : none 559 * 560 * RETURN : int32_t type of status 561 * NO_ERROR -- success 562 * none-zero failure code 563 *==========================================================================*/ 564int QCamera3HardwareInterface::openCamera() 565{ 566 int rc = 0; 567 568 ATRACE_CALL(); 569 if (mCameraHandle) { 570 ALOGE("Failure: Camera already opened"); 571 return ALREADY_EXISTS; 572 } 573 mCameraHandle = camera_open((uint8_t)mCameraId); 574 if (!mCameraHandle) { 575 ALOGE("camera_open failed."); 576 return UNKNOWN_ERROR; 577 } 578 579 mCameraOpened = true; 580 581 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle, 582 camEvtHandle, (void *)this); 583 584 if (rc < 0) { 585 ALOGE("%s: Error, failed to register event callback", __func__); 586 /* Not closing camera here since it is already handled in destructor */ 587 return FAILED_TRANSACTION; 588 } 589 mFirstConfiguration = true; 590 return NO_ERROR; 591} 592 593/*=========================================================================== 594 * FUNCTION : closeCamera 595 * 596 * DESCRIPTION: close camera 597 * 598 * PARAMETERS : none 599 * 600 * RETURN : int32_t type of status 601 * NO_ERROR -- success 602 * none-zero failure code 603 *==========================================================================*/ 604int QCamera3HardwareInterface::closeCamera() 605{ 606 ATRACE_CALL(); 607 int rc = NO_ERROR; 608 609 rc = 
mCameraHandle->ops->close_camera(mCameraHandle->camera_handle); 610 mCameraHandle = NULL; 611 mCameraOpened = false; 612 613 return rc; 614} 615 616/*=========================================================================== 617 * FUNCTION : initialize 618 * 619 * DESCRIPTION: Initialize frameworks callback functions 620 * 621 * PARAMETERS : 622 * @callback_ops : callback function to frameworks 623 * 624 * RETURN : 625 * 626 *==========================================================================*/ 627int QCamera3HardwareInterface::initialize( 628 const struct camera3_callback_ops *callback_ops) 629{ 630 ATRACE_CALL(); 631 int rc; 632 633 pthread_mutex_lock(&mMutex); 634 635 rc = initParameters(); 636 if (rc < 0) { 637 ALOGE("%s: initParamters failed %d", __func__, rc); 638 goto err1; 639 } 640 mCallbackOps = callback_ops; 641 642 pthread_mutex_unlock(&mMutex); 643 mCameraInitialized = true; 644 return 0; 645 646err1: 647 pthread_mutex_unlock(&mMutex); 648 return rc; 649} 650 651/*=========================================================================== 652 * FUNCTION : validateStreamDimensions 653 * 654 * DESCRIPTION: Check if the configuration requested are those advertised 655 * 656 * PARAMETERS : 657 * @stream_list : streams to be configured 658 * 659 * RETURN : 660 * 661 *==========================================================================*/ 662int QCamera3HardwareInterface::validateStreamDimensions( 663 camera3_stream_configuration_t *streamList) 664{ 665 int rc = NO_ERROR; 666 int32_t available_processed_sizes[MAX_SIZES_CNT * 2]; 667 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 668 size_t count = 0; 669 670 camera3_stream_t *inputStream = NULL; 671 /* 672 * Loop through all streams to find input stream if it exists* 673 */ 674 for (size_t i = 0; i< streamList->num_streams; i++) { 675 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) { 676 if (inputStream != NULL) { 677 ALOGE("%s: Error, Multiple input streams requested"); 678 
return -EINVAL; 679 } 680 inputStream = streamList->streams[i]; 681 } 682 } 683 /* 684 * Loop through all streams requested in configuration 685 * Check if unsupported sizes have been requested on any of them 686 */ 687 for (size_t j = 0; j < streamList->num_streams; j++) { 688 bool sizeFound = false; 689 size_t jpeg_sizes_cnt = 0; 690 camera3_stream_t *newStream = streamList->streams[j]; 691 692 uint32_t rotatedHeight = newStream->height; 693 uint32_t rotatedWidth = newStream->width; 694 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) || 695 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) { 696 rotatedHeight = newStream->width; 697 rotatedWidth = newStream->height; 698 } 699 700 /* 701 * Sizes are different for each type of stream format check against 702 * appropriate table. 703 */ 704 switch (newStream->format) { 705 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16: 706 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE: 707 case HAL_PIXEL_FORMAT_RAW10: 708 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT); 709 for (size_t i = 0; i < count; i++) { 710 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) && 711 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) { 712 sizeFound = true; 713 break; 714 } 715 } 716 break; 717 case HAL_PIXEL_FORMAT_BLOB: 718 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT); 719 /* Generate JPEG sizes table */ 720 makeTable(gCamCapability[mCameraId]->picture_sizes_tbl, 721 count, 722 MAX_SIZES_CNT, 723 available_processed_sizes); 724 jpeg_sizes_cnt = filterJpegSizes( 725 available_jpeg_sizes, 726 available_processed_sizes, 727 count * 2, 728 MAX_SIZES_CNT * 2, 729 gCamCapability[mCameraId]->active_array_size, 730 gCamCapability[mCameraId]->max_downscale_factor); 731 732 /* Verify set size against generated sizes table */ 733 for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) { 734 if (((int32_t)rotatedWidth == 
available_jpeg_sizes[i*2]) && 735 ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) { 736 sizeFound = true; 737 break; 738 } 739 } 740 break; 741 742 743 case HAL_PIXEL_FORMAT_YCbCr_420_888: 744 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 745 default: 746 /* ZSL stream will be full active array size validate that*/ 747 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL 748 || newStream->stream_type == CAMERA3_STREAM_INPUT 749 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) { 750 if (((int32_t)rotatedWidth == 751 gCamCapability[mCameraId]->active_array_size.width) && 752 ((int32_t)rotatedHeight == 753 gCamCapability[mCameraId]->active_array_size.height)) { 754 sizeFound = true; 755 } 756 /* We could potentially break here to enforce ZSL stream 757 * set from frameworks always has full active array size 758 * but it is not clear from spec if framework will always 759 * follow that, also we have logic to override to full array 760 * size, so keeping this logic lenient at the moment. 
761 */ 762 } 763 764 /* Non ZSL stream still need to conform to advertised sizes*/ 765 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, 766 MAX_SIZES_CNT); 767 for (size_t i = 0; i < count; i++) { 768 if (((int32_t)rotatedWidth == 769 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) && 770 ((int32_t)rotatedHeight == 771 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) { 772 sizeFound = true; 773 break; 774 } 775 } 776 break; 777 } /* End of switch(newStream->format) */ 778 779 /* We error out even if a single stream has unsupported size set */ 780 if (!sizeFound) { 781 ALOGE("%s: Error: Unsupported size of %d x %d requested for stream" 782 "type:%d", __func__, rotatedWidth, rotatedHeight, 783 newStream->format); 784 ALOGE("%s: Active array size is %d x %d", __func__, 785 gCamCapability[mCameraId]->active_array_size.width, 786 gCamCapability[mCameraId]->active_array_size.height); 787 rc = -EINVAL; 788 break; 789 } 790 } /* End of for each stream */ 791 return rc; 792} 793 794/*============================================================================== 795 * FUNCTION : isSupportChannelNeeded 796 * 797 * DESCRIPTION: Simple heuristic func to determine if support channels is needed 798 * 799 * PARAMETERS : 800 * @stream_list : streams to be configured 801 * 802 * RETURN : Boolen true/false decision 803 * 804 *==========================================================================*/ 805bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList, 806 cam_stream_size_info_t stream_config_info) 807{ 808 uint32_t i; 809 bool bSuperSetPresent = false; 810 /* Check for conditions where PProc pipeline does not have any streams*/ 811 for (i = 0; i < stream_config_info.num_streams; i++) { 812 if (stream_config_info.postprocess_mask[i] == CAM_QCOM_FEATURE_PP_SUPERSET) { 813 bSuperSetPresent = true; 814 break; 815 } 816 } 817 818 if (bSuperSetPresent == false ) 819 return true; 820 821 /* Dummy stream needed if 
only raw or jpeg streams present */ 822 for (i = 0;i < streamList->num_streams;i++) { 823 switch(streamList->streams[i]->format) { 824 case HAL_PIXEL_FORMAT_RAW_OPAQUE: 825 case HAL_PIXEL_FORMAT_RAW10: 826 case HAL_PIXEL_FORMAT_RAW16: 827 case HAL_PIXEL_FORMAT_BLOB: 828 break; 829 default: 830 return false; 831 } 832 } 833 return true; 834} 835 836/*============================================================================== 837 * FUNCTION : getSensorOutputSize 838 * 839 * DESCRIPTION: Get sensor output size based on current stream configuratoin 840 * 841 * PARAMETERS : 842 * @sensor_dim : sensor output dimension (output) 843 * 844 * RETURN : int32_t type of status 845 * NO_ERROR -- success 846 * none-zero failure code 847 * 848 *==========================================================================*/ 849int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim) 850{ 851 int32_t rc = NO_ERROR; 852 853 cam_dimension_t max_dim = {0, 0}; 854 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) { 855 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width) 856 max_dim.width = mStreamConfigInfo.stream_sizes[i].width; 857 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height) 858 max_dim.height = mStreamConfigInfo.stream_sizes[i].height; 859 } 860 861 clear_metadata_buffer(mParameters); 862 863 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION, 864 max_dim); 865 if (rc != NO_ERROR) { 866 ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__); 867 return rc; 868 } 869 870 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 871 if (rc != NO_ERROR) { 872 ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__); 873 return rc; 874 } 875 876 clear_metadata_buffer(mParameters); 877 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION); 878 879 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle, 880 mParameters); 881 
if (rc != NO_ERROR) { 882 ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__); 883 return rc; 884 } 885 886 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim); 887 ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height); 888 889 return rc; 890} 891 892/*============================================================================== 893 * FUNCTION : updatePowerHint 894 * 895 * DESCRIPTION: update power hint based on whether it's video mode or not. 896 * 897 * PARAMETERS : 898 * @bWasVideo : whether video mode before the switch 899 * @bIsVideo : whether new mode is video or not. 900 * 901 * RETURN : NULL 902 * 903 *==========================================================================*/ 904void QCamera3HardwareInterface::updatePowerHint(bool bWasVideo, bool bIsVideo) 905{ 906#ifdef HAS_MULTIMEDIA_HINTS 907 if (bWasVideo == bIsVideo) 908 return; 909 910 if (m_pPowerModule && m_pPowerModule->powerHint) { 911 if (bIsVideo) 912 m_pPowerModule->powerHint(m_pPowerModule, 913 POWER_HINT_VIDEO_ENCODE, (void *)"state=1"); 914 else 915 m_pPowerModule->powerHint(m_pPowerModule, 916 POWER_HINT_VIDEO_ENCODE, (void *)"state=0"); 917 } 918#endif 919} 920 921/*=========================================================================== 922 * FUNCTION : configureStreams 923 * 924 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 925 * and output streams. 
926 * 927 * PARAMETERS : 928 * @stream_list : streams to be configured 929 * 930 * RETURN : 931 * 932 *==========================================================================*/ 933int QCamera3HardwareInterface::configureStreams( 934 camera3_stream_configuration_t *streamList) 935{ 936 ATRACE_CALL(); 937 int rc = 0; 938 bool bWasVideo = m_bIsVideo; 939 uint32_t numBuffers = MAX_INFLIGHT_REQUESTS; 940 941 // Sanity check stream_list 942 if (streamList == NULL) { 943 ALOGE("%s: NULL stream configuration", __func__); 944 return BAD_VALUE; 945 } 946 if (streamList->streams == NULL) { 947 ALOGE("%s: NULL stream list", __func__); 948 return BAD_VALUE; 949 } 950 951 if (streamList->num_streams < 1) { 952 ALOGE("%s: Bad number of streams requested: %d", __func__, 953 streamList->num_streams); 954 return BAD_VALUE; 955 } 956 957 if (streamList->num_streams >= MAX_NUM_STREAMS) { 958 ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__, 959 MAX_NUM_STREAMS, streamList->num_streams); 960 return BAD_VALUE; 961 } 962 963 mOpMode = streamList->operation_mode; 964 CDBG("%s: mOpMode: %d", __func__, mOpMode); 965 966 /* first invalidate all the steams in the mStreamList 967 * if they appear again, they will be validated */ 968 for (List<stream_info_t*>::iterator it = mStreamInfo.begin(); 969 it != mStreamInfo.end(); it++) { 970 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 971 channel->stop(); 972 (*it)->status = INVALID; 973 } 974 975 if (mRawDumpChannel) { 976 mRawDumpChannel->stop(); 977 delete mRawDumpChannel; 978 mRawDumpChannel = NULL; 979 } 980 981 if (mSupportChannel) 982 mSupportChannel->stop(); 983 984 if (mAnalysisChannel) { 985 mAnalysisChannel->stop(); 986 } 987 if (mMetadataChannel) { 988 /* If content of mStreamInfo is not 0, there is metadata stream */ 989 mMetadataChannel->stop(); 990 } 991 992 pthread_mutex_lock(&mMutex); 993 994 /* Check whether we have video stream */ 995 m_bIs4KVideo = false; 996 m_bIsVideo = false; 997 
m_bEisSupportedSize = false; 998 bool isZsl = false; 999 uint32_t videoWidth = 0U; 1000 uint32_t videoHeight = 0U; 1001 size_t rawStreamCnt = 0; 1002 size_t stallStreamCnt = 0; 1003 size_t processedStreamCnt = 0; 1004 // Number of streams on ISP encoder path 1005 size_t numStreamsOnEncoder = 0; 1006 cam_dimension_t maxViewfinderSize; 1007 bool bJpegExceeds4K = false; 1008 bool bUseCommonFeatureMask = false; 1009 uint32_t commonFeatureMask = 0; 1010 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size; 1011 camera3_stream_t *inputStream = NULL; 1012 1013 /*EIS configuration*/ 1014 bool eisSupported = false; 1015 bool oisSupported = false; 1016 int32_t margin_index = -1; 1017 uint8_t eis_prop_set; 1018 uint32_t maxEisWidth = 0; 1019 uint32_t maxEisHeight = 0; 1020 int32_t hal_version = CAM_HAL_V3; 1021 1022 size_t count = IS_TYPE_MAX; 1023 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count); 1024 for (size_t i = 0; i < count; i++) { 1025 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) { 1026 eisSupported = true; 1027 margin_index = (int32_t)i; 1028 break; 1029 } 1030 } 1031 1032 count = CAM_OPT_STAB_MAX; 1033 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count); 1034 for (size_t i = 0; i < count; i++) { 1035 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) { 1036 oisSupported = true; 1037 break; 1038 } 1039 } 1040 1041 if (eisSupported) { 1042 maxEisWidth = (uint32_t) 1043 ((gCamCapability[mCameraId]->active_array_size.width * 1.0) / 1044 (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index])); 1045 maxEisHeight = (uint32_t) 1046 ((gCamCapability[mCameraId]->active_array_size.height * 1.0) / 1047 (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index])); 1048 } 1049 1050 /* EIS setprop control */ 1051 char eis_prop[PROPERTY_VALUE_MAX]; 1052 memset(eis_prop, 0, sizeof(eis_prop)); 1053 property_get("camera.eis.enable", eis_prop, "0"); 1054 
eis_prop_set = (uint8_t)atoi(eis_prop); 1055 1056 m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported); 1057 1058 /* stream configurations */ 1059 for (size_t i = 0; i < streamList->num_streams; i++) { 1060 camera3_stream_t *newStream = streamList->streams[i]; 1061 ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, " 1062 "height = %d, rotation = %d", 1063 __func__, i, newStream->stream_type, newStream->format, 1064 newStream->width, newStream->height, newStream->rotation); 1065 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL || 1066 newStream->stream_type == CAMERA3_STREAM_INPUT){ 1067 isZsl = true; 1068 } 1069 if (newStream->stream_type == CAMERA3_STREAM_INPUT){ 1070 inputStream = newStream; 1071 } 1072 1073 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) { 1074 if (newStream->width > VIDEO_4K_WIDTH || 1075 newStream->height > VIDEO_4K_HEIGHT) 1076 bJpegExceeds4K = true; 1077 } 1078 1079 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) && 1080 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) { 1081 m_bIsVideo = true; 1082 if ((VIDEO_4K_WIDTH <= newStream->width) && 1083 (VIDEO_4K_HEIGHT <= newStream->height)) { 1084 videoWidth = newStream->width; 1085 videoHeight = newStream->height; 1086 m_bIs4KVideo = true; 1087 } 1088 m_bEisSupportedSize = (newStream->width <= maxEisWidth) && 1089 (newStream->height <= maxEisHeight); 1090 } 1091 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL || 1092 newStream->stream_type == CAMERA3_STREAM_OUTPUT) { 1093 switch (newStream->format) { 1094 case HAL_PIXEL_FORMAT_BLOB: 1095 stallStreamCnt++; 1096 if (((int32_t)newStream->width > maxViewfinderSize.width) || 1097 ((int32_t)newStream->height > maxViewfinderSize.height)) { 1098 commonFeatureMask |= CAM_QCOM_FEATURE_NONE; 1099 numStreamsOnEncoder++; 1100 } 1101 break; 1102 case HAL_PIXEL_FORMAT_RAW10: 1103 case HAL_PIXEL_FORMAT_RAW_OPAQUE: 1104 case HAL_PIXEL_FORMAT_RAW16: 1105 rawStreamCnt++; 1106 break; 1107 
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 1108 processedStreamCnt++; 1109 if (((int32_t)newStream->width > maxViewfinderSize.width) || 1110 ((int32_t)newStream->height > maxViewfinderSize.height)) { 1111 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL || 1112 newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) { 1113 commonFeatureMask |= CAM_QCOM_FEATURE_NONE; 1114 } else { 1115 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1116 } 1117 numStreamsOnEncoder++; 1118 } 1119 break; 1120 case HAL_PIXEL_FORMAT_YCbCr_420_888: 1121 default: 1122 processedStreamCnt++; 1123 if (((int32_t)newStream->width > maxViewfinderSize.width) || 1124 ((int32_t)newStream->height > maxViewfinderSize.height)) { 1125 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1126 numStreamsOnEncoder++; 1127 } 1128 break; 1129 } 1130 1131 } 1132 } 1133 1134 /* Check if num_streams is sane */ 1135 if (stallStreamCnt > MAX_STALLING_STREAMS || 1136 rawStreamCnt > MAX_RAW_STREAMS || 1137 processedStreamCnt > MAX_PROCESSED_STREAMS) { 1138 ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d", 1139 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt); 1140 pthread_mutex_unlock(&mMutex); 1141 return -EINVAL; 1142 } 1143 /* Check whether we have zsl stream or 4k video case */ 1144 if (isZsl && m_bIsVideo) { 1145 ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__); 1146 pthread_mutex_unlock(&mMutex); 1147 return -EINVAL; 1148 } 1149 /* Check if stream sizes are sane */ 1150 if (numStreamsOnEncoder > 2) { 1151 ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2", 1152 __func__); 1153 pthread_mutex_unlock(&mMutex); 1154 return -EINVAL; 1155 } else if (1 < numStreamsOnEncoder){ 1156 bUseCommonFeatureMask = true; 1157 CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed", 1158 __func__); 1159 } 1160 /* Check if BLOB size is greater than 4k in 4k recording case */ 1161 if (m_bIs4KVideo && bJpegExceeds4K) { 1162 
ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording", 1163 __func__); 1164 pthread_mutex_unlock(&mMutex); 1165 return -EINVAL; 1166 } 1167 1168 rc = validateStreamDimensions(streamList); 1169 if (rc == NO_ERROR) { 1170 rc = validateStreamRotations(streamList); 1171 } 1172 if (rc != NO_ERROR) { 1173 ALOGE("%s: Invalid stream configuration requested!", __func__); 1174 pthread_mutex_unlock(&mMutex); 1175 return rc; 1176 } 1177 1178 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle! 1179 camera3_stream_t *jpegStream = NULL; 1180 for (size_t i = 0; i < streamList->num_streams; i++) { 1181 camera3_stream_t *newStream = streamList->streams[i]; 1182 CDBG_HIGH("%s: newStream type = %d, stream format = %d " 1183 "stream size : %d x %d, stream rotation = %d", 1184 __func__, newStream->stream_type, newStream->format, 1185 newStream->width, newStream->height, newStream->rotation); 1186 //if the stream is in the mStreamList validate it 1187 bool stream_exists = false; 1188 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 1189 it != mStreamInfo.end(); it++) { 1190 if ((*it)->stream == newStream) { 1191 QCamera3Channel *channel = 1192 (QCamera3Channel*)(*it)->stream->priv; 1193 stream_exists = true; 1194 if (channel) 1195 delete channel; 1196 (*it)->status = VALID; 1197 (*it)->stream->priv = NULL; 1198 (*it)->channel = NULL; 1199 } 1200 } 1201 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) { 1202 //new stream 1203 stream_info_t* stream_info; 1204 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 1205 if (!stream_info) { 1206 ALOGE("%s: Could not allocate stream info", __func__); 1207 rc = -ENOMEM; 1208 pthread_mutex_unlock(&mMutex); 1209 return rc; 1210 } 1211 stream_info->stream = newStream; 1212 stream_info->status = VALID; 1213 stream_info->channel = NULL; 1214 mStreamInfo.push_back(stream_info); 1215 } 1216 /* Covers Opaque ZSL and API1 F/W ZSL */ 1217 if (newStream->usage & 
GRALLOC_USAGE_HW_CAMERA_ZSL 1218 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) { 1219 if (zslStream != NULL) { 1220 ALOGE("%s: Multiple input/reprocess streams requested!", __func__); 1221 pthread_mutex_unlock(&mMutex); 1222 return BAD_VALUE; 1223 } 1224 zslStream = newStream; 1225 } 1226 /* Covers YUV reprocess */ 1227 if (inputStream != NULL) { 1228 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT 1229 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 1230 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 1231 && inputStream->width == newStream->width 1232 && inputStream->height == newStream->height) { 1233 if (zslStream != NULL) { 1234 /* This scenario indicates multiple YUV streams with same size 1235 * as input stream have been requested, since zsl stream handle 1236 * is solely use for the purpose of overriding the size of streams 1237 * which share h/w streams we will just make a guess here as to 1238 * which of the stream is a ZSL stream, this will be refactored 1239 * once we make generic logic for streams sharing encoder output 1240 */ 1241 CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__); 1242 } 1243 zslStream = newStream; 1244 } 1245 } 1246 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) { 1247 jpegStream = newStream; 1248 } 1249 } 1250 1251 cleanAndSortStreamInfo(); 1252 if (mMetadataChannel) { 1253 delete mMetadataChannel; 1254 mMetadataChannel = NULL; 1255 } 1256 if (mSupportChannel) { 1257 delete mSupportChannel; 1258 mSupportChannel = NULL; 1259 } 1260 1261 if (mAnalysisChannel) { 1262 delete mAnalysisChannel; 1263 mAnalysisChannel = NULL; 1264 } 1265 1266 //Create metadata channel and initialize it 1267 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle, 1268 mCameraHandle->ops, captureResultCb, 1269 &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this); 1270 if (mMetadataChannel == NULL) { 1271 ALOGE("%s: failed to allocate metadata channel", 
__func__); 1272 rc = -ENOMEM; 1273 pthread_mutex_unlock(&mMutex); 1274 return rc; 1275 } 1276 rc = mMetadataChannel->initialize(IS_TYPE_NONE); 1277 if (rc < 0) { 1278 ALOGE("%s: metadata channel initialization failed", __func__); 1279 delete mMetadataChannel; 1280 mMetadataChannel = NULL; 1281 pthread_mutex_unlock(&mMutex); 1282 return rc; 1283 } 1284 1285 /* Create analysis stream if h/w support is available */ 1286 if (gCamCapability[mCameraId]->hw_analysis_supported) { 1287 mAnalysisChannel = new QCamera3SupportChannel( 1288 mCameraHandle->camera_handle, 1289 mCameraHandle->ops, 1290 &gCamCapability[mCameraId]->padding_info, 1291 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3, 1292 CAM_STREAM_TYPE_ANALYSIS, 1293 &gCamCapability[mCameraId]->analysis_recommended_res, 1294 this); 1295 if (!mAnalysisChannel) { 1296 ALOGE("%s: H/W Analysis channel cannot be created", __func__); 1297 pthread_mutex_unlock(&mMutex); 1298 return -ENOMEM; 1299 } 1300 } 1301 1302 bool isRawStreamRequested = false; 1303 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t)); 1304 /* Allocate channel objects for the requested streams */ 1305 for (size_t i = 0; i < streamList->num_streams; i++) { 1306 camera3_stream_t *newStream = streamList->streams[i]; 1307 uint32_t stream_usage = newStream->usage; 1308 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width; 1309 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height; 1310 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL 1311 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) && 1312 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){ 1313 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT; 1314 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE; 1315 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) { 1316 CDBG_HIGH("%s: Input stream configured, 
reprocess config", __func__); 1317 } else { 1318 //for non zsl streams find out the format 1319 switch (newStream->format) { 1320 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED : 1321 { 1322 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) { 1323 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO; 1324 } else { 1325 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW; 1326 } 1327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] 1328 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1329 1330 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) || 1331 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) { 1332 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = 1333 newStream->height; 1334 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = 1335 newStream->width; 1336 } 1337 } 1338 break; 1339 case HAL_PIXEL_FORMAT_YCbCr_420_888: 1340 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK; 1341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1342 break; 1343 case HAL_PIXEL_FORMAT_BLOB: 1344 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT; 1345 if (m_bIs4KVideo && !isZsl) { 1346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] 1347 = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1348 } else { 1349 if (bUseCommonFeatureMask && 1350 (((int32_t)newStream->width > maxViewfinderSize.width) || 1351 ((int32_t)newStream->height > maxViewfinderSize.height))) { 1352 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask; 1353 } else { 1354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE; 1355 } 1356 } 1357 if (isZsl) { 1358 if (zslStream) { 1359 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = 1360 (int32_t)zslStream->width; 1361 
mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = 1362 (int32_t)zslStream->height; 1363 } else { 1364 ALOGE("%s: Error, No ZSL stream identified",__func__); 1365 pthread_mutex_unlock(&mMutex); 1366 return -EINVAL; 1367 } 1368 } else if (m_bIs4KVideo) { 1369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = 1370 (int32_t)videoWidth; 1371 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = 1372 (int32_t)videoHeight; 1373 } 1374 break; 1375 case HAL_PIXEL_FORMAT_RAW_OPAQUE: 1376 case HAL_PIXEL_FORMAT_RAW16: 1377 case HAL_PIXEL_FORMAT_RAW10: 1378 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW; 1379 isRawStreamRequested = true; 1380 break; 1381 default: 1382 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT; 1383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE; 1384 break; 1385 } 1386 1387 } 1388 if (newStream->priv == NULL) { 1389 //New stream, construct channel 1390 switch (newStream->stream_type) { 1391 case CAMERA3_STREAM_INPUT: 1392 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ; 1393 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's 1394 break; 1395 case CAMERA3_STREAM_BIDIRECTIONAL: 1396 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ | 1397 GRALLOC_USAGE_HW_CAMERA_WRITE; 1398 break; 1399 case CAMERA3_STREAM_OUTPUT: 1400 /* For video encoding stream, set read/write rarely 1401 * flag so that they may be set to un-cached */ 1402 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) 1403 newStream->usage |= 1404 (GRALLOC_USAGE_SW_READ_RARELY | 1405 GRALLOC_USAGE_SW_WRITE_RARELY | 1406 GRALLOC_USAGE_HW_CAMERA_WRITE); 1407 else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) 1408 CDBG("%s: ZSL usage flag skipping", __func__); 1409 else 1410 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; 1411 break; 1412 default: 1413 ALOGE("%s: Invalid stream_type %d", __func__, 
newStream->stream_type); 1414 break; 1415 } 1416 1417 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 1418 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 1419 QCamera3Channel *channel = NULL; 1420 switch (newStream->format) { 1421 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 1422 /* use higher number of buffers for HFR mode */ 1423 if((newStream->format == 1424 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) && 1425 (newStream->usage & 1426 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) && 1427 (streamList->operation_mode == 1428 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) 1429 ) { 1430 numBuffers = MAX_INFLIGHT_REQUESTS * MAX_HFR_BATCH_SIZE; 1431 ALOGI("%s: num video buffers in HFR mode: %d", 1432 __func__, numBuffers); 1433 } 1434 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 1435 mCameraHandle->ops, captureResultCb, 1436 &gCamCapability[mCameraId]->padding_info, 1437 this, 1438 newStream, 1439 (cam_stream_type_t) 1440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams], 1441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams], 1442 mMetadataChannel, 1443 numBuffers); 1444 if (channel == NULL) { 1445 ALOGE("%s: allocation of channel failed", __func__); 1446 pthread_mutex_unlock(&mMutex); 1447 return -ENOMEM; 1448 } 1449 newStream->max_buffers = channel->getNumBuffers(); 1450 newStream->priv = channel; 1451 break; 1452 case HAL_PIXEL_FORMAT_YCbCr_420_888: 1453 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle, 1454 mCameraHandle->ops, captureResultCb, 1455 &gCamCapability[mCameraId]->padding_info, 1456 this, 1457 newStream, 1458 (cam_stream_type_t) 1459 mStreamConfigInfo.type[mStreamConfigInfo.num_streams], 1460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams], 1461 mMetadataChannel); 1462 if (channel == NULL) { 1463 ALOGE("%s: allocation of YUV channel failed", __func__); 1464 pthread_mutex_unlock(&mMutex); 1465 return -ENOMEM; 1466 } 1467 newStream->max_buffers = 
channel->getNumBuffers(); 1468 newStream->priv = channel; 1469 break; 1470 case HAL_PIXEL_FORMAT_RAW_OPAQUE: 1471 case HAL_PIXEL_FORMAT_RAW16: 1472 case HAL_PIXEL_FORMAT_RAW10: 1473 mRawChannel = new QCamera3RawChannel( 1474 mCameraHandle->camera_handle, 1475 mCameraHandle->ops, captureResultCb, 1476 &gCamCapability[mCameraId]->padding_info, 1477 this, newStream, CAM_QCOM_FEATURE_NONE, 1478 mMetadataChannel, 1479 (newStream->format == HAL_PIXEL_FORMAT_RAW16)); 1480 if (mRawChannel == NULL) { 1481 ALOGE("%s: allocation of raw channel failed", __func__); 1482 pthread_mutex_unlock(&mMutex); 1483 return -ENOMEM; 1484 } 1485 newStream->max_buffers = mRawChannel->getNumBuffers(); 1486 newStream->priv = (QCamera3Channel*)mRawChannel; 1487 break; 1488 case HAL_PIXEL_FORMAT_BLOB: 1489 // Max live snapshot inflight buffer is 1. This is to mitigate 1490 // frame drop issues for video snapshot. The more buffers being 1491 // allocated, the more frame drops there are. 1492 mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle, 1493 mCameraHandle->ops, captureResultCb, 1494 &gCamCapability[mCameraId]->padding_info, this, newStream, 1495 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams], 1496 m_bIs4KVideo, mMetadataChannel, 1497 (m_bIsVideo ? 
1 : MAX_INFLIGHT_REQUESTS)); 1498 if (mPictureChannel == NULL) { 1499 ALOGE("%s: allocation of channel failed", __func__); 1500 pthread_mutex_unlock(&mMutex); 1501 return -ENOMEM; 1502 } 1503 newStream->priv = (QCamera3Channel*)mPictureChannel; 1504 newStream->max_buffers = mPictureChannel->getNumBuffers(); 1505 break; 1506 1507 default: 1508 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 1509 break; 1510 } 1511 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) { 1512 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS; 1513 } else { 1514 ALOGE("%s: Error, Unknown stream type", __func__); 1515 return -EINVAL; 1516 } 1517 1518 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 1519 it != mStreamInfo.end(); it++) { 1520 if ((*it)->stream == newStream) { 1521 (*it)->channel = (QCamera3Channel*) newStream->priv; 1522 break; 1523 } 1524 } 1525 } else { 1526 // Channel already exists for this stream 1527 // Do nothing for now 1528 } 1529 1530 /* Do not add entries for input stream in metastream info 1531 * since there is no real stream associated with it 1532 */ 1533 if (newStream->stream_type != CAMERA3_STREAM_INPUT) 1534 mStreamConfigInfo.num_streams++; 1535 } 1536 1537 if (isZsl) { 1538 if (zslStream == NULL) { 1539 ALOGE("%s: Error Zsl stream handle missing", __func__); 1540 pthread_mutex_unlock(&mMutex); 1541 return -EINVAL; 1542 } 1543 /* This override is possible since the f/w gaurantees that the ZSL 1544 stream will always be the active array size in case of Bidirectional 1545 or will be limited to the max i/p stream size which we can control to 1546 be equal to be the largest YUV/Opaque stream size 1547 */ 1548 if (mPictureChannel) { 1549 mPictureChannel->overrideYuvSize(zslStream->width, zslStream->height); 1550 } 1551 } else if (mPictureChannel && m_bIs4KVideo) { 1552 mPictureChannel->overrideYuvSize(videoWidth, videoHeight); 1553 } 1554 1555 //RAW DUMP channel 1556 if (mEnableRawDump && isRawStreamRequested == 
false){ 1557 cam_dimension_t rawDumpSize; 1558 rawDumpSize = getMaxRawSize(mCameraId); 1559 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle, 1560 mCameraHandle->ops, 1561 rawDumpSize, 1562 &gCamCapability[mCameraId]->padding_info, 1563 this, CAM_QCOM_FEATURE_NONE); 1564 if (!mRawDumpChannel) { 1565 ALOGE("%s: Raw Dump channel cannot be created", __func__); 1566 pthread_mutex_unlock(&mMutex); 1567 return -ENOMEM; 1568 } 1569 } 1570 1571 1572 if (mAnalysisChannel) { 1573 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = 1574 gCamCapability[mCameraId]->analysis_recommended_res; 1575 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = 1576 CAM_STREAM_TYPE_ANALYSIS; 1577 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = 1578 CAM_QCOM_FEATURE_FACE_DETECTION; 1579 mStreamConfigInfo.num_streams++; 1580 } 1581 1582 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) { 1583 mSupportChannel = new QCamera3SupportChannel( 1584 mCameraHandle->camera_handle, 1585 mCameraHandle->ops, 1586 &gCamCapability[mCameraId]->padding_info, 1587 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3, 1588 CAM_STREAM_TYPE_CALLBACK, 1589 &QCamera3SupportChannel::kDim, 1590 this); 1591 if (!mSupportChannel) { 1592 ALOGE("%s: dummy channel cannot be created", __func__); 1593 pthread_mutex_unlock(&mMutex); 1594 return -ENOMEM; 1595 } 1596 } 1597 1598 if (mSupportChannel) { 1599 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = 1600 QCamera3SupportChannel::kDim; 1601 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = 1602 CAM_STREAM_TYPE_CALLBACK; 1603 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = 1604 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 1605 mStreamConfigInfo.num_streams++; 1606 } 1607 1608 if (mRawDumpChannel) { 1609 cam_dimension_t rawSize; 1610 rawSize = getMaxRawSize(mCameraId); 1611 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = 1612 rawSize; 1613 
mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = 1614 CAM_STREAM_TYPE_RAW; 1615 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = 1616 CAM_QCOM_FEATURE_NONE; 1617 mStreamConfigInfo.num_streams++; 1618 } 1619 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS; 1620 mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS; 1621 1622 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 1623 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 1624 i != mPendingRequestsList.end(); i++) { 1625 clearInputBuffer(i->input_buffer); 1626 i = mPendingRequestsList.erase(i); 1627 } 1628 mPendingFrameDropList.clear(); 1629 // Initialize/Reset the pending buffers list 1630 mPendingBuffersMap.num_buffers = 0; 1631 mPendingBuffersMap.mPendingBufferList.clear(); 1632 mPendingReprocessResultList.clear(); 1633 1634 mFirstRequest = true; 1635 //Get min frame duration for this streams configuration 1636 deriveMinFrameDuration(); 1637 1638 /* Turn on video hint only if video stream is configured */ 1639 updatePowerHint(bWasVideo, m_bIsVideo); 1640 1641 pthread_mutex_unlock(&mMutex); 1642 return rc; 1643} 1644 1645/*=========================================================================== 1646 * FUNCTION : validateCaptureRequest 1647 * 1648 * DESCRIPTION: validate a capture request from camera service 1649 * 1650 * PARAMETERS : 1651 * @request : request from framework to process 1652 * 1653 * RETURN : 1654 * 1655 *==========================================================================*/ 1656int QCamera3HardwareInterface::validateCaptureRequest( 1657 camera3_capture_request_t *request) 1658{ 1659 ssize_t idx = 0; 1660 const camera3_stream_buffer_t *b; 1661 CameraMetadata meta; 1662 1663 /* Sanity check the request */ 1664 if (request == NULL) { 1665 ALOGE("%s: NULL capture request", __func__); 1666 return BAD_VALUE; 1667 } 1668 1669 if (request->settings == NULL && mFirstRequest) { 1670 /*settings cannot be 
null for the first request*/ 1671 return BAD_VALUE; 1672 } 1673 1674 uint32_t frameNumber = request->frame_number; 1675 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 1676 ALOGE("%s: Request %d: No output buffers provided!", 1677 __FUNCTION__, frameNumber); 1678 return BAD_VALUE; 1679 } 1680 if (request->num_output_buffers >= MAX_NUM_STREAMS) { 1681 ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!", 1682 __func__, request->num_output_buffers, MAX_NUM_STREAMS); 1683 return BAD_VALUE; 1684 } 1685 if (request->input_buffer != NULL) { 1686 b = request->input_buffer; 1687 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 1688 ALOGE("%s: Request %d: Buffer %ld: Status not OK!", 1689 __func__, frameNumber, (long)idx); 1690 return BAD_VALUE; 1691 } 1692 if (b->release_fence != -1) { 1693 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!", 1694 __func__, frameNumber, (long)idx); 1695 return BAD_VALUE; 1696 } 1697 if (b->buffer == NULL) { 1698 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!", 1699 __func__, frameNumber, (long)idx); 1700 return BAD_VALUE; 1701 } 1702 } 1703 1704 // Validate all buffers 1705 b = request->output_buffers; 1706 do { 1707 QCamera3Channel *channel = 1708 static_cast<QCamera3Channel*>(b->stream->priv); 1709 if (channel == NULL) { 1710 ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!", 1711 __func__, frameNumber, (long)idx); 1712 return BAD_VALUE; 1713 } 1714 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 1715 ALOGE("%s: Request %d: Buffer %ld: Status not OK!", 1716 __func__, frameNumber, (long)idx); 1717 return BAD_VALUE; 1718 } 1719 if (b->release_fence != -1) { 1720 ALOGE("%s: Request %d: Buffer %ld: Has a release fence!", 1721 __func__, frameNumber, (long)idx); 1722 return BAD_VALUE; 1723 } 1724 if (b->buffer == NULL) { 1725 ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!", 1726 __func__, frameNumber, (long)idx); 1727 return BAD_VALUE; 1728 } 1729 if (*(b->buffer) 
== NULL) { 1730 ALOGE("%s: Request %d: Buffer %ld: NULL private handle!", 1731 __func__, frameNumber, (long)idx); 1732 return BAD_VALUE; 1733 } 1734 idx++; 1735 b = request->output_buffers + idx; 1736 } while (idx < (ssize_t)request->num_output_buffers); 1737 1738 return NO_ERROR; 1739} 1740 1741/*=========================================================================== 1742 * FUNCTION : deriveMinFrameDuration 1743 * 1744 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based 1745 * on currently configured streams. 1746 * 1747 * PARAMETERS : NONE 1748 * 1749 * RETURN : NONE 1750 * 1751 *==========================================================================*/ 1752void QCamera3HardwareInterface::deriveMinFrameDuration() 1753{ 1754 int32_t maxJpegDim, maxProcessedDim, maxRawDim; 1755 1756 maxJpegDim = 0; 1757 maxProcessedDim = 0; 1758 maxRawDim = 0; 1759 1760 // Figure out maximum jpeg, processed, and raw dimensions 1761 for (List<stream_info_t*>::iterator it = mStreamInfo.begin(); 1762 it != mStreamInfo.end(); it++) { 1763 1764 // Input stream doesn't have valid stream_type 1765 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT) 1766 continue; 1767 1768 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height); 1769 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) { 1770 if (dimension > maxJpegDim) 1771 maxJpegDim = dimension; 1772 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE || 1773 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 || 1774 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) { 1775 if (dimension > maxRawDim) 1776 maxRawDim = dimension; 1777 } else { 1778 if (dimension > maxProcessedDim) 1779 maxProcessedDim = dimension; 1780 } 1781 } 1782 1783 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, 1784 MAX_SIZES_CNT); 1785 1786 //Assume all jpeg dimensions are in processed dimensions. 
1787 if (maxJpegDim > maxProcessedDim) 1788 maxProcessedDim = maxJpegDim; 1789 //Find the smallest raw dimension that is greater or equal to jpeg dimension 1790 if (maxProcessedDim > maxRawDim) { 1791 maxRawDim = INT32_MAX; 1792 1793 for (size_t i = 0; i < count; i++) { 1794 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width * 1795 gCamCapability[mCameraId]->raw_dim[i].height; 1796 if (dimension >= maxProcessedDim && dimension < maxRawDim) 1797 maxRawDim = dimension; 1798 } 1799 } 1800 1801 //Find minimum durations for processed, jpeg, and raw 1802 for (size_t i = 0; i < count; i++) { 1803 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width * 1804 gCamCapability[mCameraId]->raw_dim[i].height) { 1805 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i]; 1806 break; 1807 } 1808 } 1809 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT); 1810 for (size_t i = 0; i < count; i++) { 1811 if (maxProcessedDim == 1812 gCamCapability[mCameraId]->picture_sizes_tbl[i].width * 1813 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) { 1814 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i]; 1815 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i]; 1816 break; 1817 } 1818 } 1819} 1820 1821/*=========================================================================== 1822 * FUNCTION : getMinFrameDuration 1823 * 1824 * DESCRIPTION: get minimum frame draution based on the current maximum frame durations 1825 * and current request configuration. 
1826 * 1827 * PARAMETERS : @request: requset sent by the frameworks 1828 * 1829 * RETURN : min farme duration for a particular request 1830 * 1831 *==========================================================================*/ 1832int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request) 1833{ 1834 bool hasJpegStream = false; 1835 bool hasRawStream = false; 1836 for (uint32_t i = 0; i < request->num_output_buffers; i ++) { 1837 const camera3_stream_t *stream = request->output_buffers[i].stream; 1838 if (stream->format == HAL_PIXEL_FORMAT_BLOB) 1839 hasJpegStream = true; 1840 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE || 1841 stream->format == HAL_PIXEL_FORMAT_RAW10 || 1842 stream->format == HAL_PIXEL_FORMAT_RAW16) 1843 hasRawStream = true; 1844 } 1845 1846 if (!hasJpegStream) 1847 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); 1848 else 1849 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration); 1850} 1851 1852/*=========================================================================== 1853 * FUNCTION : handlePendingReprocResults 1854 * 1855 * DESCRIPTION: check and notify on any pending reprocess results 1856 * 1857 * PARAMETERS : 1858 * @frame_number : Pending request frame number 1859 * 1860 * RETURN : int32_t type of status 1861 * NO_ERROR -- success 1862 * none-zero failure code 1863 *==========================================================================*/ 1864int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number) 1865{ 1866 for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin(); 1867 j != mPendingReprocessResultList.end(); j++) { 1868 if (j->frame_number == frame_number) { 1869 mCallbackOps->notify(mCallbackOps, &j->notify_msg); 1870 1871 CDBG("%s: Delayed reprocess notify %d", __func__, 1872 frame_number); 1873 1874 for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin(); 1875 k != 
mPendingRequestsList.end(); k++) { 1876 1877 if (k->frame_number == j->frame_number) { 1878 CDBG("%s: Found reprocess frame number %d in pending reprocess List " 1879 "Take it out!!", __func__, 1880 k->frame_number); 1881 1882 camera3_capture_result result; 1883 memset(&result, 0, sizeof(camera3_capture_result)); 1884 result.frame_number = frame_number; 1885 result.num_output_buffers = 1; 1886 result.output_buffers = &j->buffer; 1887 result.input_buffer = k->input_buffer; 1888 result.result = k->settings; 1889 result.partial_result = PARTIAL_RESULT_COUNT; 1890 mCallbackOps->process_capture_result(mCallbackOps, &result); 1891 1892 clearInputBuffer(k->input_buffer); 1893 mPendingRequestsList.erase(k); 1894 mPendingRequest--; 1895 break; 1896 } 1897 } 1898 mPendingReprocessResultList.erase(j); 1899 break; 1900 } 1901 } 1902 return NO_ERROR; 1903} 1904 1905/*=========================================================================== 1906 * FUNCTION : handleBatchMetadata 1907 * 1908 * DESCRIPTION: Handles metadata buffer callback in batch mode 1909 * 1910 * PARAMETERS : @metadata_buf: metadata buffer 1911 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free 1912 * the meta buf in this method 1913 * 1914 * RETURN : 1915 * 1916 *==========================================================================*/ 1917void QCamera3HardwareInterface::handleBatchMetadata( 1918 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf) 1919{ 1920 ATRACE_CALL(); 1921 1922 if (NULL == metadata_buf) { 1923 ALOGE("%s: metadata_buf is NULL", __func__); 1924 return; 1925 } 1926 metadata_buffer_t *metadata = 1927 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer; 1928 int32_t frame_number_valid, urgent_frame_number_valid; 1929 uint32_t last_frame_number, last_urgent_frame_number; 1930 uint32_t frame_number, urgent_frame_number = 0; 1931 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time; 1932 bool invalid_metadata = false; 1933 1934 int32_t 
*p_frame_number_valid = 1935 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata); 1936 uint32_t *p_frame_number = 1937 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata); 1938 int64_t *p_capture_time = 1939 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata); 1940 int32_t *p_urgent_frame_number_valid = 1941 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata); 1942 uint32_t *p_urgent_frame_number = 1943 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata); 1944 1945 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || 1946 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) || 1947 (NULL == p_urgent_frame_number)) { 1948 ALOGE("%s: Invalid metadata", __func__); 1949 invalid_metadata = true; 1950 } else { 1951 frame_number_valid = *p_frame_number_valid; 1952 last_frame_number = *p_frame_number; 1953 last_frame_capture_time = *p_capture_time; 1954 urgent_frame_number_valid = *p_urgent_frame_number_valid; 1955 last_urgent_frame_number = *p_urgent_frame_number; 1956 } 1957 1958 // If reported capture_time is 0, skip handling this metadata 1959 if (!last_frame_capture_time) { 1960 goto done_batch_metadata; 1961 } 1962 1963 for (size_t i = 0; i < mBatchSize; i++) { 1964 /* handleMetadataWithLock is called even for invalid_metadata for 1965 * pipeline depth calculation */ 1966 if (!invalid_metadata) { 1967 /* Infer frame number. Batch metadata contains frame number of the 1968 * last frame */ 1969 if (urgent_frame_number_valid) { 1970 urgent_frame_number = 1971 last_urgent_frame_number + 1 - mBatchSize + i; 1972 CDBG("%s: last urgent frame_number in batch: %d, " 1973 "inferred urgent frame_number: %d", 1974 __func__, last_urgent_frame_number, urgent_frame_number); 1975 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata, 1976 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number); 1977 } 1978 1979 /* Infer frame number. 
Batch metadata contains frame number of the 1980 * last frame */ 1981 if (frame_number_valid) { 1982 frame_number = last_frame_number + 1 - mBatchSize + i; 1983 CDBG("%s: last frame_number in batch: %d, " 1984 "inferred frame_number: %d", 1985 __func__, last_frame_number, frame_number); 1986 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata, 1987 CAM_INTF_META_FRAME_NUMBER, frame_number); 1988 } 1989 1990 //Infer timestamp 1991 first_frame_capture_time = last_frame_capture_time - 1992 (((mBatchSize - 1) * NSEC_PER_SEC) / mHFRVideoFps); 1993 capture_time = 1994 first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps); 1995 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata, 1996 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time); 1997 CDBG("%s: batch capture_time: %lld, capture_time: %lld", 1998 __func__, last_frame_capture_time, capture_time); 1999 } 2000 pthread_mutex_lock(&mMutex); 2001 handleMetadataWithLock(metadata_buf, 2002 false /* free_and_bufdone_meta_buf */); 2003 pthread_mutex_unlock(&mMutex); 2004 } 2005 2006done_batch_metadata: 2007 /* BufDone metadata buffer */ 2008 if (free_and_bufdone_meta_buf) { 2009 mMetadataChannel->bufDone(metadata_buf); 2010 free(metadata_buf); 2011 } 2012} 2013 2014/*=========================================================================== 2015 * FUNCTION : handleMetadataWithLock 2016 * 2017 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held. 
 *
 * PARAMETERS : @metadata_buf: metadata buffer
 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
 *                 the meta buf in this method
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop stays in scope after this macro and is consulted
    // below when deciding whether to raise ERROR_BUFFER notifications.
    IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
                __func__, *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        ALOGE("%s: Invalid metadata", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    } else {
        frame_number_valid = *p_frame_number_valid;
        frame_number = *p_frame_number;
        capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        urgent_frame_number = *p_urgent_frame_number;
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
                __func__, urgent_frame_number, capture_time);

        //Recieved an urgent Frame Number, handle it
        //using partial results
        for (List<PendingRequestInfo>::iterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                    __func__, i->frame_number, urgent_frame_number);

            // An earlier pending request with no partial result yet means
            // the HAL never delivered its urgent metadata — log only.
            if (i->frame_number < urgent_frame_number &&
                    i->partial_result_cnt == 0) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                        __func__, i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                    i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                        translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %u, capture_time = %lld",
                        __func__, result.frame_number, capture_time);
                // Ownership of the translated metadata stays here; release
                // it once the framework callback returns.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    CDBG("%s: valid frame_number = %u, capture_time = %lld", __func__,
            frame_number, capture_time);

    // Walk pending requests up to and including this frame number; entries
    // with smaller frame numbers get a dummy (timestamp-only) result, the
    // matching entry gets the real translated metadata. The iterator is
    // advanced by erase() at the bottom of the loop.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // The API does not expect a blob buffer to be dropped
        if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream->format != HAL_PIXEL_FORMAT_BLOB) {
                    QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                    uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                    for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
                        if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
                            // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                            CDBG("%s: Start of reporting error frame#=%u, streamID=%u",
                                    __func__, i->frame_number, streamID);
                            notify_msg.type = CAMERA3_MSG_ERROR;
                            notify_msg.message.error.frame_number = i->frame_number;
                            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                            notify_msg.message.error.error_stream = j->stream;
                            mCallbackOps->notify(mCallbackOps, &notify_msg);
                            CDBG("%s: End of reporting error frame#=%u, streamID=%u",
                                    __func__, i->frame_number, streamID);
                            PendingFrameDropInfo PendingFrameDrop;
                            PendingFrameDrop.frame_number=i->frame_number;
                            PendingFrameDrop.stream_ID = streamID;
                            // Add the Frame drop info to mPendingFrameDropList
                            mPendingFrameDropList.push_back(PendingFrameDrop);
                        }
                    }
                }
            }
        }

        //TODO: batch handling for dropped metadata

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Back-date the shutter timestamp by one 33ms frame interval per
            // missed frame (NOTE(review): assumes ~30fps spacing — confirm).
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time -
                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = (nsecs_t)notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %u, capture_time = %llu",
                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);

            // Dummy result: only timestamp and request id, no real 3A data.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                    &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                    &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                                mMetaFrameCount,
                                enabled,
                                "Snapshot",
                                frame_number);
                    }
                }

                // Blob (JPEG) request: metadata buffer ownership passes to
                // the picture channel for reprocess; do NOT bufDone here.
                mPictureChannel->queueReprocMetadata(metadata_buf);
            } else {
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count buffers already delivered to handleBufferWithLock for this
        // request; they ride along with this metadata result.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer as errored if a frame drop was recorded
                    // for its stream at this frame number.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                            m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            CDBG("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                    __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }

                    // Remove this buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                            mPendingBuffersMap.mPendingBufferList.begin();
                            k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                        if (k->buffer == j->buffer->buffer) {
                            CDBG("%s: Found buffer %p in pending buffer List "
                                    "for frame %u, Take it out!!", __func__,
                                    k->buffer, k->frame_number);
                            mPendingBuffersMap.num_buffers--;
                            k = mPendingBuffersMap.mPendingBufferList.erase(k);
                            break;
                        }
                    }

                    // j->buffer was malloc'd by handleBufferWithLock; copy it
                    // into the result array and free the cached copy.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %u, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %u, capture_time = %lld",
                    __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list
        clearInputBuffer(i->input_buffer);
        i = mPendingRequestsList.erase(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending has now seen one more metadata callback.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    unblockRequestIfNecessary();

}

/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Metadata for this frame was already delivered: send the buffer
        // straight to the framework as a buffers-only result.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // Flag the buffer as errored if a frame drop was recorded for this
        // stream/frame combination, and consume that drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove the buffer from the global pending-buffers bookkeeping.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: prefer the sensor timestamp carried in the
            // input settings; fall back to the current monotonic time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for the input buffer's release fence before reporting.
            sp<Fence> releaseFence = new Fence(i->input_buffer->release_fence);
            int32_t rc = releaseFence->wait(Fence::TIMEOUT_NEVER);
            if (rc != OK) {
                ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
            }

            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                    __func__, mPendingBuffersMap.num_buffers);

            // Results must be delivered in frame-number order: only notify
            // now if no pending request has a smaller frame number.
            bool notifyNow = true;
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                clearInputBuffer(i->input_buffer);
                i = mPendingRequestsList.erase(i);
                mPendingRequest--;
            } else {
                // Cache reprocess result for later
                // (delivered via handlePendingReprocResults).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Metadata not yet received: cache a malloc'd copy of the buffer
            // on the matching stream entry; handleMetadataWithLock frees it
            // when it assembles the full result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG("%s: cache buffer %p at result frame_number %d",
                                __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}

/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
2492 * 2493 * PARAMETERS : 2494 * 2495 * RETURN : 2496 * 2497 *==========================================================================*/ 2498void QCamera3HardwareInterface::unblockRequestIfNecessary() 2499{ 2500 // Unblock process_capture_request 2501 pthread_cond_signal(&mRequestCond); 2502} 2503 2504/*=========================================================================== 2505 * FUNCTION : processCaptureRequest 2506 * 2507 * DESCRIPTION: process a capture request from camera service 2508 * 2509 * PARAMETERS : 2510 * @request : request from framework to process 2511 * 2512 * RETURN : 2513 * 2514 *==========================================================================*/ 2515int QCamera3HardwareInterface::processCaptureRequest( 2516 camera3_capture_request_t *request) 2517{ 2518 ATRACE_CALL(); 2519 int rc = NO_ERROR; 2520 int32_t request_id; 2521 CameraMetadata meta; 2522 uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS; 2523 uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS; 2524 bool isVidBufRequested = false; 2525 camera3_stream_buffer_t *pInputBuffer; 2526 2527 pthread_mutex_lock(&mMutex); 2528 2529 rc = validateCaptureRequest(request); 2530 if (rc != NO_ERROR) { 2531 ALOGE("%s: incoming request is not valid", __func__); 2532 pthread_mutex_unlock(&mMutex); 2533 return rc; 2534 } 2535 2536 meta = request->settings; 2537 2538 // For first capture request, send capture intent, and 2539 // stream on all streams 2540 if (mFirstRequest) { 2541 // send an unconfigure to the backend so that the isp 2542 // resources are deallocated 2543 if (!mFirstConfiguration) { 2544 cam_stream_size_info_t stream_config_info; 2545 int32_t hal_version = CAM_HAL_V3; 2546 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t)); 2547 stream_config_info.buffer_info.min_buffers = 2548 MIN_INFLIGHT_REQUESTS; 2549 stream_config_info.buffer_info.max_buffers = 2550 MAX_INFLIGHT_REQUESTS; 2551 clear_metadata_buffer(mParameters); 2552 
ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 2553 CAM_INTF_PARM_HAL_VERSION, hal_version); 2554 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 2555 CAM_INTF_META_STREAM_INFO, stream_config_info); 2556 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, 2557 mParameters); 2558 if (rc < 0) { 2559 ALOGE("%s: set_parms for unconfigure failed", __func__); 2560 pthread_mutex_unlock(&mMutex); 2561 return rc; 2562 } 2563 } 2564 2565 /* get eis information for stream configuration */ 2566 cam_is_type_t is_type; 2567 char is_type_value[PROPERTY_VALUE_MAX]; 2568 property_get("camera.is_type", is_type_value, "0"); 2569 is_type = static_cast<cam_is_type_t>(atoi(is_type_value)); 2570 2571 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2572 int32_t hal_version = CAM_HAL_V3; 2573 uint8_t captureIntent = 2574 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2575 mCaptureIntent = captureIntent; 2576 clear_metadata_buffer(mParameters); 2577 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version); 2578 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent); 2579 } 2580 2581 //If EIS is enabled, turn it on for video 2582 bool setEis = m_bEisEnable && m_bEisSupportedSize && 2583 ((mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD) || 2584 (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT)); 2585 int32_t vsMode; 2586 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE; 2587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) { 2588 rc = BAD_VALUE; 2589 } 2590 2591 //IS type will be 0 unless EIS is supported. 
If EIS is supported 2592 //it could either be 1 or 4 depending on the stream and video size 2593 if (setEis){ 2594 if (!m_bEisSupportedSize) { 2595 is_type = IS_TYPE_DIS; 2596 } else { 2597 is_type = IS_TYPE_EIS_2_0; 2598 } 2599 } 2600 2601 if (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD) { 2602 mStreamConfigInfo.is_type = is_type; 2603 } else { 2604 mStreamConfigInfo.is_type = IS_TYPE_NONE; 2605 } 2606 2607 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 2608 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo); 2609 int32_t tintless_value = 1; 2610 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 2611 CAM_INTF_PARM_TINTLESS, tintless_value); 2612 2613 setMobicat(); 2614 2615 /* Set fps and hfr mode while sending meta stream info so that sensor 2616 * can configure appropriate streaming mode */ 2617 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2618 rc = setHalFpsRange(meta, mParameters); 2619 if (rc != NO_ERROR) { 2620 ALOGE("%s: setHalFpsRange failed", __func__); 2621 } 2622 } 2623 if (meta.exists(ANDROID_CONTROL_MODE)) { 2624 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0]; 2625 rc = extractSceneMode(meta, metaMode, mParameters); 2626 if (rc != NO_ERROR) { 2627 ALOGE("%s: extractSceneMode failed", __func__); 2628 } 2629 } 2630 2631 //TODO: validate the arguments, HSV scenemode should have only the 2632 //advertised fps ranges 2633 2634 /*set the capture intent, hal version, tintless, stream info, 2635 *and disenable parameters to the backend*/ 2636 CDBG("%s: set_parms META_STREAM_INFO ", __func__ ); 2637 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, 2638 mParameters); 2639 2640 cam_dimension_t sensor_dim; 2641 memset(&sensor_dim, 0, sizeof(sensor_dim)); 2642 rc = getSensorOutputSize(sensor_dim); 2643 if (rc != NO_ERROR) { 2644 ALOGE("%s: Failed to get sensor output size", __func__); 2645 pthread_mutex_unlock(&mMutex); 2646 return rc; 2647 } 2648 2649 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width, 2650 
gCamCapability[mCameraId]->active_array_size.height, 2651 sensor_dim.width, sensor_dim.height); 2652 2653 /* Set batchmode before initializing channel. Since registerBuffer 2654 * internally initializes some of the channels, better set batchmode 2655 * even before first register buffer */ 2656 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2657 it != mStreamInfo.end(); it++) { 2658 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 2659 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) 2660 && mBatchSize) { 2661 rc = channel->setBatchSize(mBatchSize); 2662 //Disable per frame map unmap for HFR/batchmode case 2663 rc |= channel->setPerFrameMapUnmap(false); 2664 if (NO_ERROR != rc) { 2665 ALOGE("%s : Channel init failed %d", __func__, rc); 2666 pthread_mutex_unlock(&mMutex); 2667 return rc; 2668 } 2669 } 2670 } 2671 2672 for (size_t i = 0; i < request->num_output_buffers; i++) { 2673 const camera3_stream_buffer_t& output = request->output_buffers[i]; 2674 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 2675 /*for livesnapshot stream is_type will be DIS*/ 2676 if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 2677 rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS); 2678 } else { 2679 rc = channel->registerBuffer(output.buffer, is_type); 2680 } 2681 if (rc < 0) { 2682 ALOGE("%s: registerBuffer failed", 2683 __func__); 2684 pthread_mutex_unlock(&mMutex); 2685 return -ENODEV; 2686 } 2687 } 2688 2689 //First initialize all streams 2690 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2691 it != mStreamInfo.end(); it++) { 2692 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 2693 if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) { 2694 rc = channel->initialize(IS_TYPE_DIS); 2695 } else { 2696 rc = channel->initialize(is_type); 2697 } 2698 if (NO_ERROR != rc) { 2699 ALOGE("%s : Channel initialization failed %d", __func__, rc); 2700 
pthread_mutex_unlock(&mMutex); 2701 return rc; 2702 } 2703 } 2704 2705 if (mRawDumpChannel) { 2706 rc = mRawDumpChannel->initialize(is_type); 2707 if (rc != NO_ERROR) { 2708 ALOGE("%s: Error: Raw Dump Channel init failed", __func__); 2709 pthread_mutex_unlock(&mMutex); 2710 return rc; 2711 } 2712 } 2713 if (mSupportChannel) { 2714 rc = mSupportChannel->initialize(is_type); 2715 if (rc < 0) { 2716 ALOGE("%s: Support channel initialization failed", __func__); 2717 pthread_mutex_unlock(&mMutex); 2718 return rc; 2719 } 2720 } 2721 if (mAnalysisChannel) { 2722 rc = mAnalysisChannel->initialize(is_type); 2723 if (rc < 0) { 2724 ALOGE("%s: Analysis channel initialization failed", __func__); 2725 pthread_mutex_unlock(&mMutex); 2726 return rc; 2727 } 2728 } 2729 2730 //Then start them. 2731 CDBG_HIGH("%s: Start META Channel", __func__); 2732 rc = mMetadataChannel->start(); 2733 if (rc < 0) { 2734 ALOGE("%s: META channel start failed", __func__); 2735 pthread_mutex_unlock(&mMutex); 2736 return rc; 2737 } 2738 2739 if (mAnalysisChannel) { 2740 rc = mAnalysisChannel->start(); 2741 if (rc < 0) { 2742 ALOGE("%s: Analysis channel start failed", __func__); 2743 mMetadataChannel->stop(); 2744 pthread_mutex_unlock(&mMutex); 2745 return rc; 2746 } 2747 } 2748 2749 if (mSupportChannel) { 2750 rc = mSupportChannel->start(); 2751 if (rc < 0) { 2752 ALOGE("%s: Support channel start failed", __func__); 2753 mMetadataChannel->stop(); 2754 /* Although support and analysis are mutually exclusive today 2755 adding it in anycase for future proofing */ 2756 if (mAnalysisChannel) { 2757 mAnalysisChannel->stop(); 2758 } 2759 pthread_mutex_unlock(&mMutex); 2760 return rc; 2761 } 2762 } 2763 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2764 it != mStreamInfo.end(); it++) { 2765 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 2766 CDBG_HIGH("%s: Start Processing Channel mask=%d", 2767 __func__, channel->getStreamTypeMask()); 2768 rc = channel->start(); 2769 if 
(rc < 0) { 2770 ALOGE("%s: channel start failed", __func__); 2771 pthread_mutex_unlock(&mMutex); 2772 return rc; 2773 } 2774 } 2775 2776 if (mRawDumpChannel) { 2777 CDBG("%s: Starting raw dump stream",__func__); 2778 rc = mRawDumpChannel->start(); 2779 if (rc != NO_ERROR) { 2780 ALOGE("%s: Error Starting Raw Dump Channel", __func__); 2781 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2782 it != mStreamInfo.end(); it++) { 2783 QCamera3Channel *channel = 2784 (QCamera3Channel *)(*it)->stream->priv; 2785 ALOGE("%s: Stopping Processing Channel mask=%d", __func__, 2786 channel->getStreamTypeMask()); 2787 channel->stop(); 2788 } 2789 if (mSupportChannel) 2790 mSupportChannel->stop(); 2791 if (mAnalysisChannel) { 2792 mAnalysisChannel->stop(); 2793 } 2794 mMetadataChannel->stop(); 2795 pthread_mutex_unlock(&mMutex); 2796 return rc; 2797 } 2798 } 2799 mWokenUpByDaemon = false; 2800 mPendingRequest = 0; 2801 mFirstConfiguration = false; 2802 mBatchStreamID.num_streams = 0; 2803 } 2804 2805 uint32_t frameNumber = request->frame_number; 2806 cam_stream_ID_t streamID; 2807 2808 if (meta.exists(ANDROID_REQUEST_ID)) { 2809 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0]; 2810 mCurrentRequestId = request_id; 2811 CDBG("%s: Received request with id: %d",__func__, request_id); 2812 } else if (mFirstRequest || mCurrentRequestId == -1){ 2813 ALOGE("%s: Unable to find request id field, \ 2814 & no previous id available", __func__); 2815 pthread_mutex_unlock(&mMutex); 2816 return NAME_NOT_FOUND; 2817 } else { 2818 CDBG("%s: Re-using old request id", __func__); 2819 request_id = mCurrentRequestId; 2820 } 2821 2822 CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d", 2823 __func__, __LINE__, 2824 request->num_output_buffers, 2825 request->input_buffer, 2826 frameNumber); 2827 // Acquire all request buffers first 2828 streamID.num_streams = 0; 2829 int blob_request = 0; 2830 uint32_t snapshotStreamId = 0; 2831 for (size_t i = 0; i < 
request->num_output_buffers; i++) { 2832 const camera3_stream_buffer_t& output = request->output_buffers[i]; 2833 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 2834 sp<Fence> acquireFence = new Fence(output.acquire_fence); 2835 2836 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 2837 //Call function to store local copy of jpeg data for encode params. 2838 blob_request = 1; 2839 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask()); 2840 } 2841 2842 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 2843 if (rc != OK) { 2844 ALOGE("%s: fence wait failed %d", __func__, rc); 2845 pthread_mutex_unlock(&mMutex); 2846 return rc; 2847 } 2848 2849 streamID.streamID[streamID.num_streams] = 2850 channel->getStreamID(channel->getStreamTypeMask()); 2851 streamID.num_streams++; 2852 2853 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) { 2854 isVidBufRequested = true; 2855 } 2856 } 2857 2858 if (blob_request && mRawDumpChannel) { 2859 CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__); 2860 streamID.streamID[streamID.num_streams] = 2861 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask()); 2862 streamID.num_streams++; 2863 } 2864 2865 if(request->input_buffer == NULL) { 2866 /* Parse the settings: 2867 * - For every request in NORMAL MODE 2868 * - For every request in HFR mode during preview only case 2869 * - For first request of every batch in HFR mode during video 2870 * recording. In batchmode the same settings except frame number is 2871 * repeated in each request of the batch. 
2872 */ 2873 if (!mBatchSize || 2874 (mBatchSize && !isVidBufRequested) || 2875 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) { 2876 rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId); 2877 if (rc < 0) { 2878 ALOGE("%s: fail to set frame parameters", __func__); 2879 pthread_mutex_unlock(&mMutex); 2880 return rc; 2881 } 2882 } 2883 /* For batchMode HFR, setFrameParameters is not called for every 2884 * request. But only frame number of the latest request is parsed */ 2885 if (mBatchSize && ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 2886 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) { 2887 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2888 return BAD_VALUE; 2889 } 2890 } else { 2891 sp<Fence> acquireFence = new Fence(request->input_buffer->acquire_fence); 2892 2893 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 2894 if (rc != OK) { 2895 ALOGE("%s: input buffer fence wait failed %d", __func__, rc); 2896 pthread_mutex_unlock(&mMutex); 2897 return rc; 2898 } 2899 } 2900 2901 /* Update pending request list and pending buffers map */ 2902 PendingRequestInfo pendingRequest; 2903 pendingRequest.frame_number = frameNumber; 2904 pendingRequest.num_buffers = request->num_output_buffers; 2905 pendingRequest.request_id = request_id; 2906 pendingRequest.blob_request = blob_request; 2907 pendingRequest.timestamp = 0; 2908 pendingRequest.bUrgentReceived = 0; 2909 if (request->input_buffer) { 2910 pendingRequest.input_buffer = 2911 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t)); 2912 memcpy(pendingRequest.input_buffer, request->input_buffer, sizeof(camera3_stream_buffer_t)); 2913 pInputBuffer = pendingRequest.input_buffer; 2914 } else { 2915 pendingRequest.input_buffer = NULL; 2916 pInputBuffer = NULL; 2917 } 2918 pendingRequest.settings = request->settings; 2919 pendingRequest.pipeline_depth = 0; 2920 pendingRequest.partial_result_cnt = 0; 2921 extractJpegMetadata(pendingRequest.jpegMetadata, 
request); 2922 2923 //extract capture intent 2924 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 2925 mCaptureIntent = 2926 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 2927 } 2928 pendingRequest.capture_intent = mCaptureIntent; 2929 2930 for (size_t i = 0; i < request->num_output_buffers; i++) { 2931 RequestedBufferInfo requestedBuf; 2932 requestedBuf.stream = request->output_buffers[i].stream; 2933 requestedBuf.buffer = NULL; 2934 pendingRequest.buffers.push_back(requestedBuf); 2935 2936 // Add to buffer handle the pending buffers list 2937 PendingBufferInfo bufferInfo; 2938 bufferInfo.frame_number = frameNumber; 2939 bufferInfo.buffer = request->output_buffers[i].buffer; 2940 bufferInfo.stream = request->output_buffers[i].stream; 2941 mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo); 2942 mPendingBuffersMap.num_buffers++; 2943 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv; 2944 CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d", 2945 __func__, frameNumber, bufferInfo.buffer, 2946 channel->getStreamTypeMask(), bufferInfo.stream->format); 2947 } 2948 CDBG("%s: mPendingBuffersMap.num_buffers = %d", 2949 __func__, mPendingBuffersMap.num_buffers); 2950 2951 mPendingRequestsList.push_back(pendingRequest); 2952 2953 if(mFlush) { 2954 pthread_mutex_unlock(&mMutex); 2955 return NO_ERROR; 2956 } 2957 2958 // Notify metadata channel we receive a request 2959 mMetadataChannel->request(NULL, frameNumber); 2960 2961 if(request->input_buffer != NULL){ 2962 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId); 2963 if (NO_ERROR != rc) { 2964 ALOGE("%s: fail to set reproc parameters", __func__); 2965 pthread_mutex_unlock(&mMutex); 2966 return rc; 2967 } 2968 } 2969 2970 // Call request on other streams 2971 for (size_t i = 0; i < request->num_output_buffers; i++) { 2972 const camera3_stream_buffer_t& output = request->output_buffers[i]; 2973 QCamera3Channel *channel = (QCamera3Channel 
*)output.stream->priv; 2974 2975 if (channel == NULL) { 2976 ALOGE("%s: invalid channel pointer for stream", __func__); 2977 continue; 2978 } 2979 2980 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 2981 if(request->input_buffer != NULL){ 2982 rc = channel->request(output.buffer, frameNumber, 2983 pInputBuffer, &mReprocMeta); 2984 if (rc < 0) { 2985 ALOGE("%s: Fail to request on picture channel", __func__); 2986 pthread_mutex_unlock(&mMutex); 2987 return rc; 2988 } 2989 } else { 2990 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__, 2991 __LINE__, output.buffer, frameNumber); 2992 if (!request->settings) { 2993 rc = channel->request(output.buffer, frameNumber, 2994 NULL, mPrevParameters); 2995 } else { 2996 rc = channel->request(output.buffer, frameNumber, 2997 NULL, mParameters); 2998 } 2999 if (rc < 0) { 3000 ALOGE("%s: Fail to request on picture channel", __func__); 3001 pthread_mutex_unlock(&mMutex); 3002 return rc; 3003 } 3004 } 3005 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { 3006 rc = channel->request(output.buffer, frameNumber, 3007 pInputBuffer, 3008 pInputBuffer? 
&mReprocMeta : mParameters); 3009 if (rc < 0) { 3010 ALOGE("%s: Fail to request on YUV channel", __func__); 3011 pthread_mutex_unlock(&mMutex); 3012 return rc; 3013 } 3014 } else { 3015 CDBG("%s: %d, request with buffer %p, frame_number %d", __func__, 3016 __LINE__, output.buffer, frameNumber); 3017 rc = channel->request(output.buffer, frameNumber); 3018 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) 3019 && mBatchSize) { 3020 mToBeQueuedVidBufs++; 3021 if (mToBeQueuedVidBufs == mBatchSize) { 3022 channel->queueBatchBuf(); 3023 } 3024 } 3025 if (rc < 0) { 3026 ALOGE("%s: request failed", __func__); 3027 pthread_mutex_unlock(&mMutex); 3028 return rc; 3029 } 3030 } 3031 } 3032 3033 if(request->input_buffer == NULL) { 3034 /* Set the parameters to backend: 3035 * - For every request in NORMAL MODE 3036 * - For every request in HFR mode during preview only case 3037 * - Once every batch in HFR mode during video recording 3038 */ 3039 if (!mBatchSize || 3040 (mBatchSize && !isVidBufRequested) || 3041 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) { 3042 CDBG("%s: set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ", 3043 __func__, mBatchSize, isVidBufRequested, 3044 mToBeQueuedVidBufs); 3045 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, 3046 mParameters); 3047 if (rc < 0) { 3048 ALOGE("%s: set_parms failed", __func__); 3049 } 3050 /* reset to zero coz, the batch is queued */ 3051 mToBeQueuedVidBufs = 0; 3052 } 3053 } 3054 3055 mFirstRequest = false; 3056 // Added a timed condition wait 3057 struct timespec ts; 3058 uint8_t isValidTimeout = 1; 3059 rc = clock_gettime(CLOCK_REALTIME, &ts); 3060 if (rc < 0) { 3061 isValidTimeout = 0; 3062 ALOGE("%s: Error reading the real time clock!!", __func__); 3063 } 3064 else { 3065 // Make timeout as 5 sec for request to be honored 3066 ts.tv_sec += 5; 3067 } 3068 //Block on conditional variable 3069 3070 mPendingRequest++; 3071 if (mBatchSize) { 3072 /* For HFR, 
more buffers are dequeued upfront to improve the performance */ 3073 minInFlightRequests = (MIN_INFLIGHT_REQUESTS + 1) * mBatchSize; 3074 maxInFlightRequests = MAX_INFLIGHT_REQUESTS * mBatchSize; 3075 } 3076 while (mPendingRequest >= minInFlightRequests) { 3077 if (!isValidTimeout) { 3078 CDBG("%s: Blocking on conditional wait", __func__); 3079 pthread_cond_wait(&mRequestCond, &mMutex); 3080 } 3081 else { 3082 CDBG("%s: Blocking on timed conditional wait", __func__); 3083 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts); 3084 if (rc == ETIMEDOUT) { 3085 rc = -ENODEV; 3086 ALOGE("%s: Unblocked on timeout!!!!", __func__); 3087 break; 3088 } 3089 } 3090 CDBG("%s: Unblocked", __func__); 3091 if (mWokenUpByDaemon) { 3092 mWokenUpByDaemon = false; 3093 if (mPendingRequest < maxInFlightRequests) 3094 break; 3095 } 3096 } 3097 pthread_mutex_unlock(&mMutex); 3098 3099 return rc; 3100} 3101 3102/*=========================================================================== 3103 * FUNCTION : dump 3104 * 3105 * DESCRIPTION: 3106 * 3107 * PARAMETERS : 3108 * 3109 * 3110 * RETURN : 3111 *==========================================================================*/ 3112void QCamera3HardwareInterface::dump(int fd) 3113{ 3114 pthread_mutex_lock(&mMutex); 3115 dprintf(fd, "\n Camera HAL3 information Begin \n"); 3116 3117 dprintf(fd, "\nNumber of pending requests: %zu \n", 3118 mPendingRequestsList.size()); 3119 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n"); 3120 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n"); 3121 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n"); 3122 for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 3123 i != mPendingRequestsList.end(); i++) { 3124 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n", 3125 i->frame_number, i->num_buffers, i->request_id, i->blob_request, 3126 i->input_buffer); 3127 } 3128 dprintf(fd, 
"\nPending buffer map: Number of buffers: %u\n", 3129 mPendingBuffersMap.num_buffers); 3130 dprintf(fd, "-------+------------------\n"); 3131 dprintf(fd, " Frame | Stream type mask \n"); 3132 dprintf(fd, "-------+------------------\n"); 3133 for(List<PendingBufferInfo>::iterator i = 3134 mPendingBuffersMap.mPendingBufferList.begin(); 3135 i != mPendingBuffersMap.mPendingBufferList.end(); i++) { 3136 QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv); 3137 dprintf(fd, " %5d | %11d \n", 3138 i->frame_number, channel->getStreamTypeMask()); 3139 } 3140 dprintf(fd, "-------+------------------\n"); 3141 3142 dprintf(fd, "\nPending frame drop list: %zu\n", 3143 mPendingFrameDropList.size()); 3144 dprintf(fd, "-------+-----------\n"); 3145 dprintf(fd, " Frame | Stream ID \n"); 3146 dprintf(fd, "-------+-----------\n"); 3147 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin(); 3148 i != mPendingFrameDropList.end(); i++) { 3149 dprintf(fd, " %5d | %9d \n", 3150 i->frame_number, i->stream_ID); 3151 } 3152 dprintf(fd, "-------+-----------\n"); 3153 3154 dprintf(fd, "\n Camera HAL3 information End \n"); 3155 3156 /* use dumpsys media.camera as trigger to send update debug level event */ 3157 mUpdateDebugLevel = true; 3158 pthread_mutex_unlock(&mMutex); 3159 return; 3160} 3161 3162/*=========================================================================== 3163 * FUNCTION : flush 3164 * 3165 * DESCRIPTION: 3166 * 3167 * PARAMETERS : 3168 * 3169 * 3170 * RETURN : 3171 *==========================================================================*/ 3172int QCamera3HardwareInterface::flush() 3173{ 3174 ATRACE_CALL(); 3175 unsigned int frameNum = 0; 3176 camera3_capture_result_t result; 3177 camera3_stream_buffer_t *pStream_Buf = NULL; 3178 FlushMap flushMap; 3179 3180 CDBG("%s: Unblocking Process Capture Request", __func__); 3181 pthread_mutex_lock(&mMutex); 3182 mFlush = true; 3183 pthread_mutex_unlock(&mMutex); 3184 3185 memset(&result, 0, 
sizeof(camera3_capture_result_t)); 3186 3187 // Stop the Streams/Channels 3188 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 3189 it != mStreamInfo.end(); it++) { 3190 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 3191 channel->stop(); 3192 (*it)->status = INVALID; 3193 } 3194 3195 if (mSupportChannel) { 3196 mSupportChannel->stop(); 3197 } 3198 if (mAnalysisChannel) { 3199 mAnalysisChannel->stop(); 3200 } 3201 if (mRawDumpChannel) { 3202 mRawDumpChannel->stop(); 3203 } 3204 if (mMetadataChannel) { 3205 /* If content of mStreamInfo is not 0, there is metadata stream */ 3206 mMetadataChannel->stop(); 3207 } 3208 3209 // Mutex Lock 3210 pthread_mutex_lock(&mMutex); 3211 3212 // Unblock process_capture_request 3213 mPendingRequest = 0; 3214 pthread_cond_signal(&mRequestCond); 3215 3216 List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 3217 frameNum = i->frame_number; 3218 CDBG("%s: Oldest frame num on mPendingRequestsList = %d", 3219 __func__, frameNum); 3220 3221 // Go through the pending buffers and group them depending 3222 // on frame number 3223 for (List<PendingBufferInfo>::iterator k = 3224 mPendingBuffersMap.mPendingBufferList.begin(); 3225 k != mPendingBuffersMap.mPendingBufferList.end();) { 3226 3227 if (k->frame_number < frameNum) { 3228 ssize_t idx = flushMap.indexOfKey(k->frame_number); 3229 if (idx == NAME_NOT_FOUND) { 3230 Vector<PendingBufferInfo> pending; 3231 pending.add(*k); 3232 flushMap.add(k->frame_number, pending); 3233 } else { 3234 Vector<PendingBufferInfo> &pending = 3235 flushMap.editValueFor(k->frame_number); 3236 pending.add(*k); 3237 } 3238 3239 mPendingBuffersMap.num_buffers--; 3240 k = mPendingBuffersMap.mPendingBufferList.erase(k); 3241 } else { 3242 k++; 3243 } 3244 } 3245 3246 for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) { 3247 uint32_t frame_number = flushMap.keyAt(iFlush); 3248 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush); 3249 3250 // 
Send Error notify to frameworks for each buffer for which 3251 // metadata buffer is already sent 3252 CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d", 3253 __func__, frame_number, pending.size()); 3254 3255 pStream_Buf = new camera3_stream_buffer_t[pending.size()]; 3256 if (NULL == pStream_Buf) { 3257 ALOGE("%s: No memory for pending buffers array", __func__); 3258 pthread_mutex_unlock(&mMutex); 3259 return NO_MEMORY; 3260 } 3261 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size()); 3262 3263 for (size_t j = 0; j < pending.size(); j++) { 3264 const PendingBufferInfo &info = pending.itemAt(j); 3265 camera3_notify_msg_t notify_msg; 3266 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t)); 3267 notify_msg.type = CAMERA3_MSG_ERROR; 3268 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER; 3269 notify_msg.message.error.error_stream = info.stream; 3270 notify_msg.message.error.frame_number = frame_number; 3271 pStream_Buf[j].acquire_fence = -1; 3272 pStream_Buf[j].release_fence = -1; 3273 pStream_Buf[j].buffer = info.buffer; 3274 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR; 3275 pStream_Buf[j].stream = info.stream; 3276 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 3277 CDBG("%s: notify frame_number = %d stream %p", __func__, 3278 frame_number, info.stream); 3279 } 3280 3281 result.result = NULL; 3282 result.frame_number = frame_number; 3283 result.num_output_buffers = (uint32_t)pending.size(); 3284 result.output_buffers = pStream_Buf; 3285 mCallbackOps->process_capture_result(mCallbackOps, &result); 3286 3287 delete [] pStream_Buf; 3288 } 3289 3290 CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__); 3291 3292 flushMap.clear(); 3293 for (List<PendingBufferInfo>::iterator k = 3294 mPendingBuffersMap.mPendingBufferList.begin(); 3295 k != mPendingBuffersMap.mPendingBufferList.end();) { 3296 ssize_t idx = flushMap.indexOfKey(k->frame_number); 3297 if (idx == NAME_NOT_FOUND) { 3298 Vector<PendingBufferInfo> 
pending; 3299 pending.add(*k); 3300 flushMap.add(k->frame_number, pending); 3301 } else { 3302 Vector<PendingBufferInfo> &pending = 3303 flushMap.editValueFor(k->frame_number); 3304 pending.add(*k); 3305 } 3306 3307 mPendingBuffersMap.num_buffers--; 3308 k = mPendingBuffersMap.mPendingBufferList.erase(k); 3309 } 3310 3311 // Go through the pending requests info and send error request to framework 3312 for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) { 3313 uint32_t frame_number = flushMap.keyAt(iFlush); 3314 const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush); 3315 CDBG("%s:Sending ERROR REQUEST for frame %d", 3316 __func__, frame_number); 3317 3318 // Send shutter notify to frameworks 3319 camera3_notify_msg_t notify_msg; 3320 memset(¬ify_msg, 0, sizeof(camera3_notify_msg_t)); 3321 notify_msg.type = CAMERA3_MSG_ERROR; 3322 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST; 3323 notify_msg.message.error.error_stream = NULL; 3324 notify_msg.message.error.frame_number = frame_number; 3325 mCallbackOps->notify(mCallbackOps, ¬ify_msg); 3326 3327 pStream_Buf = new camera3_stream_buffer_t[pending.size()]; 3328 if (NULL == pStream_Buf) { 3329 ALOGE("%s: No memory for pending buffers array", __func__); 3330 pthread_mutex_unlock(&mMutex); 3331 return NO_MEMORY; 3332 } 3333 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size()); 3334 3335 for (size_t j = 0; j < pending.size(); j++) { 3336 const PendingBufferInfo &info = pending.itemAt(j); 3337 pStream_Buf[j].acquire_fence = -1; 3338 pStream_Buf[j].release_fence = -1; 3339 pStream_Buf[j].buffer = info.buffer; 3340 pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR; 3341 pStream_Buf[j].stream = info.stream; 3342 } 3343 3344 result.num_output_buffers = (uint32_t)pending.size(); 3345 result.output_buffers = pStream_Buf; 3346 result.result = NULL; 3347 result.frame_number = frame_number; 3348 mCallbackOps->process_capture_result(mCallbackOps, &result); 3349 delete [] 
pStream_Buf; 3350 } 3351 3352 /* Reset pending buffer list and requests list */ 3353 for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin(); 3354 i != mPendingRequestsList.end(); i++) { 3355 clearInputBuffer(i->input_buffer); 3356 i = mPendingRequestsList.erase(i); 3357 } 3358 /* Reset pending frame Drop list and requests list */ 3359 mPendingFrameDropList.clear(); 3360 3361 flushMap.clear(); 3362 mPendingBuffersMap.num_buffers = 0; 3363 mPendingBuffersMap.mPendingBufferList.clear(); 3364 mPendingReprocessResultList.clear(); 3365 CDBG("%s: Cleared all the pending buffers ", __func__); 3366 3367 mFlush = false; 3368 3369 // Start the Streams/Channels 3370 int rc = NO_ERROR; 3371 if (mMetadataChannel) { 3372 /* If content of mStreamInfo is not 0, there is metadata stream */ 3373 rc = mMetadataChannel->start(); 3374 if (rc < 0) { 3375 ALOGE("%s: META channel start failed", __func__); 3376 pthread_mutex_unlock(&mMutex); 3377 return rc; 3378 } 3379 } 3380 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 3381 it != mStreamInfo.end(); it++) { 3382 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 3383 rc = channel->start(); 3384 if (rc < 0) { 3385 ALOGE("%s: channel start failed", __func__); 3386 pthread_mutex_unlock(&mMutex); 3387 return rc; 3388 } 3389 } 3390 if (mAnalysisChannel) { 3391 mAnalysisChannel->start(); 3392 } 3393 if (mSupportChannel) { 3394 rc = mSupportChannel->start(); 3395 if (rc < 0) { 3396 ALOGE("%s: Support channel start failed", __func__); 3397 pthread_mutex_unlock(&mMutex); 3398 return rc; 3399 } 3400 } 3401 if (mRawDumpChannel) { 3402 rc = mRawDumpChannel->start(); 3403 if (rc < 0) { 3404 ALOGE("%s: RAW dump channel start failed", __func__); 3405 pthread_mutex_unlock(&mMutex); 3406 return rc; 3407 } 3408 } 3409 3410 pthread_mutex_unlock(&mMutex); 3411 3412 return 0; 3413} 3414 3415/*=========================================================================== 3416 * FUNCTION : captureResultCb 3417 * 
3418 * DESCRIPTION: Callback handler for all capture result 3419 * (streams, as well as metadata) 3420 * 3421 * PARAMETERS : 3422 * @metadata : metadata information 3423 * @buffer : actual gralloc buffer to be returned to frameworks. 3424 * NULL if metadata. 3425 * 3426 * RETURN : NONE 3427 *==========================================================================*/ 3428void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf, 3429 camera3_stream_buffer_t *buffer, uint32_t frame_number) 3430{ 3431 if (metadata_buf) { 3432 if (mBatchSize) { 3433 handleBatchMetadata(metadata_buf, 3434 true /* free_and_bufdone_meta_buf */); 3435 } else { /* mBatchSize = 0 */ 3436 pthread_mutex_lock(&mMutex); 3437 handleMetadataWithLock(metadata_buf, 3438 true /* free_and_bufdone_meta_buf */); 3439 pthread_mutex_unlock(&mMutex); 3440 } 3441 } else { 3442 pthread_mutex_lock(&mMutex); 3443 handleBufferWithLock(buffer, frame_number); 3444 pthread_mutex_unlock(&mMutex); 3445 } 3446 return; 3447} 3448 3449/*=========================================================================== 3450 * FUNCTION : lookupFwkName 3451 * 3452 * DESCRIPTION: In case the enum is not same in fwk and backend 3453 * make sure the parameter is correctly propogated 3454 * 3455 * PARAMETERS : 3456 * @arr : map between the two enums 3457 * @len : len of the map 3458 * @hal_name : name of the hal_parm to map 3459 * 3460 * RETURN : int type of status 3461 * fwk_name -- success 3462 * none-zero failure code 3463 *==========================================================================*/ 3464template <typename halType, class mapType> int lookupFwkName(const mapType *arr, 3465 size_t len, halType hal_name) 3466{ 3467 3468 for (size_t i = 0; i < len; i++) { 3469 if (arr[i].hal_name == hal_name) { 3470 return arr[i].fwk_name; 3471 } 3472 } 3473 3474 /* Not able to find matching framework type is not necessarily 3475 * an error case. 
This happens when mm-camera supports more attributes 3476 * than the frameworks do */ 3477 CDBG_HIGH("%s: Cannot find matching framework type", __func__); 3478 return NAME_NOT_FOUND; 3479} 3480 3481/*=========================================================================== 3482 * FUNCTION : lookupHalName 3483 * 3484 * DESCRIPTION: In case the enum is not same in fwk and backend 3485 * make sure the parameter is correctly propogated 3486 * 3487 * PARAMETERS : 3488 * @arr : map between the two enums 3489 * @len : len of the map 3490 * @fwk_name : name of the hal_parm to map 3491 * 3492 * RETURN : int32_t type of status 3493 * hal_name -- success 3494 * none-zero failure code 3495 *==========================================================================*/ 3496template <typename fwkType, class mapType> int lookupHalName(const mapType *arr, 3497 size_t len, fwkType fwk_name) 3498{ 3499 for (size_t i = 0; i < len; i++) { 3500 if (arr[i].fwk_name == fwk_name) { 3501 return arr[i].hal_name; 3502 } 3503 } 3504 3505 ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name); 3506 return NAME_NOT_FOUND; 3507} 3508 3509/*=========================================================================== 3510 * FUNCTION : lookupProp 3511 * 3512 * DESCRIPTION: lookup a value by its name 3513 * 3514 * PARAMETERS : 3515 * @arr : map between the two enums 3516 * @len : size of the map 3517 * @name : name to be looked up 3518 * 3519 * RETURN : Value if found 3520 * CAM_CDS_MODE_MAX if not found 3521 *==========================================================================*/ 3522template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr, 3523 size_t len, const char *name) 3524{ 3525 if (name) { 3526 for (size_t i = 0; i < len; i++) { 3527 if (!strcmp(arr[i].desc, name)) { 3528 return arr[i].val; 3529 } 3530 } 3531 } 3532 return CAM_CDS_MODE_MAX; 3533} 3534 3535/*=========================================================================== 3536 * FUNCTION : 
clearInputBuffer 3537 * 3538 * DESCRIPTION: free the input buffer 3539 * 3540 * PARAMETERS : 3541 * @input_buffer : ptr to input buffer data to be freed 3542 * 3543 * RETURN : NONE 3544 *==========================================================================*/ 3545void QCamera3HardwareInterface::clearInputBuffer(camera3_stream_buffer_t *input_buffer) 3546{ 3547 if (input_buffer) { 3548 free(input_buffer); 3549 input_buffer = NULL; 3550 } 3551} 3552 3553/*=========================================================================== 3554 * 3555 * DESCRIPTION: 3556 * 3557 * PARAMETERS : 3558 * @metadata : metadata information from callback 3559 * @timestamp: metadata buffer timestamp 3560 * @request_id: request id 3561 * @jpegMetadata: additional jpeg metadata 3562 * 3563 * RETURN : camera_metadata_t* 3564 * metadata in a format specified by fwk 3565 *==========================================================================*/ 3566camera_metadata_t* 3567QCamera3HardwareInterface::translateFromHalMetadata( 3568 metadata_buffer_t *metadata, 3569 nsecs_t timestamp, 3570 int32_t request_id, 3571 const CameraMetadata& jpegMetadata, 3572 uint8_t pipeline_depth, 3573 uint8_t capture_intent) 3574{ 3575 CameraMetadata camMetadata; 3576 camera_metadata_t *resultMetadata; 3577 3578 if (jpegMetadata.entryCount()) 3579 camMetadata.append(jpegMetadata); 3580 3581 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); 3582 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1); 3583 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1); 3584 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1); 3585 3586 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) { 3587 int64_t fwk_frame_number = *frame_number; 3588 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1); 3589 } 3590 3591 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) { 3592 int32_t fps_range[2]; 3593 
fps_range[0] = (int32_t)float_range->min_fps; 3594 fps_range[1] = (int32_t)float_range->max_fps; 3595 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, 3596 fps_range, 2); 3597 CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]", 3598 __func__, fps_range[0], fps_range[1]); 3599 } 3600 3601 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) { 3602 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1); 3603 } 3604 3605 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) { 3606 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP, 3607 METADATA_MAP_SIZE(SCENE_MODES_MAP), 3608 *sceneMode); 3609 if (NAME_NOT_FOUND != val) { 3610 uint8_t fwkSceneMode = (uint8_t)val; 3611 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1); 3612 CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d", 3613 __func__, fwkSceneMode); 3614 } 3615 } 3616 3617 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) { 3618 uint8_t fwk_ae_lock = (uint8_t) *ae_lock; 3619 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1); 3620 } 3621 3622 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) { 3623 uint8_t fwk_awb_lock = (uint8_t) *awb_lock; 3624 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1); 3625 } 3626 3627 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo, 3628 CAM_INTF_META_FACE_DETECTION, metadata) { 3629 uint8_t numFaces = MIN(faceDetectionInfo->num_faces_detected, MAX_ROI); 3630 int32_t faceIds[MAX_ROI]; 3631 uint8_t faceScores[MAX_ROI]; 3632 int32_t faceRectangles[MAX_ROI * 4]; 3633 int32_t faceLandmarks[MAX_ROI * 6]; 3634 size_t j = 0, k = 0; 3635 for (size_t i = 0; i < numFaces; i++) { 3636 faceIds[i] = faceDetectionInfo->faces[i].face_id; 3637 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score; 3638 convertToRegions(faceDetectionInfo->faces[i].face_boundary, 3639 
faceRectangles+j, -1); 3640 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k); 3641 j+= 4; 3642 k+= 6; 3643 } 3644 if (numFaces <= 0) { 3645 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI); 3646 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI); 3647 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4); 3648 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6); 3649 } 3650 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces); 3651 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces); 3652 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES, faceRectangles, numFaces * 4U); 3653 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, faceLandmarks, numFaces * 6U); 3654 } 3655 3656 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) { 3657 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode; 3658 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1); 3659 } 3660 3661 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication, 3662 CAM_INTF_META_EDGE_MODE, metadata) { 3663 uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness; 3664 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1); 3665 camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); 3666 } 3667 3668 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) { 3669 uint8_t fwk_flashPower = (uint8_t) *flashPower; 3670 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1); 3671 } 3672 3673 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) { 3674 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 3675 } 3676 3677 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) { 3678 if (0 <= *flashState) { 3679 uint8_t fwk_flashState = (uint8_t) *flashState; 3680 if (!gCamCapability[mCameraId]->flash_available) { 3681 fwk_flashState = 
ANDROID_FLASH_STATE_UNAVAILABLE; 3682 } 3683 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1); 3684 } 3685 } 3686 3687 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) { 3688 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode); 3689 if (NAME_NOT_FOUND != val) { 3690 uint8_t fwk_flashMode = (uint8_t)val; 3691 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1); 3692 } 3693 } 3694 3695 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) { 3696 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode; 3697 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1); 3698 } 3699 3700 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) { 3701 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 3702 } 3703 3704 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) { 3705 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 3706 } 3707 3708 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) { 3709 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 3710 } 3711 3712 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) { 3713 uint8_t fwk_opticalStab = (uint8_t) *opticalStab; 3714 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1); 3715 } 3716 3717 /*EIS is currently not hooked up to the app, so set the mode to OFF*/ 3718 uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 3719 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1); 3720 3721 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) { 3722 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode; 3723 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1); 3724 } 3725 3726 IF_META_AVAILABLE(uint32_t, noiseRedStrength, 
CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata) { 3727 uint8_t fwk_noiseRedStrength = (uint8_t) *noiseRedStrength; 3728 camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, &fwk_noiseRedStrength, 1); 3729 } 3730 3731 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) { 3732 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1); 3733 } 3734 3735 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion, 3736 CAM_INTF_META_SCALER_CROP_REGION, metadata) { 3737 int32_t scalerCropRegion[4]; 3738 scalerCropRegion[0] = hScalerCropRegion->left; 3739 scalerCropRegion[1] = hScalerCropRegion->top; 3740 scalerCropRegion[2] = hScalerCropRegion->width; 3741 scalerCropRegion[3] = hScalerCropRegion->height; 3742 3743 // Adjust crop region from sensor output coordinate system to active 3744 // array coordinate system. 3745 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1], 3746 scalerCropRegion[2], scalerCropRegion[3]); 3747 3748 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4); 3749 } 3750 3751 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) { 3752 CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime); 3753 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 3754 } 3755 3756 IF_META_AVAILABLE(int64_t, sensorFameDuration, 3757 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) { 3758 CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration); 3759 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 3760 } 3761 3762 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew, 3763 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) { 3764 CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew); 3765 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, 3766 sensorRollingShutterSkew, 1); 3767 } 3768 3769 IF_META_AVAILABLE(int32_t, sensorSensitivity, 
CAM_INTF_META_SENSOR_SENSITIVITY, metadata) { 3770 CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity); 3771 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 3772 3773 //calculate the noise profile based on sensitivity 3774 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity); 3775 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity); 3776 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels]; 3777 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) { 3778 noise_profile[i] = noise_profile_S; 3779 noise_profile[i+1] = noise_profile_O; 3780 } 3781 CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__, 3782 noise_profile_S, noise_profile_O); 3783 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile, 3784 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels)); 3785 } 3786 3787 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) { 3788 uint8_t fwk_shadingMode = (uint8_t) *shadingMode; 3789 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1); 3790 } 3791 3792 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) { 3793 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP), 3794 *faceDetectMode); 3795 if (NAME_NOT_FOUND != val) { 3796 uint8_t fwk_faceDetectMode = (uint8_t)val; 3797 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1); 3798 } 3799 } 3800 3801 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) { 3802 uint8_t fwk_histogramMode = (uint8_t) *histogramMode; 3803 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1); 3804 } 3805 3806 IF_META_AVAILABLE(uint32_t, sharpnessMapMode, 3807 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) { 3808 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode; 3809 
camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1); 3810 } 3811 3812 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap, 3813 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) { 3814 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness, 3815 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3); 3816 } 3817 3818 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap, 3819 CAM_INTF_META_LENS_SHADING_MAP, metadata) { 3820 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height, 3821 CAM_MAX_SHADING_MAP_HEIGHT); 3822 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width, 3823 CAM_MAX_SHADING_MAP_WIDTH); 3824 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP, 3825 lensShadingMap->lens_shading, 4U * map_width * map_height); 3826 } 3827 3828 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) { 3829 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode; 3830 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1); 3831 } 3832 3833 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) { 3834 //Populate CAM_INTF_META_TONEMAP_CURVES 3835 /* ch0 = G, ch 1 = B, ch 2 = R*/ 3836 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) { 3837 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d", 3838 __func__, tonemap->tonemap_points_cnt, 3839 CAM_MAX_TONEMAP_CURVE_SIZE); 3840 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE; 3841 } 3842 3843 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN, 3844 &tonemap->curves[0].tonemap_points[0][0], 3845 tonemap->tonemap_points_cnt * 2); 3846 3847 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE, 3848 &tonemap->curves[1].tonemap_points[0][0], 3849 tonemap->tonemap_points_cnt * 2); 3850 3851 camMetadata.update(ANDROID_TONEMAP_CURVE_RED, 3852 &tonemap->curves[2].tonemap_points[0][0], 3853 tonemap->tonemap_points_cnt * 2); 3854 } 3855 
3856 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains, 3857 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) { 3858 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 3859 CC_GAINS_COUNT); 3860 } 3861 3862 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix, 3863 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) { 3864 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM, 3865 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix, 3866 CC_MATRIX_COLS * CC_MATRIX_ROWS); 3867 } 3868 3869 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve, 3870 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) { 3871 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) { 3872 ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d", 3873 __func__, toneCurve->tonemap_points_cnt, 3874 CAM_MAX_TONEMAP_CURVE_SIZE); 3875 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE; 3876 } 3877 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE, 3878 (float*)toneCurve->curve.tonemap_points, 3879 toneCurve->tonemap_points_cnt * 2); 3880 } 3881 3882 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains, 3883 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) { 3884 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, 3885 predColorCorrectionGains->gains, 4); 3886 } 3887 3888 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix, 3889 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) { 3890 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM, 3891 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix, 3892 CC_MATRIX_ROWS * CC_MATRIX_COLS); 3893 } 3894 3895 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) { 3896 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1); 3897 } 3898 3899 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) { 3900 uint8_t 
fwk_blackLevelLock = (uint8_t) *blackLevelLock; 3901 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1); 3902 } 3903 3904 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) { 3905 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker; 3906 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1); 3907 } 3908 3909 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) { 3910 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP), 3911 *effectMode); 3912 if (NAME_NOT_FOUND != val) { 3913 uint8_t fwk_effectMode = (uint8_t)val; 3914 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1); 3915 } 3916 } 3917 3918 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData, 3919 CAM_INTF_META_TEST_PATTERN_DATA, metadata) { 3920 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP, 3921 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode); 3922 if (NAME_NOT_FOUND != fwk_testPatternMode) { 3923 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1); 3924 } 3925 int32_t fwk_testPatternData[4]; 3926 fwk_testPatternData[0] = testPatternData->r; 3927 fwk_testPatternData[3] = testPatternData->b; 3928 switch (gCamCapability[mCameraId]->color_arrangement) { 3929 case CAM_FILTER_ARRANGEMENT_RGGB: 3930 case CAM_FILTER_ARRANGEMENT_GRBG: 3931 fwk_testPatternData[1] = testPatternData->gr; 3932 fwk_testPatternData[2] = testPatternData->gb; 3933 break; 3934 case CAM_FILTER_ARRANGEMENT_GBRG: 3935 case CAM_FILTER_ARRANGEMENT_BGGR: 3936 fwk_testPatternData[2] = testPatternData->gr; 3937 fwk_testPatternData[1] = testPatternData->gb; 3938 break; 3939 default: 3940 ALOGE("%s: color arrangement %d is not supported", __func__, 3941 gCamCapability[mCameraId]->color_arrangement); 3942 break; 3943 } 3944 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4); 3945 } 3946 3947 IF_META_AVAILABLE(double, gps_coords, 
CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) { 3948 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3); 3949 } 3950 3951 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) { 3952 String8 str((const char *)gps_methods); 3953 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str); 3954 } 3955 3956 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) { 3957 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1); 3958 } 3959 3960 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) { 3961 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1); 3962 } 3963 3964 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) { 3965 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality; 3966 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1); 3967 } 3968 3969 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) { 3970 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality; 3971 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1); 3972 } 3973 3974 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) { 3975 int32_t fwk_thumb_size[2]; 3976 fwk_thumb_size[0] = thumb_size->width; 3977 fwk_thumb_size[1] = thumb_size->height; 3978 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2); 3979 } 3980 3981 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) { 3982 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS, 3983 privateData, 3984 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t)); 3985 } 3986 3987 if (metadata->is_tuning_params_valid) { 3988 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)]; 3989 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0]; 3990 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION; 3991 3992 3993 memcpy(data, ((uint8_t 
*)&metadata->tuning_params.tuning_data_version), 3994 sizeof(uint32_t)); 3995 data += sizeof(uint32_t); 3996 3997 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size), 3998 sizeof(uint32_t)); 3999 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data)); 4000 data += sizeof(uint32_t); 4001 4002 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size), 4003 sizeof(uint32_t)); 4004 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data)); 4005 data += sizeof(uint32_t); 4006 4007 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size), 4008 sizeof(uint32_t)); 4009 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data)); 4010 data += sizeof(uint32_t); 4011 4012 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size), 4013 sizeof(uint32_t)); 4014 CDBG("tuning_cac_data_size %d",(int)(*(int *)data)); 4015 data += sizeof(uint32_t); 4016 4017 metadata->tuning_params.tuning_mod3_data_size = 0; 4018 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size), 4019 sizeof(uint32_t)); 4020 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data)); 4021 data += sizeof(uint32_t); 4022 4023 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size, 4024 TUNING_SENSOR_DATA_MAX); 4025 memcpy(data, ((uint8_t *)&metadata->tuning_params.data), 4026 count); 4027 data += count; 4028 4029 count = MIN(metadata->tuning_params.tuning_vfe_data_size, 4030 TUNING_VFE_DATA_MAX); 4031 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]), 4032 count); 4033 data += count; 4034 4035 count = MIN(metadata->tuning_params.tuning_cpp_data_size, 4036 TUNING_CPP_DATA_MAX); 4037 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]), 4038 count); 4039 data += count; 4040 4041 count = MIN(metadata->tuning_params.tuning_cac_data_size, 4042 TUNING_CAC_DATA_MAX); 4043 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]), 4044 count); 4045 data += count; 4046 
4047 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB, 4048 (int32_t *)(void *)tuning_meta_data_blob, 4049 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t)); 4050 } 4051 4052 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint, 4053 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) { 4054 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, 4055 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point, 4056 NEUTRAL_COL_POINTS); 4057 } 4058 4059 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) { 4060 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode; 4061 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1); 4062 } 4063 4064 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) { 4065 int32_t aeRegions[REGIONS_TUPLE_COUNT]; 4066 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 4067 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 4068 REGIONS_TUPLE_COUNT); 4069 CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]", 4070 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3], 4071 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width, 4072 hAeRegions->rect.height); 4073 } 4074 4075 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) { 4076 /*af regions*/ 4077 int32_t afRegions[REGIONS_TUPLE_COUNT]; 4078 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 4079 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 4080 REGIONS_TUPLE_COUNT); 4081 CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]", 4082 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3], 4083 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width, 4084 hAfRegions->rect.height); 4085 } 4086 4087 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) { 4088 int val = 
lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), 4089 *hal_ab_mode); 4090 if (NAME_NOT_FOUND != val) { 4091 uint8_t fwk_ab_mode = (uint8_t)val; 4092 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1); 4093 } 4094 } 4095 4096 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) { 4097 int val = lookupFwkName(SCENE_MODES_MAP, 4098 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode); 4099 if (NAME_NOT_FOUND != val) { 4100 uint8_t fwkBestshotMode = (uint8_t)val; 4101 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1); 4102 CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__); 4103 } else { 4104 CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__); 4105 } 4106 } 4107 4108 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) { 4109 uint8_t fwk_mode = (uint8_t) *mode; 4110 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1); 4111 } 4112 4113 /* Constant metadata values to be update*/ 4114 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST; 4115 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1); 4116 4117 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; 4118 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); 4119 4120 int32_t hotPixelMap[2]; 4121 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0); 4122 4123 // CDS 4124 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) { 4125 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1); 4126 } 4127 4128 // Reprocess crop data 4129 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) { 4130 uint8_t cnt = crop_data->num_of_streams; 4131 if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) { 4132 int rc = NO_ERROR; 4133 int32_t *crop = new int32_t[cnt*4]; 4134 if (NULL == crop) { 4135 rc = NO_MEMORY; 4136 } 4137 4138 int32_t *crop_stream_ids = new int32_t[cnt]; 4139 if (NULL == 
crop_stream_ids) { 4140 rc = NO_MEMORY; 4141 } 4142 4143 Vector<int32_t> roi_map; 4144 4145 if (NO_ERROR == rc) { 4146 int32_t steams_found = 0; 4147 for (size_t i = 0; i < cnt; i++) { 4148 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 4149 it != mStreamInfo.end(); it++) { 4150 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 4151 if (NULL != channel) { 4152 if (crop_data->crop_info[i].stream_id == 4153 channel->mStreams[0]->getMyServerID()) { 4154 crop[steams_found*4] = crop_data->crop_info[i].crop.left; 4155 crop[steams_found*4 + 1] = crop_data->crop_info[i].crop.top; 4156 crop[steams_found*4 + 2] = crop_data->crop_info[i].crop.width; 4157 crop[steams_found*4 + 3] = crop_data->crop_info[i].crop.height; 4158 // In a more general case we may want to generate 4159 // unique id depending on width, height, stream, private 4160 // data etc. 4161#ifdef __LP64__ 4162 // Using XORed value of lower and upper halves as ID 4163 crop_stream_ids[steams_found] = (int32_t) 4164 ((((int64_t)(*it)->stream) & 0x0000FFFF) ^ 4165 (((int64_t)(*it)->stream) >> 0x20 & 0x0000FFFF)); 4166#else 4167 // FIXME: Although using data address as ID doesn't guarantee 4168 // that all IDs will be unique, we are keeping existing nostrum 4169 // for now till found better solution. 
4170 crop_stream_ids[steams_found] = (int32_t)(*it)->stream; 4171#endif 4172 steams_found++; 4173 roi_map.add(crop_data->crop_info[i].roi_map.left); 4174 roi_map.add(crop_data->crop_info[i].roi_map.top); 4175 roi_map.add(crop_data->crop_info[i].roi_map.width); 4176 roi_map.add(crop_data->crop_info[i].roi_map.height); 4177 CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d", 4178 __func__, 4179 (*it)->stream, 4180 crop_data->crop_info[i].crop.left, 4181 crop_data->crop_info[i].crop.top, 4182 crop_data->crop_info[i].crop.width, 4183 crop_data->crop_info[i].crop.height); 4184 CDBG("%s: Adding reprocess crop roi map for stream %p %dx%d, %dx%d", 4185 __func__, 4186 (*it)->stream, 4187 crop_data->crop_info[i].roi_map.left, 4188 crop_data->crop_info[i].roi_map.top, 4189 crop_data->crop_info[i].roi_map.width, 4190 crop_data->crop_info[i].roi_map.height); 4191 break; 4192 } 4193 } 4194 } 4195 } 4196 4197 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS, 4198 &steams_found, 1); 4199 camMetadata.update(QCAMERA3_CROP_REPROCESS, 4200 crop, (size_t)(steams_found * 4)); 4201 camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS, 4202 crop_stream_ids, (size_t)steams_found); 4203 if (roi_map.array()) { 4204 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS, 4205 roi_map.array(), roi_map.size()); 4206 } 4207 } 4208 4209 if (crop) { 4210 delete [] crop; 4211 } 4212 if (crop_stream_ids) { 4213 delete [] crop_stream_ids; 4214 } 4215 } else { 4216 // mm-qcamera-daemon only posts crop_data for streams 4217 // not linked to pproc. So no valid crop metadata is not 4218 // necessarily an error case. 
4219 CDBG("%s: No valid crop metadata entries", __func__); 4220 } 4221 } 4222 4223 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) { 4224 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP), 4225 *cacMode); 4226 if (NAME_NOT_FOUND != val) { 4227 uint8_t fwkCacMode = (uint8_t)val; 4228 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1); 4229 } else { 4230 ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode); 4231 } 4232 } 4233 4234 resultMetadata = camMetadata.release(); 4235 return resultMetadata; 4236} 4237 4238/*=========================================================================== 4239 * FUNCTION : saveExifParams 4240 * 4241 * DESCRIPTION: 4242 * 4243 * PARAMETERS : 4244 * @metadata : metadata information from callback 4245 * 4246 * RETURN : none 4247 * 4248 *==========================================================================*/ 4249void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata) 4250{ 4251 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params, 4252 CAM_INTF_META_EXIF_DEBUG_AE, metadata) { 4253 mExifParams.ae_debug_params = *ae_exif_debug_params; 4254 mExifParams.ae_debug_params_valid = TRUE; 4255 } 4256 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params, 4257 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) { 4258 mExifParams.awb_debug_params = *awb_exif_debug_params; 4259 mExifParams.awb_debug_params_valid = TRUE; 4260 } 4261 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params, 4262 CAM_INTF_META_EXIF_DEBUG_AF, metadata) { 4263 mExifParams.af_debug_params = *af_exif_debug_params; 4264 mExifParams.af_debug_params_valid = TRUE; 4265 } 4266 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params, 4267 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) { 4268 mExifParams.asd_debug_params = *asd_exif_debug_params; 4269 mExifParams.asd_debug_params_valid = TRUE; 4270 } 4271 
IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}

/*===========================================================================
 * FUNCTION   : get3AExifParams
 *
 * DESCRIPTION: return the cached 3A EXIF debug parameters most recently
 *              stored by saveExifParams()
 *
 * PARAMETERS : none
 *
 *
 * RETURN     : mm_jpeg_exif_params_t
 *
 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    return mExifParams;
}

/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: translate the 3A (AF/AWB/AE) portion of a HAL metadata buffer
 *              into framework metadata for the urgent partial result
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
        uint8_t fwk_afState = (uint8_t) *afState;
        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
    }

    IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
        camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
    }

    IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
        // focusRange points at a (near, far) pair, hence count 2.
        camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
    }

    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d", __func__,
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // Derive ANDROID_CONTROL_AE_MODE from the combination of red-eye
    // reduction, LED flash mode and AEC mode reported by the backend.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
                "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
        uint8_t fwk_lensState = *lensState;
        camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}

/*===========================================================================
 * FUNCTION   : dumpMetadataToFile
 *
 * DESCRIPTION: Dumps tuning metadata to file system
 *
 * PARAMETERS :
 *   @meta           : tuning metadata
 *   @dumpFrameCount : current dump frame count
 *   @enabled        : Enable mask
 *
 *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    uint32_t frm_num = 0;

    //Some sanity checks
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
                __func__,
                meta.tuning_sensor_data_size,
                TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
                __func__,
                meta.tuning_vfe_data_size,
                TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
                __func__,
                meta.tuning_cpp_data_size,
                TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        ALOGE("%s : 
Tuning CAC data size bigger than expected %d: %d", 4476 __func__, 4477 meta.tuning_cac_data_size, 4478 TUNING_CAC_DATA_MAX); 4479 return; 4480 } 4481 // 4482 4483 if(enabled){ 4484 char timeBuf[FILENAME_MAX]; 4485 char buf[FILENAME_MAX]; 4486 memset(buf, 0, sizeof(buf)); 4487 memset(timeBuf, 0, sizeof(timeBuf)); 4488 time_t current_time; 4489 struct tm * timeinfo; 4490 time (¤t_time); 4491 timeinfo = localtime (¤t_time); 4492 if (timeinfo != NULL) { 4493 strftime (timeBuf, sizeof(timeBuf), 4494 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo); 4495 } 4496 String8 filePath(timeBuf); 4497 snprintf(buf, 4498 sizeof(buf), 4499 "%dm_%s_%d.bin", 4500 dumpFrameCount, 4501 type, 4502 frameNumber); 4503 filePath.append(buf); 4504 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777); 4505 if (file_fd >= 0) { 4506 ssize_t written_len = 0; 4507 meta.tuning_data_version = TUNING_DATA_VERSION; 4508 void *data = (void *)((uint8_t *)&meta.tuning_data_version); 4509 written_len += write(file_fd, data, sizeof(uint32_t)); 4510 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size); 4511 CDBG("tuning_sensor_data_size %d",(int)(*(int *)data)); 4512 written_len += write(file_fd, data, sizeof(uint32_t)); 4513 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size); 4514 CDBG("tuning_vfe_data_size %d",(int)(*(int *)data)); 4515 written_len += write(file_fd, data, sizeof(uint32_t)); 4516 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size); 4517 CDBG("tuning_cpp_data_size %d",(int)(*(int *)data)); 4518 written_len += write(file_fd, data, sizeof(uint32_t)); 4519 data = (void *)((uint8_t *)&meta.tuning_cac_data_size); 4520 CDBG("tuning_cac_data_size %d",(int)(*(int *)data)); 4521 written_len += write(file_fd, data, sizeof(uint32_t)); 4522 meta.tuning_mod3_data_size = 0; 4523 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size); 4524 CDBG("tuning_mod3_data_size %d",(int)(*(int *)data)); 4525 written_len += write(file_fd, data, sizeof(uint32_t)); 4526 size_t total_size = 
meta.tuning_sensor_data_size; 4527 data = (void *)((uint8_t *)&meta.data); 4528 written_len += write(file_fd, data, total_size); 4529 total_size = meta.tuning_vfe_data_size; 4530 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]); 4531 written_len += write(file_fd, data, total_size); 4532 total_size = meta.tuning_cpp_data_size; 4533 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]); 4534 written_len += write(file_fd, data, total_size); 4535 total_size = meta.tuning_cac_data_size; 4536 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]); 4537 written_len += write(file_fd, data, total_size); 4538 close(file_fd); 4539 }else { 4540 ALOGE("%s: fail to open file for metadata dumping", __func__); 4541 } 4542 } 4543} 4544 4545/*=========================================================================== 4546 * FUNCTION : cleanAndSortStreamInfo 4547 * 4548 * DESCRIPTION: helper method to clean up invalid streams in stream_info, 4549 * and sort them such that raw stream is at the end of the list 4550 * This is a workaround for camera daemon constraint. 
4551 * 4552 * PARAMETERS : None 4553 * 4554 *==========================================================================*/ 4555void QCamera3HardwareInterface::cleanAndSortStreamInfo() 4556{ 4557 List<stream_info_t *> newStreamInfo; 4558 4559 /*clean up invalid streams*/ 4560 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 4561 it != mStreamInfo.end();) { 4562 if(((*it)->status) == INVALID){ 4563 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 4564 delete channel; 4565 free(*it); 4566 it = mStreamInfo.erase(it); 4567 } else { 4568 it++; 4569 } 4570 } 4571 4572 // Move preview/video/callback/snapshot streams into newList 4573 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 4574 it != mStreamInfo.end();) { 4575 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE && 4576 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 && 4577 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) { 4578 newStreamInfo.push_back(*it); 4579 it = mStreamInfo.erase(it); 4580 } else 4581 it++; 4582 } 4583 // Move raw streams into newList 4584 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 4585 it != mStreamInfo.end();) { 4586 newStreamInfo.push_back(*it); 4587 it = mStreamInfo.erase(it); 4588 } 4589 4590 mStreamInfo = newStreamInfo; 4591} 4592 4593/*=========================================================================== 4594 * FUNCTION : extractJpegMetadata 4595 * 4596 * DESCRIPTION: helper method to extract Jpeg metadata from capture request. 4597 * JPEG metadata is cached in HAL, and return as part of capture 4598 * result when metadata is returned from camera daemon. 
4599 * 4600 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted 4601 * @request: capture request 4602 * 4603 *==========================================================================*/ 4604void QCamera3HardwareInterface::extractJpegMetadata( 4605 CameraMetadata& jpegMetadata, 4606 const camera3_capture_request_t *request) 4607{ 4608 CameraMetadata frame_settings; 4609 frame_settings = request->settings; 4610 4611 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) 4612 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES, 4613 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d, 4614 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count); 4615 4616 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) 4617 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, 4618 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8, 4619 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count); 4620 4621 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) 4622 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, 4623 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64, 4624 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count); 4625 4626 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) 4627 jpegMetadata.update(ANDROID_JPEG_ORIENTATION, 4628 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32, 4629 frame_settings.find(ANDROID_JPEG_ORIENTATION).count); 4630 4631 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) 4632 jpegMetadata.update(ANDROID_JPEG_QUALITY, 4633 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8, 4634 frame_settings.find(ANDROID_JPEG_QUALITY).count); 4635 4636 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) 4637 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, 4638 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8, 4639 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count); 4640 4641 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) 4642 
jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, 4643 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32, 4644 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count); 4645} 4646 4647/*=========================================================================== 4648 * FUNCTION : convertToRegions 4649 * 4650 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 4651 * 4652 * PARAMETERS : 4653 * @rect : cam_rect_t struct to convert 4654 * @region : int32_t destination array 4655 * @weight : if we are converting from cam_area_t, weight is valid 4656 * else weight = -1 4657 * 4658 *==========================================================================*/ 4659void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, 4660 int32_t *region, int weight) 4661{ 4662 region[0] = rect.left; 4663 region[1] = rect.top; 4664 region[2] = rect.left + rect.width; 4665 region[3] = rect.top + rect.height; 4666 if (weight > -1) { 4667 region[4] = weight; 4668 } 4669} 4670 4671/*=========================================================================== 4672 * FUNCTION : convertFromRegions 4673 * 4674 * DESCRIPTION: helper method to convert from array to cam_rect_t 4675 * 4676 * PARAMETERS : 4677 * @rect : cam_rect_t struct to convert 4678 * @region : int32_t destination array 4679 * @weight : if we are converting from cam_area_t, weight is valid 4680 * else weight = -1 4681 * 4682 *==========================================================================*/ 4683void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi, 4684 const camera_metadata_t *settings, uint32_t tag) 4685{ 4686 CameraMetadata frame_settings; 4687 frame_settings = settings; 4688 int32_t x_min = frame_settings.find(tag).data.i32[0]; 4689 int32_t y_min = frame_settings.find(tag).data.i32[1]; 4690 int32_t x_max = frame_settings.find(tag).data.i32[2]; 4691 int32_t y_max = frame_settings.find(tag).data.i32[3]; 4692 roi.weight = frame_settings.find(tag).data.i32[4]; 4693 
roi.rect.left = x_min; 4694 roi.rect.top = y_min; 4695 roi.rect.width = x_max - x_min; 4696 roi.rect.height = y_max - y_min; 4697} 4698 4699/*=========================================================================== 4700 * FUNCTION : resetIfNeededROI 4701 * 4702 * DESCRIPTION: helper method to reset the roi if it is greater than scaler 4703 * crop region 4704 * 4705 * PARAMETERS : 4706 * @roi : cam_area_t struct to resize 4707 * @scalerCropRegion : cam_crop_region_t region to compare against 4708 * 4709 * 4710 *==========================================================================*/ 4711bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi, 4712 const cam_crop_region_t* scalerCropRegion) 4713{ 4714 int32_t roi_x_max = roi->rect.width + roi->rect.left; 4715 int32_t roi_y_max = roi->rect.height + roi->rect.top; 4716 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left; 4717 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top; 4718 4719 /* According to spec weight = 0 is used to indicate roi needs to be disabled 4720 * without having this check the calculations below to validate if the roi 4721 * is inside scalar crop region will fail resulting in the roi not being 4722 * reset causing algorithm to continue to use stale roi window 4723 */ 4724 if (roi->weight == 0) { 4725 return true; 4726 } 4727 4728 if ((roi_x_max < scalerCropRegion->left) || 4729 // right edge of roi window is left of scalar crop's left edge 4730 (roi_y_max < scalerCropRegion->top) || 4731 // bottom edge of roi window is above scalar crop's top edge 4732 (roi->rect.left > crop_x_max) || 4733 // left edge of roi window is beyond(right) of scalar crop's right edge 4734 (roi->rect.top > crop_y_max)){ 4735 // top edge of roi windo is above scalar crop's top edge 4736 return false; 4737 } 4738 if (roi->rect.left < scalerCropRegion->left) { 4739 roi->rect.left = scalerCropRegion->left; 4740 } 4741 if (roi->rect.top < scalerCropRegion->top) { 4742 
roi->rect.top = scalerCropRegion->top; 4743 } 4744 if (roi_x_max > crop_x_max) { 4745 roi_x_max = crop_x_max; 4746 } 4747 if (roi_y_max > crop_y_max) { 4748 roi_y_max = crop_y_max; 4749 } 4750 roi->rect.width = roi_x_max - roi->rect.left; 4751 roi->rect.height = roi_y_max - roi->rect.top; 4752 return true; 4753} 4754 4755/*=========================================================================== 4756 * FUNCTION : convertLandmarks 4757 * 4758 * DESCRIPTION: helper method to extract the landmarks from face detection info 4759 * 4760 * PARAMETERS : 4761 * @face : cam_rect_t struct to convert 4762 * @landmarks : int32_t destination array 4763 * 4764 * 4765 *==========================================================================*/ 4766void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks) 4767{ 4768 landmarks[0] = (int32_t)face.left_eye_center.x; 4769 landmarks[1] = (int32_t)face.left_eye_center.y; 4770 landmarks[2] = (int32_t)face.right_eye_center.x; 4771 landmarks[3] = (int32_t)face.right_eye_center.y; 4772 landmarks[4] = (int32_t)face.mouth_center.x; 4773 landmarks[5] = (int32_t)face.mouth_center.y; 4774} 4775 4776#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 4777/*=========================================================================== 4778 * FUNCTION : initCapabilities 4779 * 4780 * DESCRIPTION: initialize camera capabilities in static data struct 4781 * 4782 * PARAMETERS : 4783 * @cameraId : camera Id 4784 * 4785 * RETURN : int32_t type of status 4786 * NO_ERROR -- success 4787 * none-zero failure code 4788 *==========================================================================*/ 4789int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId) 4790{ 4791 int rc = 0; 4792 mm_camera_vtbl_t *cameraHandle = NULL; 4793 QCamera3HeapMemory *capabilityHeap = NULL; 4794 4795 cameraHandle = camera_open((uint8_t)cameraId); 4796 if (!cameraHandle) { 4797 ALOGE("%s: camera_open failed", __func__); 4798 
rc = -1; 4799 goto open_failed; 4800 } 4801 4802 capabilityHeap = new QCamera3HeapMemory(); 4803 if (capabilityHeap == NULL) { 4804 ALOGE("%s: creation of capabilityHeap failed", __func__); 4805 goto heap_creation_failed; 4806 } 4807 /* Allocate memory for capability buffer */ 4808 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 4809 if(rc != OK) { 4810 ALOGE("%s: No memory for cappability", __func__); 4811 goto allocate_failed; 4812 } 4813 4814 /* Map memory for capability buffer */ 4815 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 4816 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 4817 CAM_MAPPING_BUF_TYPE_CAPABILITY, 4818 capabilityHeap->getFd(0), 4819 sizeof(cam_capability_t)); 4820 if(rc < 0) { 4821 ALOGE("%s: failed to map capability buffer", __func__); 4822 goto map_failed; 4823 } 4824 4825 /* Query Capability */ 4826 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 4827 if(rc < 0) { 4828 ALOGE("%s: failed to query capability",__func__); 4829 goto query_failed; 4830 } 4831 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 4832 if (!gCamCapability[cameraId]) { 4833 ALOGE("%s: out of memory", __func__); 4834 goto query_failed; 4835 } 4836 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 4837 sizeof(cam_capability_t)); 4838 rc = 0; 4839 4840query_failed: 4841 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 4842 CAM_MAPPING_BUF_TYPE_CAPABILITY); 4843map_failed: 4844 capabilityHeap->deallocate(); 4845allocate_failed: 4846 delete capabilityHeap; 4847heap_creation_failed: 4848 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 4849 cameraHandle = NULL; 4850open_failed: 4851 return rc; 4852} 4853 4854/*=========================================================================== 4855 * FUNCTION : initParameters 4856 * 4857 * DESCRIPTION: initialize camera parameters 4858 * 4859 * PARAMETERS : 4860 * 4861 * RETURN : int32_t type of 
status 4862 * NO_ERROR -- success 4863 * none-zero failure code 4864 *==========================================================================*/ 4865int QCamera3HardwareInterface::initParameters() 4866{ 4867 int rc = 0; 4868 4869 //Allocate Set Param Buffer 4870 mParamHeap = new QCamera3HeapMemory(); 4871 rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false); 4872 if(rc != OK) { 4873 rc = NO_MEMORY; 4874 ALOGE("Failed to allocate SETPARM Heap memory"); 4875 delete mParamHeap; 4876 mParamHeap = NULL; 4877 return rc; 4878 } 4879 4880 //Map memory for parameters buffer 4881 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 4882 CAM_MAPPING_BUF_TYPE_PARM_BUF, 4883 mParamHeap->getFd(0), 4884 sizeof(metadata_buffer_t)); 4885 if(rc < 0) { 4886 ALOGE("%s:failed to map SETPARM buffer",__func__); 4887 rc = FAILED_TRANSACTION; 4888 mParamHeap->deallocate(); 4889 delete mParamHeap; 4890 mParamHeap = NULL; 4891 return rc; 4892 } 4893 4894 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0); 4895 4896 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t)); 4897 return rc; 4898} 4899 4900/*=========================================================================== 4901 * FUNCTION : deinitParameters 4902 * 4903 * DESCRIPTION: de-initialize camera parameters 4904 * 4905 * PARAMETERS : 4906 * 4907 * RETURN : NONE 4908 *==========================================================================*/ 4909void QCamera3HardwareInterface::deinitParameters() 4910{ 4911 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 4912 CAM_MAPPING_BUF_TYPE_PARM_BUF); 4913 4914 mParamHeap->deallocate(); 4915 delete mParamHeap; 4916 mParamHeap = NULL; 4917 4918 mParameters = NULL; 4919 4920 free(mPrevParameters); 4921 mPrevParameters = NULL; 4922} 4923 4924/*=========================================================================== 4925 * FUNCTION : calcMaxJpegSize 4926 * 4927 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 
4928 * 4929 * PARAMETERS : 4930 * 4931 * RETURN : max_jpeg_size 4932 *==========================================================================*/ 4933size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id) 4934{ 4935 size_t max_jpeg_size = 0; 4936 size_t temp_width, temp_height; 4937 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt, 4938 MAX_SIZES_CNT); 4939 for (size_t i = 0; i < count; i++) { 4940 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width; 4941 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height; 4942 if (temp_width * temp_height > max_jpeg_size ) { 4943 max_jpeg_size = temp_width * temp_height; 4944 } 4945 } 4946 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 4947 return max_jpeg_size; 4948} 4949 4950/*=========================================================================== 4951 * FUNCTION : getMaxRawSize 4952 * 4953 * DESCRIPTION: Fetches maximum raw size supported by the cameraId 4954 * 4955 * PARAMETERS : 4956 * 4957 * RETURN : Largest supported Raw Dimension 4958 *==========================================================================*/ 4959cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id) 4960{ 4961 int max_width = 0; 4962 cam_dimension_t maxRawSize; 4963 4964 memset(&maxRawSize, 0, sizeof(cam_dimension_t)); 4965 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) { 4966 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) { 4967 max_width = gCamCapability[camera_id]->raw_dim[i].width; 4968 maxRawSize = gCamCapability[camera_id]->raw_dim[i]; 4969 } 4970 } 4971 return maxRawSize; 4972} 4973 4974 4975/*=========================================================================== 4976 * FUNCTION : calcMaxJpegDim 4977 * 4978 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId 4979 * 4980 * PARAMETERS : 4981 * 4982 * RETURN : max_jpeg_dim 4983 
*==========================================================================*/ 4984cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim() 4985{ 4986 cam_dimension_t max_jpeg_dim; 4987 cam_dimension_t curr_jpeg_dim; 4988 max_jpeg_dim.width = 0; 4989 max_jpeg_dim.height = 0; 4990 curr_jpeg_dim.width = 0; 4991 curr_jpeg_dim.height = 0; 4992 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 4993 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 4994 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 4995 if (curr_jpeg_dim.width * curr_jpeg_dim.height > 4996 max_jpeg_dim.width * max_jpeg_dim.height ) { 4997 max_jpeg_dim.width = curr_jpeg_dim.width; 4998 max_jpeg_dim.height = curr_jpeg_dim.height; 4999 } 5000 } 5001 return max_jpeg_dim; 5002} 5003 5004 5005/*=========================================================================== 5006 * FUNCTION : initStaticMetadata 5007 * 5008 * DESCRIPTION: initialize the static metadata 5009 * 5010 * PARAMETERS : 5011 * @cameraId : camera Id 5012 * 5013 * RETURN : int32_t type of status 5014 * 0 -- success 5015 * non-zero failure code 5016 *==========================================================================*/ 5017int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId) 5018{ 5019 int rc = 0; 5020 CameraMetadata staticInfo; 5021 size_t count = 0; 5022 bool limitedDevice = false; 5023 5024 /* If sensor is YUV sensor (no raw support) or if per-frame control is not 5025 * guaranteed, its advertised as limited device */ 5026 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support || 5027 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type); 5028 5029 uint8_t supportedHwLvl = limitedDevice ? 
5030 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED : 5031 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; 5032 5033 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 5034 &supportedHwLvl, 1); 5035 5036 bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 5037 /*HAL 3 only*/ 5038 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 5039 &gCamCapability[cameraId]->min_focus_distance, 1); 5040 5041 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 5042 &gCamCapability[cameraId]->hyper_focal_distance, 1); 5043 5044 /*should be using focal lengths but sensor doesn't provide that info now*/ 5045 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 5046 &gCamCapability[cameraId]->focal_length, 5047 1); 5048 5049 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 5050 gCamCapability[cameraId]->apertures, 5051 gCamCapability[cameraId]->apertures_count); 5052 5053 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 5054 gCamCapability[cameraId]->filter_densities, 5055 gCamCapability[cameraId]->filter_densities_count); 5056 5057 5058 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 5059 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes, 5060 gCamCapability[cameraId]->optical_stab_modes_count); 5061 5062 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 5063 gCamCapability[cameraId]->lens_shading_map_size.height}; 5064 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 5065 lens_shading_map_size, 5066 sizeof(lens_shading_map_size)/sizeof(int32_t)); 5067 5068 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 5069 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT); 5070 5071 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 5072 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT); 5073 5074 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 5075 
&gCamCapability[cameraId]->max_frame_duration, 1); 5076 5077 camera_metadata_rational baseGainFactor = { 5078 gCamCapability[cameraId]->base_gain_factor.numerator, 5079 gCamCapability[cameraId]->base_gain_factor.denominator}; 5080 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR, 5081 &baseGainFactor, 1); 5082 5083 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 5084 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1); 5085 5086 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 5087 gCamCapability[cameraId]->pixel_array_size.height}; 5088 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 5089 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0])); 5090 5091 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left, 5092 gCamCapability[cameraId]->active_array_size.top, 5093 gCamCapability[cameraId]->active_array_size.width, 5094 gCamCapability[cameraId]->active_array_size.height}; 5095 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 5096 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0])); 5097 5098 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 5099 &gCamCapability[cameraId]->white_level, 1); 5100 5101 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 5102 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT); 5103 5104 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 5105 &gCamCapability[cameraId]->flash_charge_duration, 1); 5106 5107 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 5108 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 5109 5110 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi; 5111 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 5112 (int32_t *)&maxFaces, 1); 5113 5114 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; 5115 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, 5116 &timestampSource, 1); 5117 5118 
staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 5119 &gCamCapability[cameraId]->histogram_size, 1); 5120 5121 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 5122 &gCamCapability[cameraId]->max_histogram_count, 1); 5123 5124 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 5125 gCamCapability[cameraId]->sharpness_map_size.height}; 5126 5127 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 5128 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 5129 5130 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 5131 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 5132 5133 int32_t scalar_formats[] = { 5134 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE, 5135 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16, 5136 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 5137 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 5138 HAL_PIXEL_FORMAT_RAW10, 5139 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}; 5140 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t); 5141 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 5142 scalar_formats, 5143 scalar_formats_count); 5144 5145 int32_t available_processed_sizes[MAX_SIZES_CNT * 2]; 5146 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT); 5147 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 5148 count, MAX_SIZES_CNT, available_processed_sizes); 5149 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 5150 available_processed_sizes, count * 2); 5151 5152 int32_t available_raw_sizes[MAX_SIZES_CNT * 2]; 5153 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT); 5154 makeTable(gCamCapability[cameraId]->raw_dim, 5155 count, MAX_SIZES_CNT, available_raw_sizes); 5156 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 5157 available_raw_sizes, count * 2); 5158 5159 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 5160 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, 
MAX_SIZES_CNT); 5161 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 5162 count, MAX_SIZES_CNT, available_fps_ranges); 5163 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 5164 available_fps_ranges, count * 2); 5165 5166 camera_metadata_rational exposureCompensationStep = { 5167 gCamCapability[cameraId]->exp_compensation_step.numerator, 5168 gCamCapability[cameraId]->exp_compensation_step.denominator}; 5169 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 5170 &exposureCompensationStep, 1); 5171 5172 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 5173 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 5174 availableVstabModes, sizeof(availableVstabModes)); 5175 5176 /*HAL 1 and HAL 3 common*/ 5177 float maxZoom = 4; 5178 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 5179 &maxZoom, 1); 5180 5181 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; 5182 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); 5183 5184 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1}; 5185 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1) 5186 max3aRegions[2] = 0; /* AF not supported */ 5187 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 5188 max3aRegions, 3); 5189 5190 uint8_t availableFaceDetectModes[] = { 5191 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, 5192 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL }; 5193 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 5194 availableFaceDetectModes, 5195 sizeof(availableFaceDetectModes)/sizeof(availableFaceDetectModes[0])); 5196 5197 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 5198 gCamCapability[cameraId]->exposure_compensation_max}; 5199 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 5200 exposureCompensationRange, 5201 sizeof(exposureCompensationRange)/sizeof(int32_t)); 5202 5203 uint8_t lensFacing = (facingBack) ? 
5204 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 5205 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 5206 5207 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 5208 available_thumbnail_sizes, 5209 sizeof(available_thumbnail_sizes)/sizeof(int32_t)); 5210 5211 /*all sizes will be clubbed into this tag*/ 5212 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 5213 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT); 5214 size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes, 5215 count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size, 5216 gCamCapability[cameraId]->max_downscale_factor); 5217 /*android.scaler.availableStreamConfigurations*/ 5218 size_t max_stream_configs_size = count * scalar_formats_count * 4; 5219 int32_t available_stream_configs[max_stream_configs_size]; 5220 /* Add input/output stream configurations for each scalar formats*/ 5221 size_t idx = 0; 5222 for (size_t j = 0; j < scalar_formats_count; j++) { 5223 switch (scalar_formats[j]) { 5224 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16: 5225 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE: 5226 case HAL_PIXEL_FORMAT_RAW10: 5227 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) { 5228 available_stream_configs[idx] = scalar_formats[j]; 5229 available_stream_configs[idx+1] = 5230 gCamCapability[cameraId]->raw_dim[i].width; 5231 available_stream_configs[idx+2] = 5232 gCamCapability[cameraId]->raw_dim[i].height; 5233 available_stream_configs[idx+3] = 5234 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT; 5235 idx+=4; 5236 } 5237 break; 5238 case HAL_PIXEL_FORMAT_BLOB: 5239 for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) { 5240 available_stream_configs[idx] = scalar_formats[j]; 5241 available_stream_configs[idx+1] = available_jpeg_sizes[i*2]; 5242 available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1]; 5243 available_stream_configs[idx+3] = 
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT; 5244 idx+=4; 5245 } 5246 break; 5247 5248 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 5249 case HAL_PIXEL_FORMAT_YCbCr_420_888: 5250 default: 5251 cam_dimension_t largest_picture_size; 5252 memset(&largest_picture_size, 0, sizeof(cam_dimension_t)); 5253 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 5254 available_stream_configs[idx] = scalar_formats[j]; 5255 available_stream_configs[idx+1] = 5256 gCamCapability[cameraId]->picture_sizes_tbl[i].width; 5257 available_stream_configs[idx+2] = 5258 gCamCapability[cameraId]->picture_sizes_tbl[i].height; 5259 available_stream_configs[idx+3] = 5260 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT; 5261 idx+=4; 5262 5263 /* Book keep largest */ 5264 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width 5265 >= largest_picture_size.width && 5266 gCamCapability[cameraId]->picture_sizes_tbl[i].height 5267 >= largest_picture_size.height) 5268 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i]; 5269 } 5270 5271 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/ 5272 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || 5273 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) { 5274 available_stream_configs[idx] = scalar_formats[j]; 5275 available_stream_configs[idx+1] = largest_picture_size.width; 5276 available_stream_configs[idx+2] = largest_picture_size.height; 5277 available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT; 5278 idx+=4; 5279 } 5280 break; 5281 } 5282 } 5283 5284 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, 5285 available_stream_configs, idx); 5286 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST; 5287 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1); 5288 5289 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; 5290 
staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); 5291 5292 /* android.scaler.availableMinFrameDurations */ 5293 int64_t available_min_durations[max_stream_configs_size]; 5294 idx = 0; 5295 for (size_t j = 0; j < scalar_formats_count; j++) { 5296 switch (scalar_formats[j]) { 5297 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16: 5298 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE: 5299 case HAL_PIXEL_FORMAT_RAW10: 5300 for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) { 5301 available_min_durations[idx] = scalar_formats[j]; 5302 available_min_durations[idx+1] = 5303 gCamCapability[cameraId]->raw_dim[i].width; 5304 available_min_durations[idx+2] = 5305 gCamCapability[cameraId]->raw_dim[i].height; 5306 available_min_durations[idx+3] = 5307 gCamCapability[cameraId]->raw_min_duration[i]; 5308 idx+=4; 5309 } 5310 break; 5311 default: 5312 for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 5313 available_min_durations[idx] = scalar_formats[j]; 5314 available_min_durations[idx+1] = 5315 gCamCapability[cameraId]->picture_sizes_tbl[i].width; 5316 available_min_durations[idx+2] = 5317 gCamCapability[cameraId]->picture_sizes_tbl[i].height; 5318 available_min_durations[idx+3] = 5319 gCamCapability[cameraId]->picture_min_duration[i]; 5320 idx+=4; 5321 } 5322 break; 5323 } 5324 } 5325 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, 5326 &available_min_durations[0], idx); 5327 5328 Vector<int32_t> available_hfr_configs; 5329 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) { 5330 int32_t fps = 0; 5331 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) { 5332 case CAM_HFR_MODE_60FPS: 5333 fps = 60; 5334 break; 5335 case CAM_HFR_MODE_90FPS: 5336 fps = 90; 5337 break; 5338 case CAM_HFR_MODE_120FPS: 5339 fps = 120; 5340 break; 5341 case CAM_HFR_MODE_150FPS: 5342 fps = 150; 5343 break; 5344 case CAM_HFR_MODE_180FPS: 5345 fps = 180; 5346 break; 5347 case 
CAM_HFR_MODE_210FPS: 5348 fps = 210; 5349 break; 5350 case CAM_HFR_MODE_240FPS: 5351 fps = 240; 5352 break; 5353 case CAM_HFR_MODE_480FPS: 5354 fps = 480; 5355 break; 5356 case CAM_HFR_MODE_OFF: 5357 case CAM_HFR_MODE_MAX: 5358 default: 5359 break; 5360 } 5361 5362 if (fps > 0) { 5363 /* For each HFR frame rate, need to advertise one variable fps range 5364 * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and 5365 * [120, 120]. While camcorder preview alone is running [30, 120] is 5366 * set by the app. When video recording is started, [120, 120] is 5367 * set. This way sensor configuration does not change when recording 5368 * is started */ 5369 5370 /* (width, height, fps_min, fps_max, batch_size_max) */ 5371 available_hfr_configs.add( 5372 gCamCapability[cameraId]->hfr_tbl[i].dim.width); 5373 available_hfr_configs.add( 5374 gCamCapability[cameraId]->hfr_tbl[i].dim.height); 5375 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR); 5376 available_hfr_configs.add(fps); 5377 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR); 5378 5379 /* (width, height, fps_min, fps_max, batch_size_max) */ 5380 available_hfr_configs.add( 5381 gCamCapability[cameraId]->hfr_tbl[i].dim.width); 5382 available_hfr_configs.add( 5383 gCamCapability[cameraId]->hfr_tbl[i].dim.height); 5384 available_hfr_configs.add(fps); 5385 available_hfr_configs.add(fps); 5386 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR); 5387 } 5388 } 5389 //Advertise HFR capability only if the property is set 5390 char prop[PROPERTY_VALUE_MAX]; 5391 memset(prop, 0, sizeof(prop)); 5392 property_get("persist.camera.hal3hfr.enable", prop, "0"); 5393 uint8_t hfrEnable = (uint8_t)atoi(prop); 5394 5395 if(hfrEnable && available_hfr_configs.array()) { 5396 staticInfo.update( 5397 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, 5398 available_hfr_configs.array(), available_hfr_configs.size()); 5399 } 5400 5401 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId); 5402 
staticInfo.update(ANDROID_JPEG_MAX_SIZE, 5403 &max_jpeg_size, 1); 5404 5405 uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 5406 size_t size = 0; 5407 count = CAM_EFFECT_MODE_MAX; 5408 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count); 5409 for (size_t i = 0; i < count; i++) { 5410 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP), 5411 gCamCapability[cameraId]->supported_effects[i]); 5412 if (NAME_NOT_FOUND != val) { 5413 avail_effects[size] = (uint8_t)val; 5414 size++; 5415 } 5416 } 5417 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 5418 avail_effects, 5419 size); 5420 5421 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 5422 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 5423 size_t supported_scene_modes_cnt = 0; 5424 count = CAM_SCENE_MODE_MAX; 5425 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count); 5426 for (size_t i = 0; i < count; i++) { 5427 int val = lookupFwkName(SCENE_MODES_MAP, METADATA_MAP_SIZE(SCENE_MODES_MAP), 5428 gCamCapability[cameraId]->supported_scene_modes[i]); 5429 if (NAME_NOT_FOUND != val) { 5430 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 5431 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i; 5432 supported_scene_modes_cnt++; 5433 } 5434 } 5435 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 5436 avail_scene_modes, 5437 supported_scene_modes_cnt); 5438 5439 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 5440 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 5441 supported_scene_modes_cnt, 5442 CAM_SCENE_MODE_MAX, 5443 scene_mode_overrides, 5444 supported_indexes, 5445 cameraId); 5446 5447 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 5448 scene_mode_overrides, supported_scene_modes_cnt * 3); 5449 5450 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 5451 size = 0; 5452 count = CAM_ANTIBANDING_MODE_MAX; 5453 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count); 5454 for 
(size_t i = 0; i < count; i++) { 5455 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), 5456 gCamCapability[cameraId]->supported_antibandings[i]); 5457 if (NAME_NOT_FOUND != val) { 5458 avail_antibanding_modes[size] = (uint8_t)val; 5459 size++; 5460 } 5461 5462 } 5463 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 5464 avail_antibanding_modes, 5465 size); 5466 5467 uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX]; 5468 size = 0; 5469 count = CAM_COLOR_CORRECTION_ABERRATION_MAX; 5470 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count); 5471 if (0 == count) { 5472 avail_abberation_modes[0] = 5473 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; 5474 size++; 5475 } else { 5476 for (size_t i = 0; i < count; i++) { 5477 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP), 5478 gCamCapability[cameraId]->aberration_modes[i]); 5479 if (NAME_NOT_FOUND != val) { 5480 avail_abberation_modes[size] = (uint8_t)val; 5481 size++; 5482 } else { 5483 ALOGE("%s: Invalid CAC mode %d", __func__, 5484 gCamCapability[cameraId]->aberration_modes[i]); 5485 break; 5486 } 5487 } 5488 5489 } 5490 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, 5491 avail_abberation_modes, 5492 size); 5493 5494 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 5495 size = 0; 5496 count = CAM_FOCUS_MODE_MAX; 5497 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count); 5498 for (size_t i = 0; i < count; i++) { 5499 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), 5500 gCamCapability[cameraId]->supported_focus_modes[i]); 5501 if (NAME_NOT_FOUND != val) { 5502 avail_af_modes[size] = (uint8_t)val; 5503 size++; 5504 } 5505 } 5506 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 5507 avail_af_modes, 5508 size); 5509 5510 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 5511 size = 0; 5512 count = CAM_WB_MODE_MAX; 5513 count = 
MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count); 5514 for (size_t i = 0; i < count; i++) { 5515 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 5516 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), 5517 gCamCapability[cameraId]->supported_white_balances[i]); 5518 if (NAME_NOT_FOUND != val) { 5519 avail_awb_modes[size] = (uint8_t)val; 5520 size++; 5521 } 5522 } 5523 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 5524 avail_awb_modes, 5525 size); 5526 5527 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX]; 5528 count = CAM_FLASH_FIRING_LEVEL_MAX; 5529 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt, 5530 count); 5531 for (size_t i = 0; i < count; i++) { 5532 available_flash_levels[i] = 5533 gCamCapability[cameraId]->supported_firing_levels[i]; 5534 } 5535 staticInfo.update(ANDROID_FLASH_FIRING_POWER, 5536 available_flash_levels, count); 5537 5538 uint8_t flashAvailable; 5539 if (gCamCapability[cameraId]->flash_available) 5540 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; 5541 else 5542 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; 5543 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 5544 &flashAvailable, 1); 5545 5546 Vector<uint8_t> avail_ae_modes; 5547 count = CAM_AE_MODE_MAX; 5548 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count); 5549 for (size_t i = 0; i < count; i++) { 5550 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]); 5551 } 5552 if (flashAvailable) { 5553 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH); 5554 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH); 5555 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE); 5556 } 5557 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 5558 avail_ae_modes.array(), 5559 avail_ae_modes.size()); 5560 5561 int32_t sensitivity_range[2]; 5562 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity; 5563 sensitivity_range[1] = 
gCamCapability[cameraId]->sensitivity_range.max_sensitivity; 5564 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, 5565 sensitivity_range, 5566 sizeof(sensitivity_range) / sizeof(int32_t)); 5567 5568 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 5569 &gCamCapability[cameraId]->max_analog_sensitivity, 5570 1); 5571 5572 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle; 5573 staticInfo.update(ANDROID_SENSOR_ORIENTATION, 5574 &sensor_orientation, 5575 1); 5576 5577 int32_t max_output_streams[] = { 5578 MAX_STALLING_STREAMS, 5579 MAX_PROCESSED_STREAMS, 5580 MAX_RAW_STREAMS}; 5581 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, 5582 max_output_streams, 5583 sizeof(max_output_streams)/sizeof(max_output_streams[0])); 5584 5585 uint8_t avail_leds = 0; 5586 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS, 5587 &avail_leds, 0); 5588 5589 uint8_t focus_dist_calibrated; 5590 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP), 5591 gCamCapability[cameraId]->focus_dist_calibrated); 5592 if (NAME_NOT_FOUND != val) { 5593 focus_dist_calibrated = (uint8_t)val; 5594 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, 5595 &focus_dist_calibrated, 1); 5596 } 5597 5598 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT]; 5599 size = 0; 5600 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt, 5601 MAX_TEST_PATTERN_CNT); 5602 for (size_t i = 0; i < count; i++) { 5603 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP), 5604 gCamCapability[cameraId]->supported_test_pattern_modes[i]); 5605 if (NAME_NOT_FOUND != testpatternMode) { 5606 avail_testpattern_modes[size] = testpatternMode; 5607 size++; 5608 } 5609 } 5610 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, 5611 avail_testpattern_modes, 5612 size); 5613 5614 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + 
FRAME_SKIP_DELAY); 5615 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, 5616 &max_pipeline_depth, 5617 1); 5618 5619 int32_t partial_result_count = PARTIAL_RESULT_COUNT; 5620 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, 5621 &partial_result_count, 5622 1); 5623 5624 int32_t max_stall_duration = MAX_REPROCESS_STALL; 5625 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1); 5626 5627 Vector<uint8_t> available_capabilities; 5628 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE); 5629 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR); 5630 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING); 5631 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS); 5632 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 5633 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING); 5634 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING); 5635 if (hfrEnable) { 5636 available_capabilities.add( 5637 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO); 5638 } 5639 5640 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) { 5641 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW); 5642 } 5643 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, 5644 available_capabilities.array(), 5645 available_capabilities.size()); 5646 5647 int32_t max_input_streams = 1; 5648 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, 5649 &max_input_streams, 5650 1); 5651 5652 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1, HAL_PIXEL_FORMAT_BLOB, 5653 HAL_PIXEL_FORMAT_YCbCr_420_888, 1,HAL_PIXEL_FORMAT_BLOB}; 5654 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, 5655 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0])); 
5656 5657 int32_t max_latency = (limitedDevice) ? 5658 CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; 5659 staticInfo.update(ANDROID_SYNC_MAX_LATENCY, 5660 &max_latency, 5661 1); 5662 5663 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST}; 5664 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, 5665 available_hot_pixel_modes, 5666 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0])); 5667 5668 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF, 5669 ANDROID_EDGE_MODE_FAST}; 5670 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES, 5671 available_edge_modes, 5672 sizeof(available_edge_modes)/sizeof(available_edge_modes[0])); 5673 5674 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF, 5675 ANDROID_NOISE_REDUCTION_MODE_FAST, 5676 ANDROID_NOISE_REDUCTION_MODE_MINIMAL}; 5677 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, 5678 available_noise_red_modes, 5679 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0])); 5680 5681 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE, 5682 ANDROID_TONEMAP_MODE_FAST}; 5683 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, 5684 available_tonemap_modes, 5685 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0])); 5686 5687 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF}; 5688 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, 5689 available_hot_pixel_map_modes, 5690 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0])); 5691 5692 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP), 5693 gCamCapability[cameraId]->reference_illuminant1); 5694 if (NAME_NOT_FOUND != val) { 5695 uint8_t fwkReferenceIlluminant = (uint8_t)val; 5696 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1); 5697 } 5698 5699 val 
= lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP), 5700 gCamCapability[cameraId]->reference_illuminant2); 5701 if (NAME_NOT_FOUND != val) { 5702 uint8_t fwkReferenceIlluminant = (uint8_t)val; 5703 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1); 5704 } 5705 5706 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *) 5707 (void *)gCamCapability[cameraId]->forward_matrix1, 5708 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS); 5709 5710 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *) 5711 (void *)gCamCapability[cameraId]->forward_matrix2, 5712 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS); 5713 5714 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *) 5715 (void *)gCamCapability[cameraId]->color_transform1, 5716 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS); 5717 5718 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *) 5719 (void *)gCamCapability[cameraId]->color_transform2, 5720 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS); 5721 5722 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *) 5723 (void *)gCamCapability[cameraId]->calibration_transform1, 5724 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS); 5725 5726 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *) 5727 (void *)gCamCapability[cameraId]->calibration_transform2, 5728 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS); 5729 5730 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE, 5731 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS, 5732 ANDROID_COLOR_CORRECTION_ABERRATION_MODE, 5733 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 5734 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE, 5735 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, 5736 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, 
ANDROID_CONTROL_AF_MODE, 5737 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK, 5738 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT, 5739 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE, 5740 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, 5741 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH, 5742 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE, 5743 ANDROID_JPEG_GPS_COORDINATES, 5744 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP, 5745 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY, 5746 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY, 5747 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE, 5748 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE, 5749 ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE, 5750 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME, 5751 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE, 5752 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, 5753 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE, 5754 ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE, 5755 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 5756 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE, 5757 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE, 5758 ANDROID_BLACK_LEVEL_LOCK }; 5759 5760 size_t request_keys_cnt = 5761 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]); 5762 Vector<int32_t> available_request_keys; 5763 available_request_keys.appendArray(request_keys_basic, request_keys_cnt); 5764 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) { 5765 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS); 5766 } 5767 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, 5768 available_request_keys.array(), available_request_keys.size()); 5769 5770 int32_t 
result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM, 5771 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS, 5772 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, 5773 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, 5774 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE, 5775 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE, 5776 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD, 5777 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, 5778 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, 5779 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE, 5780 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, 5781 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID, 5782 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME, 5783 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY, 5784 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT, 5785 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE, 5786 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE, 5787 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE, 5788 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 5789 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM, 5790 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS, 5791 ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES, 5792 ANDROID_STATISTICS_FACE_SCORES}; 5793 size_t result_keys_cnt = 5794 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]); 5795 5796 Vector<int32_t> available_result_keys; 5797 available_result_keys.appendArray(result_keys_basic, result_keys_cnt); 5798 if 
(gCamCapability[cameraId]->supported_focus_modes_cnt > 1) { 5799 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS); 5800 } 5801 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) { 5802 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE); 5803 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT); 5804 } 5805 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, 5806 available_result_keys.array(), available_result_keys.size()); 5807 5808 int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 5809 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 5810 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP, 5811 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS, 5812 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, 5813 ANDROID_SCALER_CROPPING_TYPE, 5814 ANDROID_SYNC_MAX_LATENCY, 5815 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, 5816 ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 5817 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 5818 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS, 5819 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE, 5820 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 5821 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES, 5822 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 5823 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 5824 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 5825 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 5826 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, 5827 ANDROID_LENS_FACING, 5828 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, 5829 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, 5830 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, 5831 
ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, 5832 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 5833 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, 5834 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, 5835 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/ 5836 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1, 5837 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2, 5838 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1, 5839 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1, 5840 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 5841 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 5842 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 5843 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 5844 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR, 5845 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 5846 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, 5847 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 5848 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 5849 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 5850 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 5851 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, 5852 ANDROID_EDGE_AVAILABLE_EDGE_MODES, 5853 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, 5854 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, 5855 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, 5856 ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL }; 5857 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, 5858 available_characteristics_keys, 5859 sizeof(available_characteristics_keys)/sizeof(int32_t)); 5860 5861 
/*available stall durations depend on the hw + sw and will be different for different devices */ 5862 /*have to add for raw after implementation*/ 5863 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16}; 5864 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t); 5865 5866 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT); 5867 size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, 5868 MAX_SIZES_CNT); 5869 size_t available_stall_size = count * 4; 5870 int64_t available_stall_durations[available_stall_size]; 5871 idx = 0; 5872 for (uint32_t j = 0; j < stall_formats_count; j++) { 5873 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) { 5874 for (uint32_t i = 0; i < count; i++) { 5875 available_stall_durations[idx] = stall_formats[j]; 5876 available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 5877 available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 5878 available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i]; 5879 idx+=4; 5880 } 5881 } else { 5882 for (uint32_t i = 0; i < raw_count; i++) { 5883 available_stall_durations[idx] = stall_formats[j]; 5884 available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width; 5885 available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height; 5886 available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i]; 5887 idx+=4; 5888 } 5889 } 5890 } 5891 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, 5892 available_stall_durations, 5893 idx); 5894 //QCAMERA3_OPAQUE_RAW 5895 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY; 5896 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG; 5897 switch (gCamCapability[cameraId]->opaque_raw_fmt) { 5898 case LEGACY_RAW: 5899 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT) 5900 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG; 
5901 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT) 5902 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG; 5903 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT) 5904 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG; 5905 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY; 5906 break; 5907 case MIPI_RAW: 5908 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT) 5909 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG; 5910 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT) 5911 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG; 5912 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT) 5913 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG; 5914 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI; 5915 break; 5916 default: 5917 ALOGE("%s: unknown opaque_raw_format %d", __func__, 5918 gCamCapability[cameraId]->opaque_raw_fmt); 5919 break; 5920 } 5921 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1); 5922 5923 int32_t strides[3*raw_count]; 5924 for (size_t i = 0; i < raw_count; i++) { 5925 cam_stream_buf_plane_info_t buf_planes; 5926 strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width; 5927 strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height; 5928 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i], 5929 &gCamCapability[cameraId]->padding_info, &buf_planes); 5930 strides[i*3+2] = buf_planes.plane_info.mp[0].stride; 5931 } 5932 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides, 5933 3*raw_count); 5934 5935 gStaticMetadata[cameraId] = staticInfo.release(); 5936 return rc; 5937} 5938 5939/*=========================================================================== 5940 * FUNCTION : makeTable 5941 * 5942 * DESCRIPTION: make a table of sizes 5943 * 5944 * PARAMETERS : 5945 * 5946 * 5947 *==========================================================================*/ 5948void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size, 5949 size_t max_size, int32_t *sizeTable) 5950{ 
5951 size_t j = 0; 5952 if (size > max_size) { 5953 size = max_size; 5954 } 5955 for (size_t i = 0; i < size; i++) { 5956 sizeTable[j] = dimTable[i].width; 5957 sizeTable[j+1] = dimTable[i].height; 5958 j+=2; 5959 } 5960} 5961 5962/*=========================================================================== 5963 * FUNCTION : makeFPSTable 5964 * 5965 * DESCRIPTION: make a table of fps ranges 5966 * 5967 * PARAMETERS : 5968 * 5969 *==========================================================================*/ 5970void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size, 5971 size_t max_size, int32_t *fpsRangesTable) 5972{ 5973 size_t j = 0; 5974 if (size > max_size) { 5975 size = max_size; 5976 } 5977 for (size_t i = 0; i < size; i++) { 5978 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 5979 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 5980 j+=2; 5981 } 5982} 5983 5984/*=========================================================================== 5985 * FUNCTION : makeOverridesList 5986 * 5987 * DESCRIPTION: make a list of scene mode overrides 5988 * 5989 * PARAMETERS : 5990 * 5991 * 5992 *==========================================================================*/ 5993void QCamera3HardwareInterface::makeOverridesList( 5994 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size, 5995 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id) 5996{ 5997 /*daemon will give a list of overrides for all scene modes. 
5998 However we should send the fwk only the overrides for the scene modes 5999 supported by the framework*/ 6000 size_t j = 0; 6001 if (size > max_size) { 6002 size = max_size; 6003 } 6004 size_t focus_count = CAM_FOCUS_MODE_MAX; 6005 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt, 6006 focus_count); 6007 for (size_t i = 0; i < size; i++) { 6008 bool supt = false; 6009 size_t index = supported_indexes[i]; 6010 overridesList[j] = gCamCapability[camera_id]->flash_available ? 6011 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON; 6012 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 6013 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), 6014 overridesTable[index].awb_mode); 6015 if (NAME_NOT_FOUND != val) { 6016 overridesList[j+1] = (uint8_t)val; 6017 } 6018 uint8_t focus_override = overridesTable[index].af_mode; 6019 for (size_t k = 0; k < focus_count; k++) { 6020 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) { 6021 supt = true; 6022 break; 6023 } 6024 } 6025 if (supt) { 6026 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), 6027 focus_override); 6028 if (NAME_NOT_FOUND != val) { 6029 overridesList[j+2] = (uint8_t)val; 6030 } 6031 } else { 6032 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF; 6033 } 6034 j+=3; 6035 } 6036} 6037 6038/*=========================================================================== 6039 * FUNCTION : filterJpegSizes 6040 * 6041 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that 6042 * could be downscaled to 6043 * 6044 * PARAMETERS : 6045 * 6046 * RETURN : length of jpegSizes array 6047 *==========================================================================*/ 6048 6049size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes, 6050 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size, 6051 uint8_t downscale_factor) 6052{ 6053 if (0 == downscale_factor) { 6054 
downscale_factor = 1; 6055 } 6056 6057 int32_t min_width = active_array_size.width / downscale_factor; 6058 int32_t min_height = active_array_size.height / downscale_factor; 6059 size_t jpegSizesCnt = 0; 6060 if (processedSizesCnt > maxCount) { 6061 processedSizesCnt = maxCount; 6062 } 6063 for (size_t i = 0; i < processedSizesCnt; i+=2) { 6064 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) { 6065 jpegSizes[jpegSizesCnt] = processedSizes[i]; 6066 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1]; 6067 jpegSizesCnt += 2; 6068 } 6069 } 6070 return jpegSizesCnt; 6071} 6072 6073/*=========================================================================== 6074 * FUNCTION : getPreviewHalPixelFormat 6075 * 6076 * DESCRIPTION: convert the format to type recognized by framework 6077 * 6078 * PARAMETERS : format : the format from backend 6079 * 6080 ** RETURN : format recognized by framework 6081 * 6082 *==========================================================================*/ 6083int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 6084{ 6085 int32_t halPixelFormat; 6086 6087 switch (format) { 6088 case CAM_FORMAT_YUV_420_NV12: 6089 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 6090 break; 6091 case CAM_FORMAT_YUV_420_NV21: 6092 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 6093 break; 6094 case CAM_FORMAT_YUV_420_NV21_ADRENO: 6095 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 6096 break; 6097 case CAM_FORMAT_YUV_420_YV12: 6098 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 6099 break; 6100 case CAM_FORMAT_YUV_422_NV16: 6101 case CAM_FORMAT_YUV_422_NV61: 6102 default: 6103 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 6104 break; 6105 } 6106 return halPixelFormat; 6107} 6108 6109/*=========================================================================== 6110 * FUNCTION : computeNoiseModelEntryS 6111 * 6112 * DESCRIPTION: function to map a given sensitivity to the S noise 6113 * model parameters in the DNG noise model. 
6114 * 6115 * PARAMETERS : sens : the sensor sensitivity 6116 * 6117 ** RETURN : S (sensor amplification) noise 6118 * 6119 *==========================================================================*/ 6120double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) { 6121 double s = gCamCapability[mCameraId]->gradient_S * sens + 6122 gCamCapability[mCameraId]->offset_S; 6123 return ((s < 0.0) ? 0.0 : s); 6124} 6125 6126/*=========================================================================== 6127 * FUNCTION : computeNoiseModelEntryO 6128 * 6129 * DESCRIPTION: function to map a given sensitivity to the O noise 6130 * model parameters in the DNG noise model. 6131 * 6132 * PARAMETERS : sens : the sensor sensitivity 6133 * 6134 ** RETURN : O (sensor readout) noise 6135 * 6136 *==========================================================================*/ 6137double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) { 6138 double o = gCamCapability[mCameraId]->gradient_O * sens + 6139 gCamCapability[mCameraId]->offset_O; 6140 return ((o < 0.0) ? 
0.0 : o); 6141} 6142 6143/*=========================================================================== 6144 * FUNCTION : getSensorSensitivity 6145 * 6146 * DESCRIPTION: convert iso_mode to an integer value 6147 * 6148 * PARAMETERS : iso_mode : the iso_mode supported by sensor 6149 * 6150 ** RETURN : sensitivity supported by sensor 6151 * 6152 *==========================================================================*/ 6153int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode) 6154{ 6155 int32_t sensitivity; 6156 6157 switch (iso_mode) { 6158 case CAM_ISO_MODE_100: 6159 sensitivity = 100; 6160 break; 6161 case CAM_ISO_MODE_200: 6162 sensitivity = 200; 6163 break; 6164 case CAM_ISO_MODE_400: 6165 sensitivity = 400; 6166 break; 6167 case CAM_ISO_MODE_800: 6168 sensitivity = 800; 6169 break; 6170 case CAM_ISO_MODE_1600: 6171 sensitivity = 1600; 6172 break; 6173 default: 6174 sensitivity = -1; 6175 break; 6176 } 6177 return sensitivity; 6178} 6179 6180/*=========================================================================== 6181 * FUNCTION : getCamInfo 6182 * 6183 * DESCRIPTION: query camera capabilities 6184 * 6185 * PARAMETERS : 6186 * @cameraId : camera Id 6187 * @info : camera info struct to be filled in with camera capabilities 6188 * 6189 * RETURN : int type of status 6190 * NO_ERROR -- success 6191 * none-zero failure code 6192 *==========================================================================*/ 6193int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId, 6194 struct camera_info *info) 6195{ 6196 ATRACE_CALL(); 6197 int rc = 0; 6198 6199 pthread_mutex_lock(&gCamLock); 6200 if (NULL == gCamCapability[cameraId]) { 6201 rc = initCapabilities(cameraId); 6202 if (rc < 0) { 6203 pthread_mutex_unlock(&gCamLock); 6204 return rc; 6205 } 6206 } 6207 6208 if (NULL == gStaticMetadata[cameraId]) { 6209 rc = initStaticMetadata(cameraId); 6210 if (rc < 0) { 6211 pthread_mutex_unlock(&gCamLock); 6212 return rc; 6213 } 6214 } 6215 6216 
switch(gCamCapability[cameraId]->position) { 6217 case CAM_POSITION_BACK: 6218 info->facing = CAMERA_FACING_BACK; 6219 break; 6220 6221 case CAM_POSITION_FRONT: 6222 info->facing = CAMERA_FACING_FRONT; 6223 break; 6224 6225 default: 6226 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 6227 rc = -1; 6228 break; 6229 } 6230 6231 6232 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle; 6233 info->device_version = CAMERA_DEVICE_API_VERSION_3_3; 6234 info->static_camera_characteristics = gStaticMetadata[cameraId]; 6235 6236 pthread_mutex_unlock(&gCamLock); 6237 6238 return rc; 6239} 6240 6241/*=========================================================================== 6242 * FUNCTION : translateCapabilityToMetadata 6243 * 6244 * DESCRIPTION: translate the capability into camera_metadata_t 6245 * 6246 * PARAMETERS : type of the request 6247 * 6248 * 6249 * RETURN : success: camera_metadata_t* 6250 * failure: NULL 6251 * 6252 *==========================================================================*/ 6253camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 6254{ 6255 if (mDefaultMetadata[type] != NULL) { 6256 return mDefaultMetadata[type]; 6257 } 6258 //first time we are handling this request 6259 //fill up the metadata structure using the wrapper class 6260 CameraMetadata settings; 6261 //translate from cam_capability_t to camera_metadata_tag_t 6262 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 6263 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 6264 int32_t defaultRequestID = 0; 6265 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1); 6266 6267 /* OIS disable */ 6268 char ois_prop[PROPERTY_VALUE_MAX]; 6269 memset(ois_prop, 0, sizeof(ois_prop)); 6270 property_get("persist.camera.ois.disable", ois_prop, "0"); 6271 uint8_t ois_disable = (uint8_t)atoi(ois_prop); 6272 6273 /* Force video to use OIS */ 6274 char videoOisProp[PROPERTY_VALUE_MAX]; 6275 
memset(videoOisProp, 0, sizeof(videoOisProp)); 6276 property_get("persist.camera.ois.video", videoOisProp, "1"); 6277 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp); 6278 6279 uint8_t controlIntent = 0; 6280 uint8_t focusMode; 6281 uint8_t vsMode; 6282 uint8_t optStabMode; 6283 uint8_t cacMode; 6284 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 6285 switch (type) { 6286 case CAMERA3_TEMPLATE_PREVIEW: 6287 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 6288 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 6289 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6290 break; 6291 case CAMERA3_TEMPLATE_STILL_CAPTURE: 6292 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 6293 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 6294 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6295 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY; 6296 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1); 6297 break; 6298 case CAMERA3_TEMPLATE_VIDEO_RECORD: 6299 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 6300 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 6301 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 6302 if (forceVideoOis) 6303 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6304 break; 6305 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 6306 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 6307 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 6308 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 6309 if (forceVideoOis) 6310 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6311 break; 6312 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 6313 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 6314 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 6315 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6316 break; 6317 case CAMERA3_TEMPLATE_MANUAL: 6318 controlIntent = 
ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; 6319 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 6320 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 6321 break; 6322 default: 6323 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 6324 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 6325 break; 6326 } 6327 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 6328 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1); 6329 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) { 6330 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 6331 } 6332 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 6333 6334 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 && 6335 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON) 6336 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; 6337 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 && 6338 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF) 6339 || ois_disable) 6340 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 6341 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1); 6342 6343 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 6344 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 6345 6346 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 6347 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 6348 6349 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 6350 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 6351 6352 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 6353 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 6354 6355 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 6356 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 6357 6358 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 6359 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 6360 6361 
static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 6362 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 6363 6364 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 6365 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 6366 6367 /*flash*/ 6368 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 6369 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 6370 6371 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4; 6372 settings.update(ANDROID_FLASH_FIRING_POWER, 6373 &flashFiringLevel, 1); 6374 6375 /* lens */ 6376 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 6377 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 6378 6379 if (gCamCapability[mCameraId]->filter_densities_count) { 6380 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 6381 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 6382 gCamCapability[mCameraId]->filter_densities_count); 6383 } 6384 6385 float default_focal_length = gCamCapability[mCameraId]->focal_length; 6386 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 6387 6388 float default_focus_distance = 0; 6389 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1); 6390 6391 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; 6392 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); 6393 6394 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST; 6395 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1); 6396 6397 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; 6398 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1); 6399 6400 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL; 6401 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); 6402 6403 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; 6404 
settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); 6405 6406 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; 6407 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); 6408 6409 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; 6410 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); 6411 6412 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; 6413 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1); 6414 6415 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF; 6416 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1); 6417 6418 /* Exposure time(Update the Min Exposure Time)*/ 6419 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0]; 6420 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1); 6421 6422 /* frame duration */ 6423 static const int64_t default_frame_duration = NSEC_PER_33MSEC; 6424 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1); 6425 6426 /* sensitivity */ 6427 static const int32_t default_sensitivity = 100; 6428 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1); 6429 6430 /*edge mode*/ 6431 static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST; 6432 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1); 6433 6434 /*noise reduction mode*/ 6435 static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST; 6436 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1); 6437 6438 /*color correction mode*/ 6439 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST; 6440 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1); 6441 6442 /*transform matrix mode*/ 6443 static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST; 6444 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1); 
6445 6446 uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value; 6447 settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1); 6448 6449 int32_t scaler_crop_region[4]; 6450 scaler_crop_region[0] = 0; 6451 scaler_crop_region[1] = 0; 6452 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width; 6453 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height; 6454 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4); 6455 6456 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; 6457 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1); 6458 6459 /*focus distance*/ 6460 float focus_distance = 0.0; 6461 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1); 6462 6463 /*target fps range: use maximum range for picture, and maximum fixed range for video*/ 6464 float max_range = 0.0; 6465 float max_fixed_fps = 0.0; 6466 int32_t fps_range[2] = {0, 0}; 6467 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt; 6468 i++) { 6469 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps - 6470 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps; 6471 if (type == CAMERA3_TEMPLATE_PREVIEW || 6472 type == CAMERA3_TEMPLATE_STILL_CAPTURE || 6473 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) { 6474 if (range > max_range) { 6475 fps_range[0] = 6476 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps; 6477 fps_range[1] = 6478 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps; 6479 max_range = range; 6480 } 6481 } else { 6482 if (range < 0.01 && max_fixed_fps < 6483 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) { 6484 fps_range[0] = 6485 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps; 6486 fps_range[1] = 6487 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps; 6488 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps; 6489 } 6490 } 6491 } 6492 
settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2); 6493 6494 /*precapture trigger*/ 6495 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; 6496 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1); 6497 6498 /*af trigger*/ 6499 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; 6500 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1); 6501 6502 /* ae & af regions */ 6503 int32_t active_region[] = { 6504 gCamCapability[mCameraId]->active_array_size.left, 6505 gCamCapability[mCameraId]->active_array_size.top, 6506 gCamCapability[mCameraId]->active_array_size.left + 6507 gCamCapability[mCameraId]->active_array_size.width, 6508 gCamCapability[mCameraId]->active_array_size.top + 6509 gCamCapability[mCameraId]->active_array_size.height, 6510 0}; 6511 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 6512 sizeof(active_region) / sizeof(active_region[0])); 6513 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 6514 sizeof(active_region) / sizeof(active_region[0])); 6515 6516 /* black level lock */ 6517 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF; 6518 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1); 6519 6520 /* face detect mode */ 6521 uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; 6522 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1); 6523 6524 /* lens shading map mode */ 6525 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; 6526 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) { 6527 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON; 6528 } 6529 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1); 6530 6531 //special defaults for manual template 6532 if (type == CAMERA3_TEMPLATE_MANUAL) { 6533 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF; 6534 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 
1); 6535 6536 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF; 6537 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1); 6538 6539 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF; 6540 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1); 6541 6542 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF; 6543 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1); 6544 6545 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST; 6546 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1); 6547 6548 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; 6549 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1); 6550 } 6551 6552 /* CDS default */ 6553 char prop[PROPERTY_VALUE_MAX]; 6554 memset(prop, 0, sizeof(prop)); 6555 property_get("persist.camera.CDS", prop, "Auto"); 6556 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO; 6557 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop); 6558 if (CAM_CDS_MODE_MAX == cds_mode) { 6559 cds_mode = CAM_CDS_MODE_AUTO; 6560 } 6561 int32_t mode = cds_mode; 6562 settings.update(QCAMERA3_CDS_MODE, &mode, 1); 6563 6564 mDefaultMetadata[type] = settings.release(); 6565 6566 return mDefaultMetadata[type]; 6567} 6568 6569/*=========================================================================== 6570 * FUNCTION : setFrameParameters 6571 * 6572 * DESCRIPTION: set parameters per frame as requested in the metadata from 6573 * framework 6574 * 6575 * PARAMETERS : 6576 * @request : request that needs to be serviced 6577 * @streamID : Stream ID of all the requested streams 6578 * @blob_request: Whether this request is a blob request or not 6579 * 6580 * RETURN : success: NO_ERROR 6581 * failure: 6582 *==========================================================================*/ 6583int QCamera3HardwareInterface::setFrameParameters( 6584 camera3_capture_request_t *request, 6585 
cam_stream_ID_t streamID, 6586 int blob_request, 6587 uint32_t snapshotStreamId) 6588{ 6589 /*translate from camera_metadata_t type to parm_type_t*/ 6590 int rc = 0; 6591 int32_t hal_version = CAM_HAL_V3; 6592 6593 clear_metadata_buffer(mParameters); 6594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) { 6595 ALOGE("%s: Failed to set hal version in the parameters", __func__); 6596 return BAD_VALUE; 6597 } 6598 6599 /*we need to update the frame number in the parameters*/ 6600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER, 6601 request->frame_number)) { 6602 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 6603 return BAD_VALUE; 6604 } 6605 6606 /* Update stream id of all the requested buffers */ 6607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) { 6608 ALOGE("%s: Failed to set stream type mask in the parameters", __func__); 6609 return BAD_VALUE; 6610 } 6611 6612 if (mUpdateDebugLevel) { 6613 uint32_t dummyDebugLevel = 0; 6614 /* The value of dummyDebugLevel is irrelavent. On 6615 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */ 6616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, 6617 dummyDebugLevel)) { 6618 ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__); 6619 return BAD_VALUE; 6620 } 6621 mUpdateDebugLevel = false; 6622 } 6623 6624 if(request->settings != NULL){ 6625 rc = translateToHalMetadata(request, mParameters, snapshotStreamId); 6626 if (blob_request) 6627 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t)); 6628 } 6629 6630 return rc; 6631} 6632 6633/*=========================================================================== 6634 * FUNCTION : setReprocParameters 6635 * 6636 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and 6637 * return it. 
6638 * 6639 * PARAMETERS : 6640 * @request : request that needs to be serviced 6641 * 6642 * RETURN : success: NO_ERROR 6643 * failure: 6644 *==========================================================================*/ 6645int32_t QCamera3HardwareInterface::setReprocParameters( 6646 camera3_capture_request_t *request, metadata_buffer_t *reprocParam, 6647 uint32_t snapshotStreamId) 6648{ 6649 /*translate from camera_metadata_t type to parm_type_t*/ 6650 int rc = 0; 6651 6652 if (NULL == request->settings){ 6653 ALOGE("%s: Reprocess settings cannot be NULL", __func__); 6654 return BAD_VALUE; 6655 } 6656 6657 if (NULL == reprocParam) { 6658 ALOGE("%s: Invalid reprocessing metadata buffer", __func__); 6659 return BAD_VALUE; 6660 } 6661 clear_metadata_buffer(reprocParam); 6662 6663 /*we need to update the frame number in the parameters*/ 6664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER, 6665 request->frame_number)) { 6666 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 6667 return BAD_VALUE; 6668 } 6669 6670 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId); 6671 if (rc < 0) { 6672 ALOGE("%s: Failed to translate reproc request", __func__); 6673 return rc; 6674 } 6675 6676 CameraMetadata frame_settings; 6677 frame_settings = request->settings; 6678 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) && 6679 frame_settings.exists(QCAMERA3_CROP_REPROCESS) && 6680 frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) { 6681 int32_t *crop_count = 6682 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32; 6683 int32_t *crop_data = 6684 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32; 6685 int32_t *crop_stream_ids = 6686 frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32; 6687 int32_t *roi_map = 6688 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32; 6689 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) { 6690 bool found = false; 6691 int32_t i; 6692 
for (i = 0; i < *crop_count; i++) { 6693#ifdef __LP64__ 6694 int32_t id = (int32_t) 6695 ((((int64_t)request->input_buffer->stream) & 0x0000FFFF) ^ 6696 (((int64_t)request->input_buffer->stream) >> 0x20 & 0x0000FFFF)); 6697#else 6698 int32_t id = (int32_t) request->input_buffer->stream; 6699#endif 6700 if (crop_stream_ids[i] == id) { 6701 found = true; 6702 break; 6703 } 6704 } 6705 6706 if (found) { 6707 cam_crop_data_t crop_meta; 6708 size_t roi_map_idx = i*4; 6709 size_t crop_info_idx = i*4; 6710 memset(&crop_meta, 0, sizeof(cam_crop_data_t)); 6711 crop_meta.num_of_streams = 1; 6712 crop_meta.crop_info[0].crop.left = crop_data[crop_info_idx++]; 6713 crop_meta.crop_info[0].crop.top = crop_data[crop_info_idx++]; 6714 crop_meta.crop_info[0].crop.width = crop_data[crop_info_idx++]; 6715 crop_meta.crop_info[0].crop.height = crop_data[crop_info_idx++]; 6716 6717 crop_meta.crop_info[0].roi_map.left = 6718 roi_map[roi_map_idx++]; 6719 crop_meta.crop_info[0].roi_map.top = 6720 roi_map[roi_map_idx++]; 6721 crop_meta.crop_info[0].roi_map.width = 6722 roi_map[roi_map_idx++]; 6723 crop_meta.crop_info[0].roi_map.height = 6724 roi_map[roi_map_idx++]; 6725 6726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) { 6727 rc = BAD_VALUE; 6728 } 6729 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d", 6730 __func__, 6731 request->input_buffer->stream, 6732 crop_meta.crop_info[0].crop.left, 6733 crop_meta.crop_info[0].crop.top, 6734 crop_meta.crop_info[0].crop.width, 6735 crop_meta.crop_info[0].crop.height); 6736 CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d", 6737 __func__, 6738 request->input_buffer->stream, 6739 crop_meta.crop_info[0].roi_map.left, 6740 crop_meta.crop_info[0].roi_map.top, 6741 crop_meta.crop_info[0].roi_map.width, 6742 crop_meta.crop_info[0].roi_map.height); 6743 } else { 6744 ALOGE("%s: No matching reprocess input stream found!", __func__); 6745 } 6746 } else { 6747 ALOGE("%s: Invalid reprocess crop 
/*===========================================================================
 * FUNCTION   : setHalFpsRange
 *
 * DESCRIPTION: set FPS range parameter
 *
 * PARAMETERS :
 *   @settings     : Metadata from framework. Must contain
 *                   ANDROID_CONTROL_AE_TARGET_FPS_RANGE (the caller checks
 *                   for its presence before calling).
 *   @hal_metadata : Metadata buffer the derived fps range / HFR mode are
 *                   written into.
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // Preview (min/max) fps comes straight from the framework AE target range.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps starts out identical to preview fps; it is re-derived below
    // when the device is configured in constrained high-speed (HFR) mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         | [ 30, 240] |     30      |  [ 30,  30]    |
     *                   |-------------|-------------|----------------|
     *                   | [240, 240] |     30      |  [ 30,  30]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         | [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  | [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         | [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   | [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     */
    // Batch mode is re-evaluated on every request; 0 means "no batching".
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Map the requested max fps to the HAL HFR enum.
        // NOTE(review): lookupHalName is handed the float max_fps; this relies
        // on an implicit float->int conversion of an exact fps value -- confirm.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                // Batch enough frames to keep the preview callback rate at
                // PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE.
                mHFRVideoFps = fps_range.max_fps;
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

            if (!m_bIsVideo) {
                // No video stream configured: run the sensor at preview rate
                // (see the "Video stream is absent" half of the table above).
                if (fps_range.min_fps > PREVIEW_FPS_FOR_HFR) {
                    fps_range.min_fps = PREVIEW_FPS_FOR_HFR;
                }
                fps_range.max_fps = fps_range.min_fps;
                fps_range.video_max_fps = fps_range.min_fps;
            } else {
                // Video stream present: lock min to the (HFR) video max rate.
                fps_range.min_fps = fps_range.video_max_fps;
            }
            // Video fps range is always fixed (min == max) in HFR mode.
            fps_range.video_min_fps = fps_range.video_max_fps;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps,
            fps_range.video_max_fps);
    return rc;
}
/*===========================================================================
 * FUNCTION   : translateToHalMetadata
 *
 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
 *
 * PARAMETERS :
 *   @request          : request sent from framework; its settings blob is the
 *                       source of every entry translated below
 *   @hal_metadata     : destination HAL parameter batch
 *   @snapshotStreamId : stream id used to tag JPEG rotation info
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *
 * NOTE(review): many entries below are written into the member mParameters
 * instead of the hal_metadata argument, while others go to hal_metadata.
 * The split looks ad hoc -- verify which batch each tag is meant to land in.
 *==========================================================================*/
int QCamera3HardwareInterface::translateToHalMetadata
                                  (const camera3_capture_request_t *request,
                                   metadata_buffer_t *hal_metadata,
                                   uint32_t snapshotStreamId)
{
    int rc = 0;
    CameraMetadata frame_settings;
    frame_settings = request->settings;

    /* Do not change the order of the following list unless you know what you are
     * doing.
     * The order is laid out in such a way that parameters in the front of the table
     * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
     */
    // Top-level 3A control mode; also drives scene-mode extraction.
    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
            rc = BAD_VALUE;
        }
        // NOTE(review): rc from the batch-add above is overwritten here, so a
        // batch-add failure can be masked by a successful extractSceneMode.
        rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
        if (rc != NO_ERROR) {
            ALOGE("%s: extractSceneMode failed", __func__);
        }
    }

    // AE mode: split into on/off, red-eye reduction, and LED flash mode.
    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
        uint8_t fwk_aeMode =
            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
        uint8_t aeMode;
        int32_t redeye;

        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
            aeMode = CAM_AE_MODE_OFF;
        } else {
            aeMode = CAM_AE_MODE_ON;
        }
        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
            redeye = 1;
        } else {
            redeye = 0;
        }

        int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                fwk_aeMode);
        if (NAME_NOT_FOUND != val) {
            int32_t flashMode = (int32_t)val;
            // LED mode is written to both batches; return values deliberately
            // not checked here (unlike most entries in this function).
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_LED_MODE, flashMode);
            ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
        }

        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
            rc = BAD_VALUE;
        }
    }

    // AWB mode, mapped through WHITE_BALANCE_MODES_MAP.
    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
        uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
        int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                fwk_whiteLevel);
        if (NAME_NOT_FOUND != val) {
            uint8_t whiteLevel = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Chromatic aberration correction (CAC) mode.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        uint8_t fwk_cacMode =
                frame_settings.find(
                        ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
        int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                fwk_cacMode);
        if (NAME_NOT_FOUND != val) {
            cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
                rc = BAD_VALUE;
            }
        } else {
            ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
        }
    }

    // AF mode, mapped through FOCUS_MODES_MAP.
    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
        uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
        int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                fwk_focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t focusMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Manual focus distance (diopters).
    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FOCUS_DISTANCE,
                focalDistance)) {
            rc = BAD_VALUE;
        }
    }

    // AE antibanding (50/60Hz/auto).
    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
        uint8_t fwk_antibandingMode =
                frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
        int val = lookupHalName(ANTIBANDING_MODES_MAP,
                METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
        if (NAME_NOT_FOUND != val) {
            uint32_t hal_antibandingMode = (uint32_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_ANTIBANDING,
                    hal_antibandingMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Exposure compensation, clamped to the sensor capability range.
    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
        int32_t expCompensation = frame_settings.find(
                ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
                expCompensation)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
            rc = BAD_VALUE;
        }
    }
    // FPS range (incl. HFR/batch-mode derivation) is delegated to setHalFpsRange.
    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
        rc = setHalFpsRange(frame_settings, hal_metadata);
        if (rc != NO_ERROR) {
            ALOGE("%s: setHalFpsRange failed", __func__);
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
        uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
            rc = BAD_VALUE;
        }
    }

    // Color effect (mono, sepia, ...).
    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
        uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
        int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                fwk_effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t effectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_EFFECT, effectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
        uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
                colorCorrectMode)) {
            rc = BAD_VALUE;
        }
    }

    // Per-channel color correction gains (CC_GAINS_COUNT floats).
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
        cam_color_correct_gains_t colorCorrectGains;
        for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
            colorCorrectGains.gains[i] =
                    frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
                colorCorrectGains)) {
            rc = BAD_VALUE;
        }
    }

    // 3x3 color correction matrix of rationals, flattened row-major.
    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
        cam_color_correct_matrix_t colorCorrectTransform;
        cam_rational_type_t transform_elem;
        size_t num = 0;
        for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
           for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
              transform_elem.numerator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
              transform_elem.denominator =
                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
              num++;
           }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
                colorCorrectTransform)) {
            rc = BAD_VALUE;
        }
    }

    // AE precapture trigger; only forwarded when the (legacy) trigger id is
    // also present, otherwise it stays CAM_AEC_TRIGGER_IDLE.
    cam_trigger_t aecTrigger;
    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
    aecTrigger.trigger_id = -1;
    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
        aecTrigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
        aecTrigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
                aecTrigger)) {
            rc = BAD_VALUE;
        }
        CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
                aecTrigger.trigger, aecTrigger.trigger_id);
    }

    /*af_trigger must come with a trigger id*/
    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
        cam_trigger_t af_trigger;
        af_trigger.trigger =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
        af_trigger.trigger_id =
            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
            rc = BAD_VALUE;
        }
        CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
                af_trigger.trigger, af_trigger.trigger_id);
    }

    // NOTE(review): u8 framework value stored into an int32_t local -- confirm
    // CAM_INTF_META_DEMOSAIC expects a 32-bit payload.
    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
        int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DEMOSAIC, demosaic)) {
            rc = BAD_VALUE;
        }
    }

    // Edge enhancement: sharpness forced to 0 when mode is OFF, otherwise the
    // framework strength or the capability default.
    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
        cam_edge_application_t edge_application;
        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
            edge_application.sharpness = 0;
        } else {
            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
                uint8_t edgeStrength = frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
                edge_application.sharpness = (int32_t)edgeStrength;
            } else {
                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
            }
        }
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
            rc = BAD_VALUE;
        }
    }

    // android.flash.mode is only honored when AE mode does not already control
    // the flash (i.e. AE mode is OFF or ON, not one of the AUTO_FLASH modes).
    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
        int32_t respectFlashMode = 1;
        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
            uint8_t fwk_aeMode =
                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
                respectFlashMode = 0;
                CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
                    __func__);
            }
        }
        if (respectFlashMode) {
            int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
                    (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
            CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
            // To check: CAM_INTF_META_FLASH_MODE usage
            if (NAME_NOT_FOUND != val) {
                uint8_t flashMode = (uint8_t)val;
                if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_LED_MODE, flashMode)) {
                    rc = BAD_VALUE;
                }
            }
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
        uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FLASH_POWER, flashPower)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
        int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
                flashFiringTime)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
        uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
                hotPixelMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
        float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_APERTURE,
                lensAperture)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
        float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
                filterDensity)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
        float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_FOCAL_LENGTH, focalLength)) {
            rc = BAD_VALUE;
        }
    }

    // Optical image stabilization on/off.
    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
        uint8_t optStabMode =
            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_OPT_STAB_MODE, optStabMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
        uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_NOISE_REDUCTION_MODE,
                noiseRedMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
        uint8_t noiseRedStrength =
                frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
                noiseRedStrength)) {
            rc = BAD_VALUE;
        }
    }

    // Effective exposure factor for reprocess requests (e.g. ZSL flash).
    if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
        float reprocessEffectiveExposureFactor =
            frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
                reprocessEffectiveExposureFactor)) {
            rc = BAD_VALUE;
        }
    }

    // Scaler crop region; remembered locally so the AE/AF ROIs further down
    // can be validated/reset against it.
    cam_crop_region_t scalerCropRegion;
    bool scalerCropSet = false;
    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
        scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
        scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
        scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
        scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];

        // Map coordinate system from active array to sensor output.
        mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
                scalerCropRegion.width, scalerCropRegion.height);

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
                scalerCropRegion)) {
            rc = BAD_VALUE;
        }
        scalerCropSet = true;
    }

    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
        int64_t sensorExpTime =
                frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
        CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
                sensorExpTime)) {
            rc = BAD_VALUE;
        }
    }

    // Frame duration: clamped between the per-request minimum and the sensor's
    // maximum supported duration.
    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
        int64_t sensorFrameDuration =
                frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
        int64_t minFrameDuration = getMinFrameDuration(request);
        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
        CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
                sensorFrameDuration)) {
            rc = BAD_VALUE;
        }
    }

    // Sensitivity (ISO): clamped to the sensor capability range.
    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
        int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
        if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
        if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
                sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
        CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
                sensorSensitivity)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
        uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SHADING_MODE, shadingMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
        uint8_t shadingStrength = frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SHADING_STRENGTH,
                shadingStrength)) {
            rc = BAD_VALUE;
        }
    }


    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
        uint8_t fwk_facedetectMode =
                frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
        int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
                fwk_facedetectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t facedetectMode = (uint8_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
                    facedetectMode)) {
                rc = BAD_VALUE;
            }
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
        uint8_t histogramMode =
                frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_HISTOGRAM_MODE,
                histogramMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
        uint8_t sharpnessMapMode =
                frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
                sharpnessMapMode)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
        uint8_t tonemapMode =
                frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
            rc = BAD_VALUE;
        }
    }
    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
    /*All tonemap channels will have the same number of points*/
    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
        cam_rgb_tonemap_curves tonemapCurves;
        // Each curve is (in, out) float pairs, hence count/2 points.
        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
        if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
                    __func__, tonemapCurves.tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        /* ch0 = G*/
        size_t point = 0;
        cam_tonemap_curve_t tonemapCurveGreen;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveGreen.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[0] = tonemapCurveGreen;

        /* ch 1 = B */
        point = 0;
        cam_tonemap_curve_t tonemapCurveBlue;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveBlue.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[1] = tonemapCurveBlue;

        /* ch 2 = R */
        point = 0;
        cam_tonemap_curve_t tonemapCurveRed;
        for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
            for (size_t j = 0; j < 2; j++) {
               tonemapCurveRed.tonemap_points[i][j] =
                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
               point++;
            }
        }
        tonemapCurves.curves[2] = tonemapCurveRed;

        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
                tonemapCurves)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT,
                captureIntent)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
        uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
                blackLevelLock)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
        uint8_t lensShadingMapMode =
                frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
                lensShadingMapMode)) {
            rc = BAD_VALUE;
        }
    }

    // AE ROI; dropped (not sent) when it lies outside the scaler crop region.
    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // AF ROI; same crop-region gating as the AE ROI above.
    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
        cam_area_t roi;
        bool reset = true;
        convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
        if (scalerCropSet) {
            reset = resetIfNeededROI(&roi, &scalerCropRegion);
        }
        if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
            rc = BAD_VALUE;
        }
    }

    // CDS
    if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
        int32_t *cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
        if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
            ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
        } else {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CDS_MODE, *cds)) {
                rc = BAD_VALUE;
            }
        }
    }

    // Sensor test pattern; SOLID_COLOR additionally carries per-channel values
    // whose gr/gb assignment depends on the sensor's Bayer arrangement.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        ALOGE("%s: color arrangement %d is not supported", __func__,
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
                    fwk_testPatternMode);
        }
    }

    // JPEG EXIF: GPS coordinates (array of doubles).
    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        size_t count = 0;
        camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
                gps_coords.data.d, gps_coords.count, count);
        if (gps_coords.count != count) {
            rc = BAD_VALUE;
        }
    }

    // JPEG EXIF: GPS processing method string, copied into a fixed-size,
    // NUL-padded buffer.
    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
        size_t count = 0;
        const char *gps_methods_src = (const char *)
                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
        memset(gps_methods, '\0', sizeof(gps_methods));
        strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
                gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
        if (GPS_PROCESSING_METHOD_SIZE != count) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
                gps_timestamp)) {
            rc = BAD_VALUE;
        }
    }

    // JPEG orientation: forwarded as-is and also converted to the HAL rotation
    // enum, tagged with the snapshot stream id.
    // NOTE(review): rotation_info.rotation is left uninitialized if the
    // framework sends an orientation other than 0/90/180/270 -- confirm the
    // framework guarantees right-angle values here.
    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
        int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
        cam_rotation_info_t rotation_info;
        if (orientation == 0) {
           rotation_info.rotation = ROTATE_0;
        } else if (orientation == 90) {
           rotation_info.rotation = ROTATE_90;
        } else if (orientation == 180) {
           rotation_info.rotation = ROTATE_180;
        } else if (orientation == 270) {
           rotation_info.rotation = ROTATE_270;
        }
        rotation_info.streamId = snapshotStreamId;
        ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
        uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
        uint32_t thumb_quality = (uint32_t)
                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
                thumb_quality)) {
            rc = BAD_VALUE;
        }
    }

    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        cam_dimension_t dim;
        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
            rc = BAD_VALUE;
        }
    }

    // Internal metadata
    // Opaque vendor private data passed back on reprocess requests.
    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
        size_t count = 0;
        camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
        ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
                privatedata.data.i32, privatedata.count, count);
        if (privatedata.count != count) {
            rc = BAD_VALUE;
        }
    }

    // Vendor tag: use AV timer for timestamps (e.g. for A/V sync recording).
    if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
        uint8_t* use_av_timer =
                frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
            rc = BAD_VALUE;
        }
    }

    // EV step
    // Always published from the static capability, independent of the request.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
            gCamCapability[mCameraId]->exp_compensation_step)) {
        rc = BAD_VALUE;
    }

    return rc;
}
7650 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7651 if (!hw) { 7652 ALOGE("%s: NULL camera device", __func__); 7653 return -ENODEV; 7654 } 7655 7656 int rc = hw->initialize(callback_ops); 7657 CDBG("%s: X", __func__); 7658 return rc; 7659} 7660 7661/*=========================================================================== 7662 * FUNCTION : configure_streams 7663 * 7664 * DESCRIPTION: 7665 * 7666 * PARAMETERS : 7667 * 7668 * 7669 * RETURN : Success: 0 7670 * Failure: -EINVAL (if stream configuration is invalid) 7671 * -ENODEV (fatal error) 7672 *==========================================================================*/ 7673 7674int QCamera3HardwareInterface::configure_streams( 7675 const struct camera3_device *device, 7676 camera3_stream_configuration_t *stream_list) 7677{ 7678 CDBG("%s: E", __func__); 7679 QCamera3HardwareInterface *hw = 7680 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7681 if (!hw) { 7682 ALOGE("%s: NULL camera device", __func__); 7683 return -ENODEV; 7684 } 7685 int rc = hw->configureStreams(stream_list); 7686 CDBG("%s: X", __func__); 7687 return rc; 7688} 7689 7690/*=========================================================================== 7691 * FUNCTION : construct_default_request_settings 7692 * 7693 * DESCRIPTION: Configure a settings buffer to meet the required use case 7694 * 7695 * PARAMETERS : 7696 * 7697 * 7698 * RETURN : Success: Return valid metadata 7699 * Failure: Return NULL 7700 *==========================================================================*/ 7701const camera_metadata_t* QCamera3HardwareInterface:: 7702 construct_default_request_settings(const struct camera3_device *device, 7703 int type) 7704{ 7705 7706 CDBG("%s: E", __func__); 7707 camera_metadata_t* fwk_metadata = NULL; 7708 QCamera3HardwareInterface *hw = 7709 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7710 if (!hw) { 7711 ALOGE("%s: NULL camera device", __func__); 7712 return NULL; 7713 } 7714 7715 
fwk_metadata = hw->translateCapabilityToMetadata(type); 7716 7717 CDBG("%s: X", __func__); 7718 return fwk_metadata; 7719} 7720 7721/*=========================================================================== 7722 * FUNCTION : process_capture_request 7723 * 7724 * DESCRIPTION: 7725 * 7726 * PARAMETERS : 7727 * 7728 * 7729 * RETURN : 7730 *==========================================================================*/ 7731int QCamera3HardwareInterface::process_capture_request( 7732 const struct camera3_device *device, 7733 camera3_capture_request_t *request) 7734{ 7735 CDBG("%s: E", __func__); 7736 QCamera3HardwareInterface *hw = 7737 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7738 if (!hw) { 7739 ALOGE("%s: NULL camera device", __func__); 7740 return -EINVAL; 7741 } 7742 7743 int rc = hw->processCaptureRequest(request); 7744 CDBG("%s: X", __func__); 7745 return rc; 7746} 7747 7748/*=========================================================================== 7749 * FUNCTION : dump 7750 * 7751 * DESCRIPTION: 7752 * 7753 * PARAMETERS : 7754 * 7755 * 7756 * RETURN : 7757 *==========================================================================*/ 7758 7759void QCamera3HardwareInterface::dump( 7760 const struct camera3_device *device, int fd) 7761{ 7762 /* Log level property is read when "adb shell dumpsys media.camera" is 7763 called so that the log level can be controlled without restarting 7764 the media server */ 7765 getLogLevel(); 7766 7767 CDBG("%s: E", __func__); 7768 QCamera3HardwareInterface *hw = 7769 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7770 if (!hw) { 7771 ALOGE("%s: NULL camera device", __func__); 7772 return; 7773 } 7774 7775 hw->dump(fd); 7776 CDBG("%s: X", __func__); 7777 return; 7778} 7779 7780/*=========================================================================== 7781 * FUNCTION : flush 7782 * 7783 * DESCRIPTION: 7784 * 7785 * PARAMETERS : 7786 * 7787 * 7788 * RETURN : 7789 
*==========================================================================*/ 7790 7791int QCamera3HardwareInterface::flush( 7792 const struct camera3_device *device) 7793{ 7794 int rc; 7795 CDBG("%s: E", __func__); 7796 QCamera3HardwareInterface *hw = 7797 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 7798 if (!hw) { 7799 ALOGE("%s: NULL camera device", __func__); 7800 return -EINVAL; 7801 } 7802 7803 rc = hw->flush(); 7804 CDBG("%s: X", __func__); 7805 return rc; 7806} 7807 7808/*=========================================================================== 7809 * FUNCTION : close_camera_device 7810 * 7811 * DESCRIPTION: 7812 * 7813 * PARAMETERS : 7814 * 7815 * 7816 * RETURN : 7817 *==========================================================================*/ 7818int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 7819{ 7820 CDBG("%s: E", __func__); 7821 int ret = NO_ERROR; 7822 QCamera3HardwareInterface *hw = 7823 reinterpret_cast<QCamera3HardwareInterface *>( 7824 reinterpret_cast<camera3_device_t *>(device)->priv); 7825 if (!hw) { 7826 ALOGE("NULL camera device"); 7827 return BAD_VALUE; 7828 } 7829 delete hw; 7830 7831 CDBG("%s: X", __func__); 7832 return ret; 7833} 7834 7835/*=========================================================================== 7836 * FUNCTION : getWaveletDenoiseProcessPlate 7837 * 7838 * DESCRIPTION: query wavelet denoise process plate 7839 * 7840 * PARAMETERS : None 7841 * 7842 * RETURN : WNR prcocess plate vlaue 7843 *==========================================================================*/ 7844cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate() 7845{ 7846 char prop[PROPERTY_VALUE_MAX]; 7847 memset(prop, 0, sizeof(prop)); 7848 property_get("persist.denoise.process.plates", prop, "0"); 7849 int processPlate = atoi(prop); 7850 switch(processPlate) { 7851 case 0: 7852 return CAM_WAVELET_DENOISE_YCBCR_PLANE; 7853 case 1: 7854 return CAM_WAVELET_DENOISE_CBCR_ONLY; 
7855 case 2: 7856 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 7857 case 3: 7858 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR; 7859 default: 7860 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 7861 } 7862} 7863 7864 7865/*=========================================================================== 7866 * FUNCTION : extractSceneMode 7867 * 7868 * DESCRIPTION: Extract scene mode from frameworks set metadata 7869 * 7870 * PARAMETERS : 7871 * @frame_settings: CameraMetadata reference 7872 * @metaMode: ANDROID_CONTORL_MODE 7873 * @hal_metadata: hal metadata structure 7874 * 7875 * RETURN : None 7876 *==========================================================================*/ 7877int32_t QCamera3HardwareInterface::extractSceneMode( 7878 const CameraMetadata &frame_settings, uint8_t metaMode, 7879 metadata_buffer_t *hal_metadata) 7880{ 7881 int32_t rc = NO_ERROR; 7882 7883 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 7884 camera_metadata_ro_entry entry = 7885 frame_settings.find(ANDROID_CONTROL_SCENE_MODE); 7886 if (0 == entry.count) 7887 return rc; 7888 7889 uint8_t fwk_sceneMode = entry.data.u8[0]; 7890 7891 int val = lookupHalName(SCENE_MODES_MAP, 7892 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 7893 fwk_sceneMode); 7894 if (NAME_NOT_FOUND != val) { 7895 uint8_t sceneMode = (uint8_t)val; 7896 CDBG("%s: sceneMode: %d", __func__, sceneMode); 7897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, 7898 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) { 7899 rc = BAD_VALUE; 7900 } 7901 } 7902 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) || 7903 (ANDROID_CONTROL_MODE_AUTO == metaMode)) { 7904 uint8_t sceneMode = CAM_SCENE_MODE_OFF; 7905 CDBG("%s: sceneMode: %d", __func__, sceneMode); 7906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, 7907 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) { 7908 rc = BAD_VALUE; 7909 } 7910 } 7911 return rc; 7912} 7913 7914/*=========================================================================== 7915 * FUNCTION : needRotationReprocess 
7916 * 7917 * DESCRIPTION: if rotation needs to be done by reprocess in pp 7918 * 7919 * PARAMETERS : none 7920 * 7921 * RETURN : true: needed 7922 * false: no need 7923 *==========================================================================*/ 7924bool QCamera3HardwareInterface::needRotationReprocess() 7925{ 7926 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) { 7927 // current rotation is not zero, and pp has the capability to process rotation 7928 CDBG_HIGH("%s: need do reprocess for rotation", __func__); 7929 return true; 7930 } 7931 7932 return false; 7933} 7934 7935/*=========================================================================== 7936 * FUNCTION : needReprocess 7937 * 7938 * DESCRIPTION: if reprocess in needed 7939 * 7940 * PARAMETERS : none 7941 * 7942 * RETURN : true: needed 7943 * false: no need 7944 *==========================================================================*/ 7945bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask) 7946{ 7947 if (gCamCapability[mCameraId]->min_required_pp_mask > 0) { 7948 // TODO: add for ZSL HDR later 7949 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode 7950 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){ 7951 CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__); 7952 return true; 7953 } else { 7954 CDBG_HIGH("%s: already post processed frame", __func__); 7955 return false; 7956 } 7957 } 7958 return needRotationReprocess(); 7959} 7960 7961/*=========================================================================== 7962 * FUNCTION : needJpegRotation 7963 * 7964 * DESCRIPTION: if rotation from jpeg is needed 7965 * 7966 * PARAMETERS : none 7967 * 7968 * RETURN : true: needed 7969 * false: no need 7970 *==========================================================================*/ 7971bool QCamera3HardwareInterface::needJpegRotation() 7972{ 7973 /*If the pp does not have the ability to do rotation, 
enable jpeg rotation*/ 7974 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) { 7975 CDBG("%s: Need Jpeg to do the rotation", __func__); 7976 return true; 7977 } 7978 return false; 7979} 7980 7981/*=========================================================================== 7982 * FUNCTION : addOfflineReprocChannel 7983 * 7984 * DESCRIPTION: add a reprocess channel that will do reprocess on frames 7985 * coming from input channel 7986 * 7987 * PARAMETERS : 7988 * @config : reprocess configuration 7989 * 7990 * 7991 * RETURN : Ptr to the newly created channel obj. NULL if failed. 7992 *==========================================================================*/ 7993QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel( 7994 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle) 7995{ 7996 int32_t rc = NO_ERROR; 7997 QCamera3ReprocessChannel *pChannel = NULL; 7998 7999 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle, 8000 mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle); 8001 if (NULL == pChannel) { 8002 ALOGE("%s: no mem for reprocess channel", __func__); 8003 return NULL; 8004 } 8005 8006 rc = pChannel->initialize(IS_TYPE_NONE); 8007 if (rc != NO_ERROR) { 8008 ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc); 8009 delete pChannel; 8010 return NULL; 8011 } 8012 8013 // pp feature config 8014 cam_pp_feature_config_t pp_config; 8015 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t)); 8016 8017 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3; 8018 8019 rc = pChannel->addReprocStreamsFromSource(pp_config, 8020 config, 8021 IS_TYPE_NONE, 8022 mMetadataChannel); 8023 8024 if (rc != NO_ERROR) { 8025 delete pChannel; 8026 return NULL; 8027 } 8028 return pChannel; 8029} 8030 8031/*=========================================================================== 8032 * FUNCTION : getMobicatMask 8033 * 8034 * 
DESCRIPTION: returns mobicat mask 8035 * 8036 * PARAMETERS : none 8037 * 8038 * RETURN : mobicat mask 8039 * 8040 *==========================================================================*/ 8041uint8_t QCamera3HardwareInterface::getMobicatMask() 8042{ 8043 return m_MobicatMask; 8044} 8045 8046/*=========================================================================== 8047 * FUNCTION : setMobicat 8048 * 8049 * DESCRIPTION: set Mobicat on/off. 8050 * 8051 * PARAMETERS : 8052 * @params : none 8053 * 8054 * RETURN : int32_t type of status 8055 * NO_ERROR -- success 8056 * none-zero failure code 8057 *==========================================================================*/ 8058int32_t QCamera3HardwareInterface::setMobicat() 8059{ 8060 char value [PROPERTY_VALUE_MAX]; 8061 property_get("persist.camera.mobicat", value, "0"); 8062 int32_t ret = NO_ERROR; 8063 uint8_t enableMobi = (uint8_t)atoi(value); 8064 8065 if (enableMobi) { 8066 tune_cmd_t tune_cmd; 8067 tune_cmd.type = SET_RELOAD_CHROMATIX; 8068 tune_cmd.module = MODULE_ALL; 8069 tune_cmd.value = TRUE; 8070 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 8071 CAM_INTF_PARM_SET_VFE_COMMAND, 8072 tune_cmd); 8073 8074 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, 8075 CAM_INTF_PARM_SET_PP_COMMAND, 8076 tune_cmd); 8077 } 8078 m_MobicatMask = enableMobi; 8079 8080 return ret; 8081} 8082 8083/*=========================================================================== 8084* FUNCTION : getLogLevel 8085* 8086* DESCRIPTION: Reads the log level property into a variable 8087* 8088* PARAMETERS : 8089* None 8090* 8091* RETURN : 8092* None 8093*==========================================================================*/ 8094void QCamera3HardwareInterface::getLogLevel() 8095{ 8096 char prop[PROPERTY_VALUE_MAX]; 8097 uint32_t globalLogLevel = 0; 8098 8099 property_get("persist.camera.hal.debug", prop, "0"); 8100 int val = atoi(prop); 8101 if (0 <= val) { 8102 gCamHal3LogLevel = (uint32_t)val; 8103 } 8104 
property_get("persist.camera.global.debug", prop, "0"); 8105 val = atoi(prop); 8106 if (0 <= val) { 8107 globalLogLevel = (uint32_t)val; 8108 } 8109 8110 /* Highest log level among hal.logs and global.logs is selected */ 8111 if (gCamHal3LogLevel < globalLogLevel) 8112 gCamHal3LogLevel = globalLogLevel; 8113 8114 return; 8115} 8116 8117/*=========================================================================== 8118 * FUNCTION : validateStreamRotations 8119 * 8120 * DESCRIPTION: Check if the rotations requested are supported 8121 * 8122 * PARAMETERS : 8123 * @stream_list : streams to be configured 8124 * 8125 * RETURN : NO_ERROR on success 8126 * -EINVAL on failure 8127 * 8128 *==========================================================================*/ 8129int QCamera3HardwareInterface::validateStreamRotations( 8130 camera3_stream_configuration_t *streamList) 8131{ 8132 int rc = NO_ERROR; 8133 8134 /* 8135 * Loop through all streams requested in configuration 8136 * Check if unsupported rotations have been requested on any of them 8137 */ 8138 for (size_t j = 0; j < streamList->num_streams; j++){ 8139 camera3_stream_t *newStream = streamList->streams[j]; 8140 8141 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0); 8142 bool isImplDef = (newStream->format == 8143 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED); 8144 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL && 8145 isImplDef); 8146 8147 if (isRotated && (!isImplDef || isZsl)) { 8148 ALOGE("%s: Error: Unsupported rotation of %d requested for stream" 8149 "type:%d and stream format:%d", __func__, 8150 newStream->rotation, newStream->stream_type, 8151 newStream->format); 8152 rc = -EINVAL; 8153 break; 8154 } 8155 } 8156 return rc; 8157} 8158 8159}; //end namespace qcamera 8160