CaptureResult.java revision 0da8bf5dbc8912cf70df14bfa655189a04c75476
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.hardware.camera2.impl.CameraMetadataNative;

/**
 * <p>The results of a single image capture from the image sensor.</p>
 *
 * <p>Contains the final configuration for the capture hardware (sensor, lens,
 * flash), the processing pipeline, the control algorithms, and the output
 * buffers.</p>
 *
 * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
 * {@link CaptureRequest}. All properties listed for capture requests can also
 * be queried on the capture result, to determine the final values used for
 * capture. The result also includes additional metadata about the state of the
 * camera device during the capture.</p>
 */
public final class CaptureResult extends CameraMetadata {

    private final CameraMetadataNative mResults;
    private final CaptureRequest mRequest;
    private final int mSequenceId;

    /**
     * Takes ownership of the passed-in properties object
     * @hide
     */
    public CaptureResult(CameraMetadataNative results, CaptureRequest parent, int sequenceId) {
        if (results == null) {
            throw new IllegalArgumentException("results was null");
        }

        if (parent == null) {
            throw new IllegalArgumentException("parent was null");
        }

        mResults = results;
        mRequest = parent;
        mSequenceId = sequenceId;
    }

    @Override
    public <T> T get(Key<T> key) {
        return mResults.get(key);
    }

    /**
     * Get the request associated with this result.
     *
     * <p>Whenever a request is successfully captured, with
     * {@link CameraDevice.CaptureListener#onCaptureCompleted},
     * the {@code result}'s {@code getRequest()} will return that {@code request}.
     * </p>
     *
     * <p>In particular,
     * <pre><code>cameraDevice.capture(someRequest, new CaptureListener() {
     *     {@literal @}Override
     *     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
     *         assert(myResult.getRequest().equals(myRequest));
     *     }
     * });
     * </code></pre>
     * </p>
     *
     * @return The request associated with this result. Never {@code null}.
     */
    public CaptureRequest getRequest() {
        return mRequest;
    }

    /**
     * Get the frame number associated with this result.
     *
     * <p>Whenever a request has been processed, regardless of failure or success,
     * it gets a unique frame number assigned to its future result/failure.</p>
     *
     * <p>This value monotonically increments, starting with 0,
     * for every new result or failure; the scope is the lifetime of the
     * {@link CameraDevice}.</p>
     *
     * @return int frame number
     */
    public int getFrameNumber() {
        return get(REQUEST_FRAME_COUNT);
    }
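
    // ------------------------------------------------------------------
    // Illustrative example (not part of the original source): a minimal
    // sketch of how an application might inspect a result delivered to
    // CameraDevice.CaptureListener#onCaptureCompleted. Only accessors and
    // keys defined in this class are used; the method name is hypothetical.
    // ------------------------------------------------------------------
    private static void exampleInspectResult(CaptureRequest request, CaptureResult result) {
        // The result always refers back to the request that produced it.
        assert result.getRequest().equals(request);

        // Frame numbers increase monotonically over the device's lifetime.
        int frameNumber = result.getFrameNumber();

        // Any capture-request key can be re-read from the result to learn
        // the final value the camera device actually used for this frame.
        Long exposureTimeNs = result.get(SENSOR_EXPOSURE_TIME);
    }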

    /**
     * The sequence ID for this capture result, as returned by the
     * {@link CameraDevice#capture} family of functions.
     *
     * <p>The sequence ID is a unique monotonically increasing value starting from 0,
     * incremented every time a new group of requests is submitted to the CameraDevice.</p>
     *
     * @return int The ID for the sequence of requests that this capture result is a part of
     *
     * @see CameraDevice.CaptureListener#onCaptureSequenceCompleted
     */
    public int getSequenceId() {
        return mSequenceId;
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/

    /**
     * <p>A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space.</p>
     * <p>This matrix is either set by the HAL when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     * <p>In the latter case, the HAL may round the matrix to account
     * for precision issues; the final rounded matrix should be
     * reported back in this matrix result metadata.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>Gains applying to Bayer color channels for
     * white-balance.</p>
     * <p>The 4-channel white-balance gains are defined in
     * the order of [R G_even G_odd B], where G_even is the gain
     * for green pixels on even rows of the output, and G_odd
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the G_even value, and write G_odd equal to
     * G_even in the output result metadata.</p>
     * <p>This array is either set by the HAL when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     * <p>The output should be the gains actually applied by the HAL to
     * the current frame.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);
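
    // Illustrative example (not part of the original source): unpacking the
    // 4-channel white-balance gains reported in COLOR_CORRECTION_GAINS.
    // A minimal sketch; the key may be absent (null) on some devices.
    private static void exampleReadColorGains(CaptureResult result) {
        float[] gains = result.get(COLOR_CORRECTION_GAINS);
        if (gains != null && gains.length == 4) {
            // Channel order documented above: [R, G_even, G_odd, B].
            float red = gains[0];
            float greenEven = gains[1];
            float greenOdd = gains[2];
            float blue = gains[3];
        }
    }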

    /**
     * <p>The ID sent with the latest
     * CAMERA2_TRIGGER_PRECAPTURE_METERING call.</p>
     * <p>Must be 0 if no CAMERA2_TRIGGER_PRECAPTURE_METERING trigger has been
     * received yet by the HAL. Always updated, even if the AE algorithm
     * ignores the trigger.</p>
     * @hide
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
            new Key<Integer>("android.control.aePrecaptureId", int.class);

    /**
     * <p>The desired mode for the camera device's
     * auto-exposure routine.</p>
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
     * AUTO.</p>
     * <p>When set to any of the ON modes, the camera device's
     * auto-exposure routine is enabled, overriding the
     * application's selected exposure time, sensor sensitivity,
     * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
     * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
     * is selected, the camera device's flash unit controls are
     * also overridden.</p>
     * <p>The FLASH modes are only available if the camera device
     * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
     * <p>If flash TORCH mode is desired, this field must be set to
     * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
     * <p>When set to any of the ON modes, the values chosen by the
     * camera device auto-exposure routine for the overridden
     * fields for a given capture will be available in its
     * CaptureResult.</p>
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     * @see CaptureRequest#SENSOR_SENSITIVITY
     * @see CaptureRequest#FLASH_MODE
     * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AE_MODE_OFF
     * @see #CONTROL_AE_MODE_ON
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
     * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
     */
    public static final Key<Integer> CONTROL_AE_MODE =
            new Key<Integer>("android.control.aeMode", int.class);

    /**
     * <p>List of areas to use for
     * metering.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     * @see CaptureRequest#SCALER_CROP_REGION
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);
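
    // Illustrative example (not part of the original source): decoding the
    // packed 5-tuple region format documented for CONTROL_AE_REGIONS (the
    // same layout is documented for the AF and AWB region keys below).
    // A minimal sketch; the key may be absent (null) on some devices.
    private static void exampleReadAeRegions(CaptureResult result) {
        int[] regions = result.get(CONTROL_AE_REGIONS);
        if (regions == null) {
            return;
        }
        // Each region is [xmin, ymin, xmax, ymax, weight], with inclusive
        // coordinates in the active pixel array.
        for (int i = 0; i + 4 < regions.length; i += 5) {
            android.graphics.Rect rect = new android.graphics.Rect(
                    regions[i], regions[i + 1], regions[i + 2], regions[i + 3]);
            int weight = regions[i + 4];
        }
    }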

    /**
     * <p>Current state of the AE algorithm.</p>
     * <p>Whenever the AE algorithm state changes, a
     * MSG_AUTOEXPOSURE notification must be sent if a
     * notification callback is registered.</p>
     * @see #CONTROL_AE_STATE_INACTIVE
     * @see #CONTROL_AE_STATE_SEARCHING
     * @see #CONTROL_AE_STATE_CONVERGED
     * @see #CONTROL_AE_STATE_LOCKED
     * @see #CONTROL_AE_STATE_FLASH_REQUIRED
     * @see #CONTROL_AE_STATE_PRECAPTURE
     */
    public static final Key<Integer> CONTROL_AE_STATE =
            new Key<Integer>("android.control.aeState", int.class);

    /**
     * <p>Whether AF is currently enabled, and what
     * mode it is set to.</p>
     * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
     * <p>If the lens is controlled by the HAL auto-focus algorithm, the HAL should
     * report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState} in
     * result metadata.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see CaptureResult#CONTROL_AF_STATE
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);

    /**
     * <p>List of areas to use for focus
     * estimation.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     * @see CaptureRequest#SCALER_CROP_REGION
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);
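
    // Illustrative example (not part of the original source): a sketch of
    // checking the AE state machine, e.g. to wait for auto-exposure to
    // settle before capturing a still image.
    private static boolean exampleAeConverged(CaptureResult result) {
        Integer aeState = result.get(CONTROL_AE_STATE);
        return aeState != null
                && (aeState == CONTROL_AE_STATE_CONVERGED
                        || aeState == CONTROL_AE_STATE_FLASH_REQUIRED);
    }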

    /**
     * <p>Current state of the AF algorithm.</p>
     * <p>Whenever the AF algorithm state changes, a
     * MSG_AUTOFOCUS notification must be sent if a notification
     * callback is registered.</p>
     * @see #CONTROL_AF_STATE_INACTIVE
     * @see #CONTROL_AF_STATE_PASSIVE_SCAN
     * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
     * @see #CONTROL_AF_STATE_ACTIVE_SCAN
     * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
     */
    public static final Key<Integer> CONTROL_AF_STATE =
            new Key<Integer>("android.control.afState", int.class);

    /**
     * <p>The ID sent with the latest
     * CAMERA2_TRIGGER_AUTOFOCUS call.</p>
     * <p>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger has been
     * received yet by the HAL. Always updated, even if the AF algorithm
     * ignores the trigger.</p>
     * @hide
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
            new Key<Integer>("android.control.afTriggerId", int.class);

    /**
     * <p>Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is.</p>
     * <p>[BC - AWB lock, AWB modes]</p>
     * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>List of areas to use for illuminant
     * estimation.</p>
     * <p>Only used in AUTO mode.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     * @see CaptureRequest#SCALER_CROP_REGION
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);
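
    // Illustrative example (not part of the original source): a sketch of
    // checking whether the auto-focus scan has settled, successfully or not.
    private static boolean exampleAfLocked(CaptureResult result) {
        Integer afState = result.get(CONTROL_AF_STATE);
        return afState != null
                && (afState == CONTROL_AF_STATE_FOCUSED_LOCKED
                        || afState == CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
    }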

    /**
     * <p>Current state of the AWB algorithm.</p>
     * <p>Whenever the AWB algorithm state changes, a
     * MSG_AUTOWHITEBALANCE notification must be sent if a
     * notification callback is registered.</p>
     * @see #CONTROL_AWB_STATE_INACTIVE
     * @see #CONTROL_AWB_STATE_SEARCHING
     * @see #CONTROL_AWB_STATE_CONVERGED
     * @see #CONTROL_AWB_STATE_LOCKED
     */
    public static final Key<Integer> CONTROL_AWB_STATE =
            new Key<Integer>("android.control.awbState", int.class);

    /**
     * <p>Overall mode of the 3A control
     * routines.</p>
     * <p>High-level 3A control. When set to OFF, all 3A control
     * by the HAL is disabled. The application must set the fields for
     * capture parameters itself.</p>
     * <p>When set to AUTO, the individual algorithm controls in
     * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
     * <p>When set to USE_SCENE_MODE, the individual controls in
     * android.control.* are mostly disabled, and the HAL implements
     * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
     * as it wishes. The HAL scene mode 3A settings are provided by
     * android.control.sceneModeOverrides.</p>
     *
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>Operation mode for edge
     * enhancement.</p>
     * <p>Edge/sharpness/detail enhancement. OFF means no
     * enhancement will be applied by the HAL.</p>
     * <p>FAST/HIGH_QUALITY both mean HAL-determined enhancement
     * will be applied. HIGH_QUALITY mode indicates that the
     * HAL should use the highest-quality enhancement algorithms,
     * even if it slows down capture rate. FAST means the HAL should
     * not slow down capture rate when applying edge enhancement.</p>
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);
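
    // Illustrative example (not part of the original source): reading back
    // the overall 3A configuration the device used for this frame; see the
    // CONTROL_MODE documentation above for how these values interact.
    private static void exampleRead3aModes(CaptureResult result) {
        Integer controlMode = result.get(CONTROL_MODE);
        Integer aeMode = result.get(CONTROL_AE_MODE);
        Integer afMode = result.get(CONTROL_AF_MODE);
        Integer awbMode = result.get(CONTROL_AWB_MODE);
    }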

    /**
     * <p>Select flash operation mode.</p>
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>Current state of the flash
     * unit.</p>
     * @see #FLASH_STATE_UNAVAILABLE
     * @see #FLASH_STATE_CHARGING
     * @see #FLASH_STATE_READY
     * @see #FLASH_STATE_FIRED
     */
    public static final Key<Integer> FLASH_STATE =
            new Key<Integer>("android.flash.state", int.class);

    /**
     * <p>GPS coordinates to include in the output JPEG
     * EXIF.</p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>32 characters describing the GPS algorithm to
     * include in EXIF.</p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>Time the GPS fix was made, to include in
     * EXIF.</p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>Orientation of the JPEG image to
     * write.</p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>Compression quality of the final JPEG
     * image.</p>
     * <p>85-95 is the typical usage range.</p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>Compression quality of the JPEG
     * thumbnail.</p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>Resolution of the embedded JPEG thumbnail.</p>
     * <p>When set to (0, 0), the JPEG EXIF must not contain a thumbnail,
     * but the captured JPEG must still be a valid image.</p>
     * <p>When a JPEG image capture is issued, the thumbnail size selected should have
     * the same aspect ratio as the JPEG image.</p>
     */
    public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
            new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);
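
    // Illustrative example (not part of the original source): reading the
    // JPEG thumbnail configuration for this capture. A sketch; it assumes
    // android.hardware.camera2.Size exposes getWidth()/getHeight().
    private static boolean exampleThumbnailOmitted(CaptureResult result) {
        android.hardware.camera2.Size size = result.get(JPEG_THUMBNAIL_SIZE);
        // Per the documentation above, a (0, 0) size means the EXIF
        // thumbnail was omitted while the JPEG itself remains valid.
        return size != null && size.getWidth() == 0 && size.getHeight() == 0;
    }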

    /**
     * <p>Size of the lens aperture.</p>
     * <p>Will not be supported on most devices. Can only
     * be picked from the supported list.</p>
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>State of the lens neutral density
     * filter(s).</p>
     * <p>Will not be supported on most devices. Can only
     * be picked from the supported list.</p>
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>Lens optical zoom setting.</p>
     * <p>Will not be supported on most devices.</p>
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>Distance to the plane of sharpest focus,
     * measured from the frontmost surface of the lens.</p>
     * <p>Should be zero for fixed-focus cameras.</p>
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>The range of scene distances that are in
     * sharp focus (depth of field).</p>
     * <p>If variable focus is not supported, the HAL can still report a
     * fixed depth of field range.</p>
     */
    public static final Key<float[]> LENS_FOCUS_RANGE =
            new Key<float[]>("android.lens.focusRange", float[].class);
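
    // Illustrative example (not part of the original source): reading the
    // lens focus metadata. A sketch; it assumes LENS_FOCUS_RANGE packs the
    // near and far edges of the depth of field as a two-element array.
    private static void exampleReadFocusRange(CaptureResult result) {
        Float focusDistance = result.get(LENS_FOCUS_DISTANCE);
        float[] focusRange = result.get(LENS_FOCUS_RANGE);
        if (focusRange != null && focusRange.length == 2) {
            float near = focusRange[0];
            float far = focusRange[1];
        }
    }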

    /**
     * <p>Whether optical image stabilization is
     * enabled.</p>
     * <p>Will not be supported on most devices.</p>
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>Current lens status.</p>
     * @see #LENS_STATE_STATIONARY
     * @see #LENS_STATE_MOVING
     */
    public static final Key<Integer> LENS_STATE =
            new Key<Integer>("android.lens.state", int.class);

    /**
     * <p>Mode of operation for the noise reduction
     * algorithm.</p>
     * <p>Noise filtering control. OFF means no noise reduction
     * will be applied by the HAL.</p>
     * <p>FAST/HIGH_QUALITY both mean HAL-determined noise filtering
     * will be applied. HIGH_QUALITY mode indicates that the HAL
     * should use the highest-quality noise filtering algorithms,
     * even if it slows down capture rate. FAST means the HAL should not
     * slow down capture rate when applying noise filtering.</p>
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>Whether a result given to the framework is the
     * final one for the capture, or only a partial that contains a
     * subset of the full set of dynamic metadata
     * values.</p>
     * <p>The entries in the result metadata buffers for a
     * single capture may not overlap, except for this entry. The
     * FINAL buffers must retain FIFO ordering relative to the
     * requests that generate them, so the FINAL buffer for frame 3 must
     * always be sent to the framework after the FINAL buffer for frame 2, and
     * before the FINAL buffer for frame 4. PARTIAL buffers may be returned
     * in any order relative to other frames, but all PARTIAL buffers for a given
     * capture must arrive before the FINAL buffer for that capture. This entry may
     * only be used by the HAL if quirks.usePartialResult is set to 1.</p>
     * <p><b>Optional</b> - This value may be null on some devices.</p>
     * @hide
     */
    public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
            new Key<Boolean>("android.quirks.partialResult", boolean.class);

    /**
     * <p>A frame counter set by the framework. This value monotonically
     * increases with every new result (that is, each new result has a unique
     * frameCount value).</p>
     * <p>Reset on release()</p>
     */
    public static final Key<Integer> REQUEST_FRAME_COUNT =
            new Key<Integer>("android.request.frameCount", int.class);

    /**
     * <p>An application-specified ID for the current
     * request. Must be maintained unchanged in the output
     * frame.</p>
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>(x, y, width, height).</p>
     * <p>A rectangle with the top-left corner at (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.</p>
     * <p>HAL2.x uses only (x, y, width)</p>
     * <p>Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.</p>
     * <p>For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).</p>
     * <p>Conversely, if the crop region is set to 16:9, then 4:3
     * outputs should crop horizontally (pillarbox), and 16:9
     * streams should match exactly. These additional crops must
     * be centered within the crop region.</p>
     * <p>The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for the
     * corner are allowed for raw output if the full pixel array is
     * larger than the active pixel array. Width and height may be
     * rounded to the nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor( activeArraySize.height /
     * android.scaler.maxDigitalZoom ), respectively.</p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
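
    // Illustrative example (not part of the original source): the smallest
    // legal crop-region dimensions under the floor(active / maxZoom) rule
    // quoted above. A sketch; the active array size and maxDigitalZoom are
    // assumed to come from the device's CameraCharacteristics.
    private static int[] exampleMinCropSize(int activeWidth, int activeHeight,
            float maxDigitalZoom) {
        int minWidth = (int) Math.floor(activeWidth / maxDigitalZoom);
        int minHeight = (int) Math.floor(activeHeight / maxDigitalZoom);
        return new int[] { minWidth, minHeight };
    }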

    /**
     * <p>Duration each pixel is exposed to
     * light.</p>
     * <p>If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).</p>
     * <p>1/10000 - 30 sec range. No bulb mode.</p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>Duration from the start of frame exposure to
     * the start of the next frame's exposure.</p>
     * <p>Exposure time has priority, so the duration is set to
     * max(duration, exposure time + overhead).</p>
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);
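
    // Illustrative example (not part of the original source): the frame
    // duration rule quoted above, duration = max(duration, exposure time +
    // overhead). A sketch; the per-device overhead value is hypothetical.
    private static long exampleEffectiveFrameDuration(long requestedDurationNs,
            long exposureTimeNs, long overheadNs) {
        return Math.max(requestedDurationNs, exposureTimeNs + overheadNs);
    }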

    /**
     * <p>Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.</p>
     * <p>If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).</p>
     * <p>ISO 12232:2006 REI method.</p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>Time at the start of exposure of the first
     * row.</p>
     * <p>Monotonic; should be synced to other timestamps in the
     * system.</p>
     */
    public static final Key<Long> SENSOR_TIMESTAMP =
            new Key<Long>("android.sensor.timestamp", long.class);

    /**
     * <p>The temperature of the sensor, sampled at the time
     * exposure began for this frame.</p>
     * <p>The thermal diode being queried should be inside the sensor PCB, or
     * somewhere close to it.</p>
     */
    public static final Key<Float> SENSOR_TEMPERATURE =
            new Key<Float>("android.sensor.temperature", float.class);

    /**
     * <p>State of the face detector
     * unit.</p>
     * <p>Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}.</p>
     *
     * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>List of unique IDs for detected
     * faces.</p>
     * <p>Only available if faceDetectMode == FULL</p>
     * @hide
     */
    public static final Key<int[]> STATISTICS_FACE_IDS =
            new Key<int[]>("android.statistics.faceIds", int[].class);

    /**
     * <p>List of landmarks for detected
     * faces.</p>
     * <p>Only available if faceDetectMode == FULL</p>
     * @hide
     */
    public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
            new Key<int[]>("android.statistics.faceLandmarks", int[].class);

    /**
     * <p>List of the bounding rectangles for detected
     * faces.</p>
     * <p>Only available if faceDetectMode != OFF</p>
     * @hide
     */
    public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
            new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);

    /**
     * <p>List of the face confidence scores for
     * detected faces.</p>
     * <p>Only available if faceDetectMode != OFF. The value should be
     * meaningful (for example, setting 100 at all times is illegal).</p>
     * @hide
     */
    public static final Key<byte[]> STATISTICS_FACE_SCORES =
            new Key<byte[]>("android.statistics.faceScores", byte[].class);

    /**
     * <p>The shading map is a low-resolution floating-point map
     * that lists the coefficients used to correct for vignetting, for each
     * Bayer color channel.</p>
     * <p>The least shaded section of the image should have a gain factor
     * of 1; all other sections should have gains above 1.</p>
     * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
     * must take into account the colorCorrection settings.</p>
     * <p>The shading map is for the entire active pixel array, and is not
     * affected by the crop region specified in the request. Each shading map
     * entry is the value of the shading compensation map over a specific
     * pixel on the sensor. Specifically, with an (N x M) resolution shading
     * map, and an active pixel array size (W x H), shading map entry
     * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
     * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
     * The map is assumed to be bilinearly interpolated between the sample points.</p>
     * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
     * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
     * The shading map is stored in a fully interleaved format, and its size
     * is provided in the camera static metadata by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}.</p>
     * <p>The shading map should have on the order of 30-40 rows and columns,
     * and must be smaller than 64x64.</p>
     * <p>As an example, given a very small map defined as:</p>
     * <pre><code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]
     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} =
     * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
     *   1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
     *   1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
     *   1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
     *   1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
     *   1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
     * </code></pre>
     * <p>The low-resolution scaling map images for each channel are
     * (displayed using nearest-neighbor interpolation):</p>
     * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
     * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
     * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
     * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
     * <p>As a visualization only, inverting the full-color map to recover an
     * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
     * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
     */
    public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
            new Key<float[]>("android.statistics.lensShadingMap", float[].class);
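
    // Illustrative example (not part of the original source): indexing the
    // interleaved shading map documented above. For an (N x M) map, the
    // four gains of map cell (x, y) are stored consecutively, so channel c
    // (in [R, G_even, G_odd, B] order) lives at ((y * N) + x) * 4 + c.
    // A sketch under that row-major layout assumption.
    private static float exampleShadingGain(float[] shadingMap, int n,
            int x, int y, int channel) {
        return shadingMap[((y * n) + x) * 4 + channel];
    }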

    /**
     * <p>The best-fit color channel gains calculated
     * by the HAL's statistics units for the current output frame.</p>
     * <p>This may be different than the gains used for this frame,
     * since statistics processing on data from a new frame
     * typically completes after the transform has already been
     * applied to that frame.</p>
     * <p>The 4 channel gains are defined in Bayer domain;
     * see {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} for details.</p>
     * <p>This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     */
    public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
            new Key<float[]>("android.statistics.predictedColorGains", float[].class);

    /**
     * <p>The best-fit color transform matrix estimate
     * calculated by the HAL's statistics units for the current
     * output frame.</p>
     * <p>The HAL must provide the estimate from its
     * statistics unit on the white balance transforms to use
     * for the next frame. These are the values the HAL believes
     * are the best fit for the current output frame. This may
     * be different than the transform used for this frame, since
     * statistics processing on data from a new frame typically
     * completes after the transform has already been applied to
     * that frame.</p>
     * <p>These estimates must be provided for all frames, even if
     * capture settings and color transforms are set by the application.</p>
     * <p>This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.</p>
     */
    public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
            new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);

    /**
     * <p>The HAL-estimated scene illumination lighting
     * frequency.</p>
     * <p>Report NONE if there doesn't appear to be flickering
     * illumination.</p>
     * @see #STATISTICS_SCENE_FLICKER_NONE
     * @see #STATISTICS_SCENE_FLICKER_50HZ
     * @see #STATISTICS_SCENE_FLICKER_60HZ
     */
    public static final Key<Integer> STATISTICS_SCENE_FLICKER =
            new Key<Integer>("android.statistics.sceneFlicker", int.class);

    /**
     * <p>Table mapping blue input values to output
     * values.</p>
     * <p>Tonemapping / contrast / gamma curve for the blue
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is CONTRAST_CURVE.</p>
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_MODE
     * @see CaptureRequest#TONEMAP_CURVE_RED
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>Table mapping green input values to output
     * values.</p>
     * <p>Tonemapping / contrast / gamma curve for the green
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is CONTRAST_CURVE.</p>
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_MODE
     * @see CaptureRequest#TONEMAP_CURVE_RED
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);

    /**
     * <p>Table mapping red input values to output
     * values.</p>
     * <p>Tonemapping / contrast / gamma curve for the red
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is CONTRAST_CURVE.</p>
     * <p>Since the input and output ranges may vary depending on
     * the camera pipeline, the input and output pixel values
     * are represented by normalized floating-point values
     * between 0 and 1, with 0 == black and 1 == white.</p>
     * <p>The curve should be linearly interpolated between the
     * defined points. The points will be listed in increasing
     * order of P_IN. For example, if the array is: [0.0, 0.0,
     * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
     * for a few sample points would be: 0 -> 0, 0.15 ->
     * 0.25, 0.3 -> 0.5, 0.5 -> 0.64.</p>
     *
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);
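
    // Illustrative example (not part of the original source): evaluating a
    // tonemap curve at one input by linear interpolation between the
    // (P_IN, P_OUT) pairs, matching the worked example above (for
    // [0.0, 0.0, 0.3, 0.5, 1.0, 1.0], an input of 0.15 maps to 0.25).
    private static float exampleEvalTonemapCurve(float[] curve, float in) {
        // The curve is [in0, out0, in1, out1, ...], sorted by increasing input.
        for (int i = 0; i + 3 < curve.length; i += 2) {
            float in0 = curve[i], out0 = curve[i + 1];
            float in1 = curve[i + 2], out1 = curve[i + 3];
            if (in <= in1) {
                float t = (in1 == in0) ? 0f : (in - in0) / (in1 - in0);
                return out0 + t * (out1 - out0);
            }
        }
        return curve[curve.length - 1]; // clamp above the last point
    }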

    /**
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.</p>
     * <p>In particular, the LED <em>must</em> always be on when the data could be
     * transmitted off the device. The LED <em>should</em> always be on whenever
     * data is stored locally on the device.</p>
     * <p>The LED <em>may</em> be off if a trusted application is using the data
     * in a way that doesn't violate the above rules.</p>
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>Whether black-level compensation is locked
     * to its current values, or is free to vary.</p>
     * <p>Whether the black level offset was locked for this frame. Should be
     * ON if {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock} was ON in the capture request, unless
     * a change in other capture settings forced the camera device to
     * perform a black level reset.</p>
     *
     * @see CaptureRequest#BLACK_LEVEL_LOCK
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/

    /**
     * <p>
     * List of the {@link Face Faces} detected through camera face detection
     * in this result.
     * </p>
     * <p>
     * Only available if {@link #STATISTICS_FACE_DETECT_MODE} {@code !=}
     * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_OFF OFF}.
     * </p>
     *
     * @see Face
     */
    public static final Key<Face[]> STATISTICS_FACES =
            new Key<Face[]>("android.statistics.faces", Face[].class);
}