CaptureResult.java revision e83c4ebc62f10b06139bd4cf6109a1d0f3521032
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.camera2.impl.CameraMetadataNative;

/**
 * <p>The results of a single image capture from the image sensor.</p>
 *
 * <p>Contains the final configuration for the capture hardware (sensor, lens,
 * flash), the processing pipeline, the control algorithms, and the output
 * buffers.</p>
 *
 * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
 * {@link CaptureRequest}. All properties listed for capture requests can also
 * be queried on the capture result, to determine the final values used for
 * capture. The result also includes additional metadata about the state of the
 * camera device during the capture.</p>
 */
public final class CaptureResult extends CameraMetadata {

    private final CameraMetadataNative mResults;
    private final CaptureRequest mRequest;
    private final int mSequenceId;

    /**
     * Takes ownership of the passed-in properties object
     * @hide
     */
    public CaptureResult(CameraMetadataNative results, CaptureRequest parent, int sequenceId) {
        if (results == null) {
            throw new IllegalArgumentException("results was null");
        }

        if (parent == null) {
            throw new IllegalArgumentException("parent was null");
        }

        mResults = results;
        mRequest = parent;
        mSequenceId = sequenceId;
    }

    @Override
    public <T> T get(Key<T> key) {
        if (key == STATISTICS_FACES) { // Don't throw IllegalArgumentException
            // TODO: Implement android.statistics.faces
            return null;
        }

        return mResults.get(key);
    }

    /**
     * Get the request associated with this result.
     *
     * <p>Whenever a request is successfully captured, with
     * {@link CameraDevice.CaptureListener#onCaptureCompleted},
     * the {@code result}'s {@code getRequest()} will return that {@code request}.
     * </p>
     *
     * <p>In particular,
     * <pre><code>cameraDevice.capture(someRequest, new CaptureListener() {
     *     {@literal @}Override
     *     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
     *         assert(myResult.getRequest().equals(myRequest));
     *     }
     * });
     * </code></pre>
     * </p>
     *
     * @return The request associated with this result. Never {@code null}.
     */
    public CaptureRequest getRequest() {
        return mRequest;
    }
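    // Editor's note: a minimal illustrative sketch, not part of the API
    // surface. It shows how get() and getRequest() are typically used together
    // on a result delivered to a capture listener; the method name is
    // hypothetical.
    private static void exampleInspectResult(CaptureResult result) {
        // Every result carries the request that produced it.
        CaptureRequest request = result.getRequest();
        // get() returns null for keys the device did not fill in.
        Integer aeState = result.get(CONTROL_AE_STATE);
        if (request != null && aeState != null
                && aeState == CONTROL_AE_STATE_CONVERGED) {
            // Exposure settled for this frame.
        }
    }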
    /**
     * Get the frame number associated with this result.
     *
     * <p>Whenever a request has been processed, regardless of failure or success,
     * it gets a unique frame number assigned to its future result/failure.</p>
     *
     * <p>This value monotonically increments, starting with 0,
     * for every new result or failure; and the scope is the lifetime of the
     * {@link CameraDevice}.</p>
     *
     * @return int frame number
     */
    public int getFrameNumber() {
        return get(REQUEST_FRAME_COUNT);
    }

    /**
     * The sequence ID for this result that was returned by the
     * {@link CameraDevice#capture} family of functions.
     *
     * <p>The sequence ID is a unique monotonically increasing value starting from 0,
     * incremented every time a new group of requests is submitted to the CameraDevice.</p>
     *
     * @return int The ID for the sequence of requests that this capture result is a part of
     *
     * @see CameraDevice.CaptureListener#onCaptureSequenceCompleted
     */
    public int getSequenceId() {
        return mSequenceId;
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/

    /**
     * <p>
     * A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space
     * </p>
     * <p>
     * This matrix is either set by the HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * In the latter case, the HAL may round the matrix to account
     * for precision issues; the final rounded matrix should be
     * reported back in this matrix result metadata.
     * </p>
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>
     * Gains applying to Bayer color channels for
     * white-balance
     * </p>
     * <p>
     * The 4-channel white-balance gains are defined in
     * the order of [R, G_even, G_odd, B], where G_even is the gain
     * for green pixels on even rows of the output, and G_odd
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the G_even value, and write G_odd equal to
     * G_even in the output result metadata.
     * </p><p>
     * This array is either set by the HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * The output should be the gains actually applied by the HAL to
     * the current frame.
     * </p>
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);
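    // Editor's note: an illustrative sketch only. It unpacks the 4-channel
    // white-balance gains documented above in [R, G_even, G_odd, B] order;
    // the helper itself is hypothetical.
    private static float exampleAverageGreenGain(CaptureResult result) {
        float[] gains = result.get(COLOR_CORRECTION_GAINS);
        if (gains == null || gains.length != 4) {
            return 1.0f; // no gains reported for this frame
        }
        float gEven = gains[1];
        float gOdd = gains[2];
        // HALs without separate green channels report G_odd equal to G_even.
        return (gEven + gOdd) / 2.0f;
    }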
    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_PRECAPTURE_METERING call
     * </p>
     * <p>
     * Must be 0 if no
     * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
     * by the HAL. Always updated even if the AE algorithm ignores the
     * trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
            new Key<Integer>("android.control.aePrecaptureId", int.class);

    /**
     * <p>
     * List of areas to use for
     * metering
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.
     * </p>
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);
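    // Editor's note: an illustrative sketch only. The javadoc above defines
    // each metering area as five ints (xmin, ymin, xmax, ymax, weight), with
    // the rectangle inclusive of its coordinates; this hypothetical helper
    // unpacks the flattened array, dropping the weights.
    private static Rect[] exampleUnpackRegions(int[] regions) {
        if (regions == null || regions.length % 5 != 0) {
            return new Rect[0];
        }
        Rect[] rects = new Rect[regions.length / 5];
        for (int i = 0; i < rects.length; i++) {
            int base = i * 5;
            // regions[base + 4] is the weight; 0-weight areas carry no request.
            rects[i] = new Rect(regions[base], regions[base + 1],
                    regions[base + 2], regions[base + 3]);
        }
        return rects;
    }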
    /**
     * <p>
     * Current state of AE algorithm
     * </p>
     * <p>
     * Whenever the AE algorithm state changes, a
     * MSG_AUTOEXPOSURE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AE_STATE_INACTIVE
     * @see #CONTROL_AE_STATE_SEARCHING
     * @see #CONTROL_AE_STATE_CONVERGED
     * @see #CONTROL_AE_STATE_LOCKED
     * @see #CONTROL_AE_STATE_FLASH_REQUIRED
     * @see #CONTROL_AE_STATE_PRECAPTURE
     */
    public static final Key<Integer> CONTROL_AE_STATE =
            new Key<Integer>("android.control.aeState", int.class);

    /**
     * <p>
     * Whether AF is currently enabled, and what
     * mode it is set to
     * </p>
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);

    /**
     * <p>
     * List of areas to use for focus
     * estimation
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.
     * </p>
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);

    /**
     * <p>
     * Current state of AF algorithm
     * </p>
     * <p>
     * Whenever the AF algorithm state changes, a
     * MSG_AUTOFOCUS notification must be sent if a notification
     * callback is registered.
     * </p>
     * @see #CONTROL_AF_STATE_INACTIVE
     * @see #CONTROL_AF_STATE_PASSIVE_SCAN
     * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
     * @see #CONTROL_AF_STATE_ACTIVE_SCAN
     * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
     */
    public static final Key<Integer> CONTROL_AF_STATE =
            new Key<Integer>("android.control.afState", int.class);
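    // Editor's note: an illustrative sketch only, showing one way an
    // application might interpret the AF state constants referenced above.
    // The method is hypothetical and not part of the generated metadata block.
    private static boolean exampleIsFocusSettled(CaptureResult result) {
        Integer afState = result.get(CONTROL_AF_STATE);
        if (afState == null) {
            return false;
        }
        // Both locked states mean the AF scan has finished, successfully or not.
        return afState == CONTROL_AF_STATE_FOCUSED_LOCKED
                || afState == CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
    }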
    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_AUTOFOCUS call
     * </p>
     * <p>
     * Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
     * received yet by the HAL. Always updated even if the AF algorithm
     * ignores the trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
            new Key<Integer>("android.control.afTriggerId", int.class);

    /**
     * <p>
     * Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is
     * </p>
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>
     * List of areas to use for illuminant
     * estimation
     * </p>
     * <p>
     * Only used in AUTO mode.
     * </p><p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.
     * </p>
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);

    /**
     * <p>
     * Current state of AWB algorithm
     * </p>
     * <p>
     * Whenever the AWB algorithm state changes, a
     * MSG_AUTOWHITEBALANCE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AWB_STATE_INACTIVE
     * @see #CONTROL_AWB_STATE_SEARCHING
     * @see #CONTROL_AWB_STATE_CONVERGED
     * @see #CONTROL_AWB_STATE_LOCKED
     */
    public static final Key<Integer> CONTROL_AWB_STATE =
            new Key<Integer>("android.control.awbState", int.class);

    /**
     * <p>
     * Overall mode of 3A control
     * routines
     * </p>
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>
     * Operation mode for edge
     * enhancement
     * </p>
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);

    /**
     * <p>
     * Select flash operation mode
     * </p>
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>
     * Current state of the flash
     * unit
     * </p>
     * @see #FLASH_STATE_UNAVAILABLE
     * @see #FLASH_STATE_CHARGING
     * @see #FLASH_STATE_READY
     * @see #FLASH_STATE_FIRED
     */
    public static final Key<Integer> FLASH_STATE =
            new Key<Integer>("android.flash.state", int.class);
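    // Editor's note: an illustrative sketch only; checks the flash-unit state
    // constants referenced above. Hypothetical helper, not generated code.
    private static boolean exampleDidFlashFire(CaptureResult result) {
        Integer flashState = result.get(FLASH_STATE);
        return flashState != null && flashState == FLASH_STATE_FIRED;
    }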
    /**
     * <p>
     * GPS coordinates to include in output JPEG
     * EXIF
     * </p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>
     * 32 characters describing GPS algorithm to
     * include in EXIF
     * </p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>
     * Time GPS fix was made to include in
     * EXIF
     * </p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>
     * Orientation of JPEG image to
     * write
     * </p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>
     * Compression quality of the final JPEG
     * image
     * </p>
     * <p>
     * 85-95 is typical usage range
     * </p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>
     * Compression quality of JPEG
     * thumbnail
     * </p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>
     * Resolution of embedded JPEG
     * thumbnail
     * </p>
     */
    public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
            new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);

    /**
     * <p>
     * Size of the lens aperture
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>
     * State of lens neutral density
     * filter(s)
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>
     * Lens optical zoom setting
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>
     * Distance to plane of sharpest focus,
     * measured from frontmost surface of the lens
     * </p>
     * <p>
     * Should be zero for fixed-focus cameras
     * </p>
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>
     * The range of scene distances that are in
     * sharp focus (depth of field)
     * </p>
     * <p>
     * If variable focus is not supported, can still report a
     * fixed depth of field range
     * </p>
     */
    public static final Key<float[]> LENS_FOCUS_RANGE =
            new Key<float[]>("android.lens.focusRange", float[].class);

    /**
     * <p>
     * Whether optical image stabilization is
     * enabled.
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>
     * Current lens status
     * </p>
     * @see #LENS_STATE_STATIONARY
     * @see #LENS_STATE_MOVING
     */
    public static final Key<Integer> LENS_STATE =
            new Key<Integer>("android.lens.state", int.class);

    /**
     * <p>
     * Mode of operation for the noise reduction
     * algorithm
     * </p>
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>
     * A frame counter set by the framework. This value monotonically
     * increases with every new result (that is, each new result has a unique
     * frameCount value).
     * </p>
     * <p>
     * Reset on release()
     * </p>
     */
    public static final Key<Integer> REQUEST_FRAME_COUNT =
            new Key<Integer>("android.request.frameCount", int.class);

    /**
     * <p>
     * An application-specified ID for the current
     * request. Must be maintained unchanged in output
     * frame
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>
     * (x, y, width, height).
     * </p><p>
     * A rectangle with the top-left corner of (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.
     * </p><p>
     * HAL2.x uses only (x, y, width)
     * </p>
     * <p>
     * Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.
     * </p><p>
     * For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).
     * </p><p>
     * Conversely, if the crop region is set to a 16:9 aspect
     * ratio, then 4:3 outputs should crop horizontally (pillarbox),
     * and 16:9 streams should match exactly. These additional
     * crops must be centered within the crop region.
     * </p><p>
     * The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor(
     * activeArraySize.height / android.scaler.maxDigitalZoom ),
     * respectively.
     * </p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
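    // Editor's note: an illustrative sketch of the additional per-stream crop
    // described above: a 16:9 stream letterboxes a 4:3 crop region, a 4:3
    // stream pillarboxes a 16:9 one, and the extra crop is centered within
    // the crop region. The helper is hypothetical.
    private static Rect exampleStreamCrop(Rect cropRegion, int outW, int outH) {
        int w = cropRegion.width();
        int h = cropRegion.height();
        if (w * outH > outW * h) {
            // Crop region is wider than the stream: crop horizontally (pillarbox).
            w = h * outW / outH;
        } else {
            // Crop region is taller than the stream: crop vertically (letterbox).
            h = w * outH / outW;
        }
        int left = cropRegion.left + (cropRegion.width() - w) / 2;
        int top = cropRegion.top + (cropRegion.height() - h) / 2;
        return new Rect(left, top, left + w, top + h);
    }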
    /**
     * <p>
     * Duration each pixel is exposed to
     * light.
     * </p><p>
     * If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).
     * </p>
     * <p>
     * 1/10000 - 30 sec range. No bulb mode
     * </p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>
     * Duration from start of frame exposure to
     * start of next frame exposure
     * </p>
     * <p>
     * Exposure time has priority, so duration is set to
     * max(duration, exposure time + overhead)
     * </p>
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>
     * Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.
     * </p><p>
     * If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).
     * </p>
     * <p>
     * ISO 12232:2006 REI method
     * </p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>
     * Time at start of exposure of first
     * row
     * </p>
     * <p>
     * Monotonic, should be synced to other timestamps in
     * system
     * </p>
     */
    public static final Key<Long> SENSOR_TIMESTAMP =
            new Key<Long>("android.sensor.timestamp", long.class);
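    // Editor's note: an illustrative sketch only (hypothetical helper, made-up
    // overhead constant). Per the SENSOR_FRAME_DURATION javadoc above,
    // exposure time has priority, so a consistent result should satisfy
    // frameDuration >= exposureTime + per-frame overhead.
    private static boolean exampleFrameDurationConsistent(CaptureResult result) {
        Long exposureNs = result.get(SENSOR_EXPOSURE_TIME);
        Long frameNs = result.get(SENSOR_FRAME_DURATION);
        if (exposureNs == null || frameNs == null) {
            return true; // nothing to check for this frame
        }
        long assumedOverheadNs = 0; // real readout overhead is device-specific
        return frameNs >= exposureNs + assumedOverheadNs;
    }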
    /**
     * <p>
     * The temperature of the sensor, sampled at the time
     * exposure began for this frame.
     * </p><p>
     * The thermal diode being queried should be inside the sensor PCB, or
     * somewhere close to it.
     * </p>
     *
     * <b>Optional</b> - This value may be null on some devices.
     *
     * <b>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL}</b> -
     * Present on all devices that report being FULL level hardware devices in the
     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL HARDWARE_LEVEL} key.
     */
    public static final Key<Float> SENSOR_TEMPERATURE =
            new Key<Float>("android.sensor.temperature", float.class);

    /**
     * <p>
     * State of the face detector
     * unit
     * </p>
     * <p>
     * Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * android.statistics.info.availableFaceDetectModes.
     * </p>
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>
     * List of unique IDs for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     */
    public static final Key<int[]> STATISTICS_FACE_IDS =
            new Key<int[]>("android.statistics.faceIds", int[].class);

    /**
     * <p>
     * List of landmarks for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     */
    public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
            new Key<int[]>("android.statistics.faceLandmarks", int[].class);

    /**
     * <p>
     * List of the bounding rectangles for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF
     * </p>
     */
    public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
            new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);

    /**
     * <p>
     * List of the face confidence scores for
     * detected faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF. The value should be
     * meaningful (for example, setting 100 at all times is illegal).
     * </p>
     */
    public static final Key<byte[]> STATISTICS_FACE_SCORES =
            new Key<byte[]>("android.statistics.faceScores", byte[].class);

    /**
     * <p>
     * A low-resolution map of lens shading, per
     * color channel
     * </p>
     * <p>
     * Assume bilinear interpolation of map. The least
     * shaded section of the image should have a gain factor
     * of 1; all other sections should have gains above 1.
     * The map should be on the order of 30-40 rows, and
     * must be smaller than 64x64.
     * </p><p>
     * When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
     * must take into account the colorCorrection settings.
     * </p>
     */
    public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
            new Key<float[]>("android.statistics.lensShadingMap", float[].class);
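    // Editor's note: an illustrative sketch of the bilinear interpolation the
    // shading-map javadoc above assumes. The single-channel, row-major layout
    // and the explicit cols/rows parameters are assumptions for illustration;
    // the actual per-channel packing is defined by the HAL documentation.
    private static float exampleSampleShadingMap(float[] map, int cols, int rows,
            float u, float v) {
        // u, v in [0, 1] across the map; clamp so the 2x2 neighborhood fits.
        float x = Math.min(u * (cols - 1), cols - 1.001f);
        float y = Math.min(v * (rows - 1), rows - 1.001f);
        int x0 = (int) x, y0 = (int) y;
        float fx = x - x0, fy = y - y0;
        float top = map[y0 * cols + x0] * (1 - fx)
                + map[y0 * cols + x0 + 1] * fx;
        float bottom = map[(y0 + 1) * cols + x0] * (1 - fx)
                + map[(y0 + 1) * cols + x0 + 1] * fx;
        return top * (1 - fy) + bottom * fy;
    }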
    /**
     * <p>
     * The best-fit color channel gains calculated
     * by the HAL's statistics units for the current output frame
     * </p>
     * <p>
     * This may be different from the gains used for this frame,
     * since statistics processing on data from a new frame
     * typically completes after the transform has already been
     * applied to that frame.
     * </p><p>
     * The 4 channel gains are defined in Bayer domain,
     * see android.colorCorrection.gains for details.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
            new Key<float[]>("android.statistics.predictedColorGains", float[].class);

    /**
     * <p>
     * The best-fit color transform matrix estimate
     * calculated by the HAL's statistics units for the current
     * output frame
     * </p>
     * <p>
     * The HAL must provide the estimate from its
     * statistics unit on the white balance transforms to use
     * for the next frame. These are the values the HAL believes
     * are the best fit for the current output frame. This may
     * be different from the transform used for this frame, since
     * statistics processing on data from a new frame typically
     * completes after the transform has already been applied to
     * that frame.
     * </p><p>
     * These estimates must be provided for all frames, even if
     * capture settings and color transforms are set by the application.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
            new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);

    /**
     * <p>
     * The HAL estimated scene illumination lighting
     * frequency
     * </p>
     * <p>
     * Report NONE if there doesn't appear to be flickering
     * illumination
     * </p>
     * @see #STATISTICS_SCENE_FLICKER_NONE
     * @see #STATISTICS_SCENE_FLICKER_50HZ
     * @see #STATISTICS_SCENE_FLICKER_60HZ
     */
    public static final Key<Integer> STATISTICS_SCENE_FLICKER =
            new Key<Integer>("android.statistics.sceneFlicker", int.class);

    /**
     * <p>
     * Table mapping blue input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the blue
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>
     * Table mapping green input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the green
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);

    /**
     * <p>
     * Table mapping red input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the red
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * Since the input and output ranges may vary depending on
     * the camera pipeline, the input and output pixel values
     * are represented by normalized floating-point values
     * between 0 and 1, with 0 == black and 1 == white.
     * </p><p>
     * The curve should be linearly interpolated between the
     * defined points. The points will be listed in increasing
     * order of P_IN. For example, if the array is: [0.0, 0.0,
     * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
     * for a few sample points would be: 0 -> 0, 0.15 ->
     * 0.25, 0.3 -> 0.5, 0.5 -> 0.64
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);
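    // Editor's note: an illustrative sketch of the linear interpolation the
    // curve javadoc above describes; with the sample curve
    // [0.0, 0.0, 0.3, 0.5, 1.0, 1.0] this hypothetical helper maps
    // 0.15 to 0.25 and 0.5 to ~0.64, matching the documented example.
    private static float exampleApplyCurve(float[] curve, float in) {
        // curve is a flattened list of (P_IN, P_OUT) pairs in increasing P_IN.
        for (int i = 0; i + 3 < curve.length; i += 2) {
            float x0 = curve[i], y0 = curve[i + 1];
            float x1 = curve[i + 2], y1 = curve[i + 3];
            if (in <= x1) {
                return y0 + (in - x0) * (y1 - y0) / (x1 - x0);
            }
        }
        return curve[curve.length - 1]; // clamp above the last defined point
    }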
    /**
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>
     * This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.
     * </p><p>
     * In particular, the LED *must* always be on when the data could be
     * transmitted off the device. The LED *should* always be on whenever
     * data is stored locally on the device.
     * </p><p>
     * The LED *may* be off if a trusted application is using the data that
     * doesn't violate the above rules.
     * </p>
     *
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>
     * Whether black-level compensation is locked
     * to its current values, or is free to vary
     * </p>
     * <p>
     * When set to ON, the values used for black-level
     * compensation must not change until the lock is set to
     * OFF
     * </p><p>
     * Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the HAL must report whether setting the
     * black level lock was successful in the output result
     * metadata.
     * </p><p>
     * The black level locking must happen at the sensor, and not at the ISP.
     * If for some reason black level locking is no longer legal (for example,
     * the analog gain has changed, which forces black levels to be
     * recalculated), then the HAL is free to override this request (and it
     * must report 'OFF' when this does happen) until the next time locking
     * is legal again.
     * </p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/

    /**
     * <p>
     * List of the {@link Face Faces} detected through camera face detection
     * in this result.
     * </p>
     * <p>
     * Only available if {@link #STATISTICS_FACE_DETECT_MODE} {@code !=}
     * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_OFF OFF}.
     * </p>
     *
     * @see Face
     */
    public static final Key<Face[]> STATISTICS_FACES =
            new Key<Face[]>("android.statistics.faces", Face[].class);
}
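/*
 * Editor's note: an illustrative usage sketch for STATISTICS_FACES, kept in a
 * comment so the class body above stays exactly as written. "result" stands
 * for a hypothetical CaptureResult received in a capture listener:
 *
 *     Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
 *     if (faces != null) { // null until android.statistics.faces is
 *                          // implemented (see the TODO in get() above)
 *         for (Face face : faces) {
 *             // Face rectangles use the active-array coordinate system
 *             // described in the region keys above.
 *         }
 *     }
 */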