CaptureResult.java revision 6090995951c6e2e4dcf38102f01793f8a94166e1
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.hardware.camera2.impl.CameraMetadataNative;

/**
 * <p>The results of a single image capture from the image sensor.</p>
 *
 * <p>Contains the final configuration for the capture hardware (sensor, lens,
 * flash), the processing pipeline, the control algorithms, and the output
 * buffers.</p>
 *
 * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
 * {@link CaptureRequest}. All properties listed for capture requests can also
 * be queried on the capture result, to determine the final values used for
 * capture. The result also includes additional metadata about the state of the
 * camera device during the capture.</p>
 *
 */
public final class CaptureResult extends CameraMetadata {

    // Final metadata values produced by the camera device for this capture;
    // never null (enforced by the constructor).
    private final CameraMetadataNative mResults;
    // The request that produced this result; never null.
    private final CaptureRequest mRequest;
    // ID of the request sequence (capture/burst submission) this result belongs to.
    private final int mSequenceId;

    /**
     * Takes ownership of the passed-in properties object
     *
     * @param results the capture's final metadata; must not be {@code null}
     * @param parent the request that produced this result; must not be {@code null}
     * @param sequenceId the ID of the request sequence this capture is part of
     * @throws IllegalArgumentException if {@code results} or {@code parent} is {@code null}
     * @hide
     */
    public CaptureResult(CameraMetadataNative results, CaptureRequest parent, int sequenceId) {
        if (results == null) {
            throw new IllegalArgumentException("results was null");
        }

        if (parent == null) {
            throw new IllegalArgumentException("parent was null");
        }

        mResults = results;
        mRequest = parent;
        mSequenceId = sequenceId;
    }

    // Delegates lookup to the native metadata buffer; returns null when the
    // key is absent from this result.
    @Override
    public <T> T get(Key<T> key) {
        return mResults.get(key);
    }

    /**
     * Get the request associated with this result.
     *
     * <p>Whenever a request is successfully captured, with
     * {@link CameraDevice.CaptureListener#onCaptureCompleted},
     * the {@code result}'s {@code getRequest()} will return that {@code request}.
     * </p>
     *
     * <p>In particular,
     * <code><pre>cameraDevice.capture(someRequest, new CaptureListener() {
     *     {@literal @}Override
     *     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
     *         assert(myResult.getRequest().equals(myRequest) == true);
     *     }
     * };
     * </pre></code>
     * </p>
     *
     * @return The request associated with this result. Never {@code null}.
     */
    public CaptureRequest getRequest() {
        return mRequest;
    }

    /**
     * Get the frame number associated with this result.
     *
     * <p>Whenever a request has been processed, regardless of failure or success,
     * it gets a unique frame number assigned to its future result/failure.</p>
     *
     * <p>This value monotonically increments, starting with 0,
     * for every new result or failure; and the scope is the lifetime of the
     * {@link CameraDevice}.</p>
     *
     * @return int frame number
     */
    public int getFrameNumber() {
        // NOTE(review): auto-unboxes the REQUEST_FRAME_COUNT entry; presumably the
        // device always populates this key — an absent entry would NPE here.
        return get(REQUEST_FRAME_COUNT);
    }

    /**
     * The sequence ID for this failure that was returned by the
     * {@link CameraDevice#capture} family of functions.
     *
     * <p>The sequence ID is a unique monotonically increasing value starting from 0,
     * incremented every time a new group of requests is submitted to the CameraDevice.</p>
     *
     * @return int The ID for the sequence of requests that this capture result is a part of
     *
     * @see CameraDevice.CaptureListener#onCaptureSequenceCompleted
     */
    public int getSequenceId() {
        return mSequenceId;
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/

    /**
     * <p>
     * A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space
     * </p>
     * <p>
     * This matrix is either set by HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * In the latter case, the HAL may round the matrix to account
     * for precision issues; the final rounded matrix should be
     * reported back in this matrix result metadata.
     * </p>
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>
     * Gains applying to Bayer color channels for
     * white-balance
     * </p>
     * <p>
     * The 4-channel white-balance gains are defined in
     * the order of [R G_even G_odd B], where G_even is the gain
     * for green pixels on even rows of the output, and G_odd
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the G_even value, and write G_odd equal to
     * G_even in the output result metadata.
     * </p><p>
     * This array is either set by HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * The output should be the gains actually applied by the HAL to
     * the current frame.
     * </p>
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);

    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_PRECAPTURE_METERING call
     * </p>
     * <p>
     * Must be 0 if no
     * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
     * by HAL. Always updated even if AE algorithm ignores the
     * trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
            new Key<Integer>("android.control.aePrecaptureId", int.class);

    /**
     * <p>
     * List of areas to use for
     * metering
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);

    /**
     * <p>
     * Current state of AE algorithm
     * </p>
     * <p>
     * Whenever the AE algorithm state changes, a
     * MSG_AUTOEXPOSURE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AE_STATE_INACTIVE
     * @see #CONTROL_AE_STATE_SEARCHING
     * @see #CONTROL_AE_STATE_CONVERGED
     * @see #CONTROL_AE_STATE_LOCKED
     * @see #CONTROL_AE_STATE_FLASH_REQUIRED
     * @see #CONTROL_AE_STATE_PRECAPTURE
     */
    public static final Key<Integer> CONTROL_AE_STATE =
            new Key<Integer>("android.control.aeState", int.class);

    /**
     * <p>
     * Whether AF is currently enabled, and what
     * mode it is set to
     * </p>
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);

    /**
     * <p>
     * List of areas to use for focus
     * estimation
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);

    /**
     * <p>
     * Current state of AF algorithm
     * </p>
     * <p>
     * Whenever the AF algorithm state changes, a
     * MSG_AUTOFOCUS notification must be sent if a notification
     * callback is registered.
     * </p>
     * @see #CONTROL_AF_STATE_INACTIVE
     * @see #CONTROL_AF_STATE_PASSIVE_SCAN
     * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
     * @see #CONTROL_AF_STATE_ACTIVE_SCAN
     * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
     */
    public static final Key<Integer> CONTROL_AF_STATE =
            new Key<Integer>("android.control.afState", int.class);

    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_AUTOFOCUS call
     * </p>
     * <p>
     * Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
     * received yet by HAL. Always updated even if AF algorithm
     * ignores the trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
            new Key<Integer>("android.control.afTriggerId", int.class);

    /**
     * <p>
     * Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is
     * </p>
     * <p>
     * [BC - AWB lock,AWB modes]
     * </p>
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>
     * List of areas to use for illuminant
     * estimation
     * </p>
     * <p>
     * Only used in AUTO mode.
     * </p><p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined inclusive of the
     * specified coordinates.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * (android.sensor.info.activeArraySize.width - 1,
     * android.sensor.info.activeArraySize.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);

    /**
     * <p>
     * Current state of AWB algorithm
     * </p>
     * <p>
     * Whenever the AWB algorithm state changes, a
     * MSG_AUTOWHITEBALANCE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AWB_STATE_INACTIVE
     * @see #CONTROL_AWB_STATE_SEARCHING
     * @see #CONTROL_AWB_STATE_CONVERGED
     * @see #CONTROL_AWB_STATE_LOCKED
     */
    public static final Key<Integer> CONTROL_AWB_STATE =
            new Key<Integer>("android.control.awbState", int.class);

    /**
     * <p>
     * Overall mode of 3A control
     * routines
     * </p>
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>
     * Operation mode for edge
     * enhancement
     * </p>
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);

    /**
     * <p>
     * Select flash operation mode
     * </p>
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>
     * Current state of the flash
     * unit
     * </p>
     * @see #FLASH_STATE_UNAVAILABLE
     * @see #FLASH_STATE_CHARGING
     * @see #FLASH_STATE_READY
     * @see #FLASH_STATE_FIRED
     */
    public static final Key<Integer> FLASH_STATE =
            new Key<Integer>("android.flash.state", int.class);

    /**
     * <p>
     * GPS coordinates to include in output JPEG
     * EXIF
     * </p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>
     * 32 characters describing GPS algorithm to
     * include in EXIF
     * </p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>
     * Time GPS fix was made to include in
     * EXIF
     * </p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>
     * Orientation of JPEG image to
     * write
     * </p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>
     * Compression quality of the final JPEG
     * image
     * </p>
     * <p>
     * 85-95 is typical usage range
     * </p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>
     * Compression quality of JPEG
     * thumbnail
     * </p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>
     * Resolution of embedded JPEG
     * thumbnail
     * </p>
     */
    public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
            new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);

    /**
     * <p>
     * Size of the lens aperture
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>
     * State of lens neutral density
     * filter(s)
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>
     * Lens optical zoom setting
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>
     * Distance to plane of sharpest focus,
     * measured from frontmost surface of the lens
     * </p>
     * <p>
     * Should be zero for fixed-focus cameras
     * </p>
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>
     * The range of scene distances that are in
     * sharp focus (depth of field)
     * </p>
     * <p>
     * If variable focus not supported, can still report
     * fixed depth of field range
     * </p>
     */
    public static final Key<float[]> LENS_FOCUS_RANGE =
            new Key<float[]>("android.lens.focusRange", float[].class);

    /**
     * <p>
     * Whether optical image stabilization is
     * enabled.
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>
     * Current lens status
     * </p>
     * @see #LENS_STATE_STATIONARY
     * @see #LENS_STATE_MOVING
     */
    public static final Key<Integer> LENS_STATE =
            new Key<Integer>("android.lens.state", int.class);

    /**
     * <p>
     * Mode of operation for the noise reduction
     * algorithm
     * </p>
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>
     * Whether a result given to the framework is the
     * final one for the capture, or only a partial that contains a
     * subset of the full set of dynamic metadata
     * values.
     * </p>
     * <p>
     * The entries in the result metadata buffers for a
     * single capture may not overlap, except for this entry. The
     * FINAL buffers must retain FIFO ordering relative to the
     * requests that generate them, so the FINAL buffer for frame 3 must
     * always be sent to the framework after the FINAL buffer for frame 2, and
     * before the FINAL buffer for frame 4. PARTIAL buffers may be returned
     * in any order relative to other frames, but all PARTIAL buffers for a given
     * capture must arrive before the FINAL buffer for that capture. This entry may
     * only be used by the HAL if quirks.usePartialResult is set to 1.
     * </p>
     *
     * <b>Optional</b> - This value may be null on some devices.
     *
     * @hide
     */
    public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
            new Key<Boolean>("android.quirks.partialResult", boolean.class);

    /**
     * <p>
     * A frame counter set by the framework. This value monotonically
     * increases with every new result (that is, each new result has a unique
     * frameCount value).
     * </p>
     * <p>
     * Reset on release()
     * </p>
     */
    public static final Key<Integer> REQUEST_FRAME_COUNT =
            new Key<Integer>("android.request.frameCount", int.class);

    /**
     * <p>
     * An application-specified ID for the current
     * request. Must be maintained unchanged in output
     * frame
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>
     * (x, y, width, height).
     * </p><p>
     * A rectangle with the top-left corner of (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.
     * </p><p>
     * HAL2.x uses only (x, y, width)
     * </p>
     * <p>
     * Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.
     * </p><p>
     * For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).
     * </p><p>
     * Conversely, if the crop region is set to a 16:9, then 4:3
     * outputs should crop horizontally (pillarbox), and 16:9
     * streams should match exactly. These additional crops must
     * be centered within the crop region.
     * </p><p>
     * The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor(
     * activeArraySize.height / android.scaler.maxDigitalZoom),
     * respectively.
     * </p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);

    /**
     * <p>
     * Duration each pixel is exposed to
     * light.
     * </p><p>
     * If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).
     * </p>
     * <p>
     * 1/10000 - 30 sec range. No bulb mode
     * </p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>
     * Duration from start of frame exposure to
     * start of next frame exposure
     * </p>
     * <p>
     * Exposure time has priority, so duration is set to
     * max(duration, exposure time + overhead)
     * </p>
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>
     * Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.
     * </p><p>
     * If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).
     * </p>
     * <p>
     * ISO 12232:2006 REI method
     * </p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>
     * Time at start of exposure of first
     * row
     * </p>
     * <p>
     * Monotonic, should be synced to other timestamps in
     * system
     * </p>
     */
    public static final Key<Long> SENSOR_TIMESTAMP =
            new Key<Long>("android.sensor.timestamp", long.class);

    /**
     * <p>
     * The temperature of the sensor, sampled at the time
     * exposure began for this frame.
     * </p><p>
     * The thermal diode being queried should be inside the sensor PCB, or
     * somewhere close to it.
     * </p>
     *
     * <b>Optional</b> - This value may be null on some devices.
     *
     * <b>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL}</b> -
     * Present on all devices that report being FULL level hardware devices in the
     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL HARDWARE_LEVEL} key.
     */
    public static final Key<Float> SENSOR_TEMPERATURE =
            new Key<Float>("android.sensor.temperature", float.class);

    /**
     * <p>
     * State of the face detector
     * unit
     * </p>
     * <p>
     * Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * android.statistics.info.availableFaceDetectModes.
     * </p>
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>
     * List of unique IDs for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     *
     * @hide
     */
    public static final Key<int[]> STATISTICS_FACE_IDS =
            new Key<int[]>("android.statistics.faceIds", int[].class);

    /**
     * <p>
     * List of landmarks for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     *
     * @hide
     */
    public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
            new Key<int[]>("android.statistics.faceLandmarks", int[].class);

    /**
     * <p>
     * List of the bounding rectangles for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF
     * </p>
     *
     * @hide
     */
    public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
            new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);

    /**
     * <p>
     * List of the face confidence scores for
     * detected faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF. The value should be
     * meaningful (for example, setting 100 at all times is illegal).
     * </p>
     *
     * @hide
     */
    public static final Key<byte[]> STATISTICS_FACE_SCORES =
            new Key<byte[]>("android.statistics.faceScores", byte[].class);

    /**
     * <p>
     * A low-resolution map of lens shading, per
     * color channel
     * </p>
     * <p>
     * Assume bilinear interpolation of map. The least
     * shaded section of the image should have a gain factor
     * of 1; all other sections should have gains above 1.
     * The map should be on the order of 30-40 rows, and
     * must be smaller than 64x64.
     * </p><p>
     * When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
     * must take into account the colorCorrection settings.
     * </p>
     */
    public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
            new Key<float[]>("android.statistics.lensShadingMap", float[].class);

    /**
     * <p>
     * The best-fit color channel gains calculated
     * by the HAL's statistics units for the current output frame
     * </p>
     * <p>
     * This may be different than the gains used for this frame,
     * since statistics processing on data from a new frame
     * typically completes after the transform has already been
     * applied to that frame.
     * </p><p>
     * The 4 channel gains are defined in Bayer domain,
     * see android.colorCorrection.gains for details.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
            new Key<float[]>("android.statistics.predictedColorGains", float[].class);

    /**
     * <p>
     * The best-fit color transform matrix estimate
     * calculated by the HAL's statistics units for the current
     * output frame
     * </p>
     * <p>
     * The HAL must provide the estimate from its
     * statistics unit on the white balance transforms to use
     * for the next frame. These are the values the HAL believes
     * are the best fit for the current output frame. This may
     * be different than the transform used for this frame, since
     * statistics processing on data from a new frame typically
     * completes after the transform has already been applied to
     * that frame.
     * </p><p>
     * These estimates must be provided for all frames, even if
     * capture settings and color transforms are set by the application.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
            new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);

    /**
     * <p>
     * The HAL estimated scene illumination lighting
     * frequency
     * </p>
     * <p>
     * Report NONE if there doesn't appear to be flickering
     * illumination
     * </p>
     * @see #STATISTICS_SCENE_FLICKER_NONE
     * @see #STATISTICS_SCENE_FLICKER_50HZ
     * @see #STATISTICS_SCENE_FLICKER_60HZ
     */
    public static final Key<Integer> STATISTICS_SCENE_FLICKER =
            new Key<Integer>("android.statistics.sceneFlicker", int.class);

    /**
     * <p>
     * Table mapping blue input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the blue
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>
     * Table mapping green input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the green
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);

    /**
     * <p>
     * Table mapping red input values to output
     * values
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the red
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * Since the input and output ranges may vary depending on
     * the camera pipeline, the input and output pixel values
     * are represented by normalized floating-point values
     * between 0 and 1, with 0 == black and 1 == white.
     * </p><p>
     * The curve should be linearly interpolated between the
     * defined points. The points will be listed in increasing
     * order of P_IN. For example, if the array is: [0.0, 0.0,
     * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
     * for a few sample points would be: 0 -> 0, 0.15 ->
     * 0.25, 0.3 -> 0.5, 0.5 -> 0.64
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);

    /**
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>
     * This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.
     * </p><p>
     * In particular, the LED *must* always be on when the data could be
     * transmitted off the device. The LED *should* always be on whenever
     * data is stored locally on the device.
     * </p><p>
     * The LED *may* be off if a trusted application is using the data that
     * doesn't violate the above rules.
     * </p>
     *
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>
     * Whether black-level compensation is locked
     * to its current values, or is free to vary
     * </p>
     * <p>
     * When set to ON, the values used for black-level
     * compensation must not change until the lock is set to
     * OFF
     * </p><p>
     * Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the HAL must report whether setting the
     * black level lock was successful in the output result
     * metadata.
     * </p><p>
     * The black level locking must happen at the sensor, and not at the ISP.
     * If for some reason black level locking is no longer legal (for example,
     * the analog gain has changed, which forces black levels to be
     * recalculated), then the HAL is free to override this request (and it
     * must report 'OFF' when this does happen) until the next time locking
     * is legal again.
     * </p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/

    /**
     * <p>
     * List of the {@link Face Faces} detected through camera face detection
     * in this result.
     * </p>
     * <p>
     * Only available if {@link #STATISTICS_FACE_DETECT_MODE} {@code !=}
     * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_OFF OFF}.
     * </p>
     *
     * @see Face
     */
    public static final Key<Face[]> STATISTICS_FACES =
            new Key<Face[]>("android.statistics.faces", Face[].class);
}