// CaptureResult.java revision d5ff06a2ce4a65615ce5e8b8df93f2c3da2a8bbd
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.graphics.Point;
import android.graphics.Rect;

/**
 * <p>The results of a single image capture from the image sensor.</p>
 *
 * <p>Contains the final configuration for the capture hardware (sensor, lens,
 * flash), the processing pipeline, the control algorithms, and the output
 * buffers.</p>
 *
 * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
 * {@link CaptureRequest}. All properties listed for capture requests can also
 * be queried on the capture result, to determine the final values used for
 * capture. The result also includes additional metadata about the state of the
 * camera device during the capture.</p>
 *
 */
public final class CaptureResult extends CameraMetadata {
    /**
     * @hide
     */
    public CaptureResult() {
    }

    /**
     * Describes a face detected in an image.
     */
    public static class Face {

        /**
         * <p>Bounds of the face. A rectangle relative to the sensor's
         * {@link CameraProperties#SENSOR_INFO_ACTIVE_ARRAY_SIZE}, with (0,0)
         * representing the top-left corner of the active array rectangle.</p>
         */
        public Rect getBounds() {
            return mBounds;
        }

        /**
         * <p>The confidence level for the detection of the face. The range is
         * 1 to 100. 100 is the highest confidence.</p>
         *
         * <p>Depending on the device, even very low-confidence faces may be
         * listed, so applications should filter out faces with low confidence,
         * depending on the use case. For a typical point-and-shoot camera
         * application that wishes to display rectangles around detected faces,
         * filtering out faces with confidence less than 50 is recommended.</p>
         *
         */
        public int getScore() {
            return mScore;
        }

        /**
         * A unique id per face while the face is visible to the tracker. If
         * the face leaves the field-of-view and comes back, it will get a new
         * id. This is an optional field, may not be supported on all devices.
         * If not supported, id will always be set to -1. The optional fields
         * are supported as a set. Either they are all valid, or none of them
         * are.
         */
        public int getId() {
            return mId;
        }

        /**
         * The coordinates of the center of the left eye. The coordinates are in
         * the same space as the ones for {@link #getBounds}. This is an
         * optional field, may not be supported on all devices. If not
         * supported, the value will always be set to null. The optional fields
         * are supported as a set. Either they are all valid, or none of them
         * are.
         */
        public Point getLeftEye() {
            return mLeftEye;
        }

        /**
         * The coordinates of the center of the right eye. The coordinates are
         * in the same space as the ones for {@link #getBounds}. This is an
         * optional field, may not be supported on all devices. If not
         * supported, the value will always be set to null. The optional fields
         * are supported as a set. Either they are all valid, or none of them
         * are.
         */
        public Point getRightEye() {
            return mRightEye;
        }

        /**
         * The coordinates of the center of the mouth. The coordinates are in
         * the same space as the ones for {@link #getBounds}. This is an optional
         * field, may not be supported on all devices. If not supported, the
         * value will always be set to null. The optional fields are supported
         * as a set. Either they are all valid, or none of them are.
         */
        public Point getMouth() {
            return mMouth;
        }

        // Mandatory field.
        private Rect mBounds;
        // Mandatory field; 1-100 per getScore().
        private int mScore;
        // Optional field; -1 when face tracking ids are unsupported.
        private int mId;
        // Optional fields below; null when not supported (all-or-none, per
        // the getter javadoc above).
        private Point mLeftEye;
        private Point mRightEye;
        private Point mMouth;
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/

    /**
     * <p>
     * A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space
     * </p>
     * <p>
     * This matrix is either set by HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * In the latter case, the HAL may round the matrix to account
     * for precision issues; the final rounded matrix should be
     * reported back in this matrix result metadata.
     * </p>
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>
     * Gains applying to Bayer color channels for
     * white-balance
     * </p>
     * <p>
     * The 4-channel white-balance gains are defined in
     * the order of [R G_even G_odd B], where G_even is the gain
     * for green pixels on even rows of the output, and G_odd
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the G_even value, and write G_odd equal to
     * G_even in the output result metadata.
     * </p><p>
     * This array is either set by HAL when the request
     * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * android.colorCorrection.mode is TRANSFORM_MATRIX.
     * </p><p>
     * The output should be the gains actually applied by the HAL to
     * the current frame.
     * </p>
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);

    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_PRECAPTURE_METERING call
     * </p>
     * <p>
     * Must be 0 if no
     * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
     * by HAL. Always updated even if AE algorithm ignores the
     * trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
            new Key<Integer>("android.control.aePrecaptureId", int.class);

    /**
     * <p>
     * List of areas to use for
     * metering
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left of the active pixel array, and
     * (android.sensor.info.activeArraySize.width,
     * android.sensor.info.activeArraySize.height) being the
     * bottom-right point of the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);

    /**
     * <p>
     * Current state of AE algorithm
     * </p>
     * <p>
     * Whenever the AE algorithm state changes, a
     * MSG_AUTOEXPOSURE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AE_STATE_INACTIVE
     * @see #CONTROL_AE_STATE_SEARCHING
     * @see #CONTROL_AE_STATE_CONVERGED
     * @see #CONTROL_AE_STATE_LOCKED
     * @see #CONTROL_AE_STATE_FLASH_REQUIRED
     * @see #CONTROL_AE_STATE_PRECAPTURE
     */
    public static final Key<Integer> CONTROL_AE_STATE =
            new Key<Integer>("android.control.aeState", int.class);

    /**
     * <p>
     * Whether AF is currently enabled, and what
     * mode it is set to
     * </p>
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);

    /**
     * <p>
     * List of areas to use for focus
     * estimation
     * </p>
     * <p>
     * Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight.
     * </p><p>
     * The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left of the active pixel array, and
     * (android.sensor.info.activeArraySize.width,
     * android.sensor.info.activeArraySize.height) being the
     * bottom-right point of the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);

    /**
     * <p>
     * Current state of AF algorithm
     * </p>
     * <p>
     * Whenever the AF algorithm state changes, a
     * MSG_AUTOFOCUS notification must be sent if a notification
     * callback is registered.
     * </p>
     * @see #CONTROL_AF_STATE_INACTIVE
     * @see #CONTROL_AF_STATE_PASSIVE_SCAN
     * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
     * @see #CONTROL_AF_STATE_ACTIVE_SCAN
     * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
     * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
     */
    public static final Key<Integer> CONTROL_AF_STATE =
            new Key<Integer>("android.control.afState", int.class);

    /**
     * <p>
     * The ID sent with the latest
     * CAMERA2_TRIGGER_AUTOFOCUS call
     * </p>
     * <p>
     * Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
     * received yet by HAL. Always updated even if AF algorithm
     * ignores the trigger
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
            new Key<Integer>("android.control.afTriggerId", int.class);

    /**
     * <p>
     * Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is
     * </p>
     * <p>
     * [BC - AWB lock,AWB modes]
     * </p>
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>
     * List of areas to use for illuminant
     * estimation
     * </p>
     * <p>
     * Only used in AUTO mode.
     * </p><p>
     * Each area is a rectangle plus weight: xmin, ymin, xmax,
     * ymax, weight. The coordinate system is based on the active
     * pixel array, with (0,0) being the top-left of the active
     * pixel array, and (android.sensor.info.activeArraySize.width,
     * android.sensor.info.activeArraySize.height) being the
     * bottom-right point of the active pixel array. The weight
     * should be nonnegative.
     * </p><p>
     * If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current android.scaler.cropRegion, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata
     * </p>
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);

    /**
     * <p>
     * Current state of AWB algorithm
     * </p>
     * <p>
     * Whenever the AWB algorithm state changes, a
     * MSG_AUTOWHITEBALANCE notification must be sent if a
     * notification callback is registered.
     * </p>
     * @see #CONTROL_AWB_STATE_INACTIVE
     * @see #CONTROL_AWB_STATE_SEARCHING
     * @see #CONTROL_AWB_STATE_CONVERGED
     * @see #CONTROL_AWB_STATE_LOCKED
     */
    public static final Key<Integer> CONTROL_AWB_STATE =
            new Key<Integer>("android.control.awbState", int.class);

    /**
     * <p>
     * Overall mode of 3A control
     * routines
     * </p>
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>
     * Operation mode for edge
     * enhancement
     * </p>
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);

    /**
     * <p>
     * Select flash operation mode
     * </p>
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>
     * Current state of the flash
     * unit
     * </p>
     * @see #FLASH_STATE_UNAVAILABLE
     * @see #FLASH_STATE_CHARGING
     * @see #FLASH_STATE_READY
     * @see #FLASH_STATE_FIRED
     */
    public static final Key<Integer> FLASH_STATE =
            new Key<Integer>("android.flash.state", int.class);

    /**
     * <p>
     * GPS coordinates to include in output JPEG
     * EXIF
     * </p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>
     * 32 characters describing GPS algorithm to
     * include in EXIF
     * </p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>
     * Time GPS fix was made to include in
     * EXIF
     * </p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>
     * Orientation of JPEG image to
     * write
     * </p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>
     * Compression quality of the final JPEG
     * image
     * </p>
     * <p>
     * 85-95 is typical usage range
     * </p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>
     * Compression quality of JPEG
     * thumbnail
     * </p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>
     * Resolution of embedded JPEG
     * thumbnail
     * </p>
     */
    public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
            new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);

    /**
     * <p>
     * Size of the lens aperture
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>
     * State of lens neutral density
     * filter(s)
     * </p>
     * <p>
     * Will not be supported on most devices. Can only
     * pick from supported list
     * </p>
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>
     * Lens optical zoom setting
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>
     * Distance to plane of sharpest focus,
     * measured from frontmost surface of the lens
     * </p>
     * <p>
     * Should be zero for fixed-focus cameras
     * </p>
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>
     * The range of scene distances that are in
     * sharp focus (depth of field)
     * </p>
     * <p>
     * If variable focus not supported, can still report
     * fixed depth of field range
     * </p>
     */
    // NOTE(review): documented as a *range* of distances but typed Key<Float>
    // (a single value) — confirm against the metadata definitions in
    // /system/media/camera/docs before relying on this type.
    public static final Key<Float> LENS_FOCUS_RANGE =
            new Key<Float>("android.lens.focusRange", float.class);

    /**
     * <p>
     * Whether optical image stabilization is
     * enabled.
     * </p>
     * <p>
     * Will not be supported on most devices.
     * </p>
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>
     * Current lens status
     * </p>
     * @see #LENS_STATE_STATIONARY
     */
    public static final Key<Integer> LENS_STATE =
            new Key<Integer>("android.lens.state", int.class);

    /**
     * <p>
     * Mode of operation for the noise reduction
     * algorithm
     * </p>
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>
     * Number of frames captured since
     * open()
     * </p>
     * <p>
     * Reset on release()
     * </p>
     */
    public static final Key<Integer> REQUEST_FRAME_COUNT =
            new Key<Integer>("android.request.frameCount", int.class);

    /**
     * <p>
     * An application-specified ID for the current
     * request. Must be maintained unchanged in output
     * frame
     * </p>
     *
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>
     * (x, y, width, height).
     * </p><p>
     * A rectangle with the top-left corner of (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.
     * </p><p>
     * HAL2.x uses only (x, y, width)
     * </p>
     * <p>
     * Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.
     * </p><p>
     * For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).
     * </p><p>
     * Conversely, if the crop region is set to a 16:9, then 4:3
     * outputs should crop horizontally (pillarbox), and 16:9
     * streams should match exactly. These additional crops must
     * be centered within the crop region.
     * </p><p>
     * The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor(
     * activeArraySize.height / android.scaler.maxDigitalZoom),
     * respectively.
     * </p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);

    /**
     * <p>
     * Duration each pixel is exposed to
     * light.
     * </p><p>
     * If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).
     * </p>
     * <p>
     * 1/10000 - 30 sec range. No bulb mode
     * </p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>
     * Duration from start of frame exposure to
     * start of next frame exposure
     * </p>
     * <p>
     * Exposure time has priority, so duration is set to
     * max(duration, exposure time + overhead)
     * </p>
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>
     * Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.
     * </p><p>
     * If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).
     * </p>
     * <p>
     * ISO 12232:2006 REI method
     * </p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>
     * Time at start of exposure of first
     * row
     * </p>
     * <p>
     * Monotonic, should be synced to other timestamps in
     * system
     * </p>
     */
    public static final Key<Long> SENSOR_TIMESTAMP =
            new Key<Long>("android.sensor.timestamp", long.class);

    /**
     * <p>
     * The temperature of the sensor, sampled at the time
     * exposure began for this frame.
     * </p><p>
     * The thermal diode being queried should be inside the sensor PCB, or
     * somewhere close to it.
     * </p>
     */
    public static final Key<Float> SENSOR_TEMPERATURE =
            new Key<Float>("android.sensor.temperature", float.class);

    /**
     * <p>
     * State of the face detector
     * unit
     * </p>
     * <p>
     * Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * android.statistics.info.availableFaceDetectModes.
     * </p>
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>
     * List of unique IDs for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     */
    public static final Key<int[]> STATISTICS_FACE_IDS =
            new Key<int[]>("android.statistics.faceIds", int[].class);

    /**
     * <p>
     * List of landmarks for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode == FULL
     * </p>
     */
    public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
            new Key<int[]>("android.statistics.faceLandmarks", int[].class);

    /**
     * <p>
     * List of the bounding rectangles for detected
     * faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF
     * </p>
     */
    public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
            new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);

    /**
     * <p>
     * List of the face confidence scores for
     * detected faces
     * </p>
     * <p>
     * Only available if faceDetectMode != OFF. The value should be
     * meaningful (for example, setting 100 at all times is illegal).
     * </p>
     */
    public static final Key<byte[]> STATISTICS_FACE_SCORES =
            new Key<byte[]>("android.statistics.faceScores", byte[].class);

    /**
     * <p>
     * A low-resolution map of lens shading, per
     * color channel
     * </p>
     * <p>
     * Assume bilinear interpolation of map. The least
     * shaded section of the image should have a gain factor
     * of 1; all other sections should have gains above 1.
     * the map should be on the order of 30-40 rows, and
     * must be smaller than 64x64.
     * </p><p>
     * When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
     * must take into account the colorCorrection settings.
     * </p>
     */
    public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
            new Key<float[]>("android.statistics.lensShadingMap", float[].class);

    /**
     * <p>
     * The best-fit color channel gains calculated
     * by the HAL's statistics units for the current output frame
     * </p>
     * <p>
     * This may be different than the gains used for this frame,
     * since statistics processing on data from a new frame
     * typically completes after the transform has already been
     * applied to that frame.
     * </p><p>
     * The 4 channel gains are defined in Bayer domain,
     * see android.colorCorrection.gains for details.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
            new Key<float[]>("android.statistics.predictedColorGains", float[].class);

    /**
     * <p>
     * The best-fit color transform matrix estimate
     * calculated by the HAL's statistics units for the current
     * output frame
     * </p>
     * <p>
     * The HAL must provide the estimate from its
     * statistics unit on the white balance transforms to use
     * for the next frame. These are the values the HAL believes
     * are the best fit for the current output frame. This may
     * be different than the transform used for this frame, since
     * statistics processing on data from a new frame typically
     * completes after the transform has already been applied to
     * that frame.
     * </p><p>
     * These estimates must be provided for all frames, even if
     * capture settings and color transforms are set by the application.
     * </p><p>
     * This value should always be calculated by the AWB block,
     * regardless of the android.control.* current values.
     * </p>
     */
    public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
            new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);

    /**
     * <p>
     * The HAL estimated scene illumination lighting
     * frequency
     * </p>
     * <p>
     * Report NONE if there doesn't appear to be flickering
     * illumination
     * </p>
     * @see #STATISTICS_SCENE_FLICKER_NONE
     * @see #STATISTICS_SCENE_FLICKER_50HZ
     * @see #STATISTICS_SCENE_FLICKER_60HZ
     */
    public static final Key<Integer> STATISTICS_SCENE_FLICKER =
            new Key<Integer>("android.statistics.sceneFlicker", int.class);

    /**
     * <p>
     * Table mapping blue input values to output
     * values
     * </p>
     */
    // NOTE(review): documented as a table, but typed Key<Float> while
    // TONEMAP_CURVE_RED below is Key<float[]> — confirm against the metadata
    // definitions whether this should be float[] like the red curve.
    public static final Key<Float> TONEMAP_CURVE_BLUE =
            new Key<Float>("android.tonemap.curveBlue", float.class);

    /**
     * <p>
     * Table mapping green input values to output
     * values
     * </p>
     */
    // NOTE(review): same type inconsistency as TONEMAP_CURVE_BLUE — documented
    // as a table but typed Key<Float>; verify against the metadata definitions.
    public static final Key<Float> TONEMAP_CURVE_GREEN =
            new Key<Float>("android.tonemap.curveGreen", float.class);

    /**
     * <p>
     * Table mapping red input values to output
     * values
     * </p>
     * <p>
     * The input range must be monotonically increasing
     * with N, and values between entries should be linearly
     * interpolated. For example, if the array is: [0.0, 0.0,
     * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
     * for a few sample points would be: 0 -> 0, 0.15 ->
     * 0.25, 0.3 -> 0.5, 0.5 -> 0.64
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);

    /**
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>
     * This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.
     * </p><p>
     * In particular, the LED *must* always be on when the data could be
     * transmitted off the device. The LED *should* always be on whenever
     * data is stored locally on the device.
     * </p><p>
     * The LED *may* be off if a trusted application is using the data that
     * doesn't violate the above rules.
     * </p>
     *
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>
     * Whether black-level compensation is locked
     * to its current values, or is free to vary
     * </p>
     * <p>
     * When set to ON, the values used for black-level
     * compensation must not change until the lock is set to
     * OFF
     * </p><p>
     * Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the HAL must report whether setting the
     * black level lock was successful in the output result
     * metadata.
     * </p><p>
     * The black level locking must happen at the sensor, and not at the ISP.
     * If for some reason black level locking is no longer legal (for example,
     * the analog gain has changed, which forces black levels to be
     * recalculated), then the HAL is free to override this request (and it
     * must report 'OFF' when this does happen) until the next time locking
     * is legal again.
     * </p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
}