// CaptureRequest.java — revision 6bbf9dc5ae7ebc85991dcfe3e18e837b12d3f333
1/* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.hardware.camera2; 18 19import android.hardware.camera2.impl.CameraMetadataNative; 20import android.hardware.camera2.CameraDevice.CaptureListener; 21import android.os.Parcel; 22import android.os.Parcelable; 23import android.view.Surface; 24 25import java.util.HashSet; 26import java.util.Objects; 27 28 29/** 30 * <p>An immutable package of settings and outputs needed to capture a single 31 * image from the camera device.</p> 32 * 33 * <p>Contains the configuration for the capture hardware (sensor, lens, flash), 34 * the processing pipeline, the control algorithms, and the output buffers. Also 35 * contains the list of target Surfaces to send image data to for this 36 * capture.</p> 37 * 38 * <p>CaptureRequests can be created by using a {@link Builder} instance, 39 * obtained by calling {@link CameraDevice#createCaptureRequest}</p> 40 * 41 * <p>CaptureRequests are given to {@link CameraDevice#capture} or 42 * {@link CameraDevice#setRepeatingRequest} to capture images from a camera.</p> 43 * 44 * <p>Each request can specify a different subset of target Surfaces for the 45 * camera to send the captured data to. 
All the surfaces used in a request must 46 * be part of the surface list given to the last call to 47 * {@link CameraDevice#configureOutputs}, when the request is submitted to the 48 * camera device.</p> 49 * 50 * <p>For example, a request meant for repeating preview might only include the 51 * Surface for the preview SurfaceView or SurfaceTexture, while a 52 * high-resolution still capture would also include a Surface from a ImageReader 53 * configured for high-resolution JPEG images.</p> 54 * 55 * @see CameraDevice#capture 56 * @see CameraDevice#setRepeatingRequest 57 * @see CameraDevice#createCaptureRequest 58 */ 59public final class CaptureRequest extends CameraMetadata implements Parcelable { 60 61 private final HashSet<Surface> mSurfaceSet; 62 private final CameraMetadataNative mSettings; 63 64 private Object mUserTag; 65 66 /** 67 * Construct empty request. 68 * 69 * Used by Binder to unparcel this object only. 70 */ 71 private CaptureRequest() { 72 mSettings = new CameraMetadataNative(); 73 mSurfaceSet = new HashSet<Surface>(); 74 } 75 76 /** 77 * Clone from source capture request. 78 * 79 * Used by the Builder to create an immutable copy. 80 */ 81 @SuppressWarnings("unchecked") 82 private CaptureRequest(CaptureRequest source) { 83 mSettings = new CameraMetadataNative(source.mSettings); 84 mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone(); 85 } 86 87 /** 88 * Take ownership of passed-in settings. 89 * 90 * Used by the Builder to create a mutable CaptureRequest. 91 */ 92 private CaptureRequest(CameraMetadataNative settings) { 93 mSettings = settings; 94 mSurfaceSet = new HashSet<Surface>(); 95 } 96 97 @SuppressWarnings("unchecked") 98 @Override 99 public <T> T get(Key<T> key) { 100 return mSettings.get(key); 101 } 102 103 /** 104 * Retrieve the tag for this request, if any. 
105 * 106 * <p>This tag is not used for anything by the camera device, but can be 107 * used by an application to easily identify a CaptureRequest when it is 108 * returned by 109 * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted} 110 * </p> 111 * 112 * @return the last tag Object set on this request, or {@code null} if 113 * no tag has been set. 114 * @see Builder#setTag 115 */ 116 public Object getTag() { 117 return mUserTag; 118 } 119 120 /** 121 * Determine whether this CaptureRequest is equal to another CaptureRequest. 122 * 123 * <p>A request is considered equal to another is if it's set of key/values is equal, it's 124 * list of output surfaces is equal, and the user tag is equal.</p> 125 * 126 * @param other Another instance of CaptureRequest. 127 * 128 * @return True if the requests are the same, false otherwise. 129 */ 130 @Override 131 public boolean equals(Object other) { 132 return other instanceof CaptureRequest 133 && equals((CaptureRequest)other); 134 } 135 136 private boolean equals(CaptureRequest other) { 137 return other != null 138 && Objects.equals(mUserTag, other.mUserTag) 139 && mSurfaceSet.equals(other.mSurfaceSet) 140 && mSettings.equals(other.mSettings); 141 } 142 143 @Override 144 public int hashCode() { 145 return mSettings.hashCode(); 146 } 147 148 public static final Parcelable.Creator<CaptureRequest> CREATOR = 149 new Parcelable.Creator<CaptureRequest>() { 150 @Override 151 public CaptureRequest createFromParcel(Parcel in) { 152 CaptureRequest request = new CaptureRequest(); 153 request.readFromParcel(in); 154 155 return request; 156 } 157 158 @Override 159 public CaptureRequest[] newArray(int size) { 160 return new CaptureRequest[size]; 161 } 162 }; 163 164 /** 165 * Expand this object from a Parcel. 166 * Hidden since this breaks the immutability of CaptureRequest, but is 167 * needed to receive CaptureRequests with aidl. 
168 * 169 * @param in The parcel from which the object should be read 170 * @hide 171 */ 172 public void readFromParcel(Parcel in) { 173 mSettings.readFromParcel(in); 174 175 mSurfaceSet.clear(); 176 177 Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader()); 178 179 if (parcelableArray == null) { 180 return; 181 } 182 183 for (Parcelable p : parcelableArray) { 184 Surface s = (Surface) p; 185 mSurfaceSet.add(s); 186 } 187 } 188 189 @Override 190 public int describeContents() { 191 return 0; 192 } 193 194 @Override 195 public void writeToParcel(Parcel dest, int flags) { 196 mSettings.writeToParcel(dest, flags); 197 dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags); 198 } 199 200 /** 201 * A builder for capture requests. 202 * 203 * <p>To obtain a builder instance, use the 204 * {@link CameraDevice#createCaptureRequest} method, which initializes the 205 * request fields to one of the templates defined in {@link CameraDevice}. 206 * 207 * @see CameraDevice#createCaptureRequest 208 * @see #TEMPLATE_PREVIEW 209 * @see #TEMPLATE_RECORD 210 * @see #TEMPLATE_STILL_CAPTURE 211 * @see #TEMPLATE_VIDEO_SNAPSHOT 212 * @see #TEMPLATE_MANUAL 213 */ 214 public final static class Builder { 215 216 private final CaptureRequest mRequest; 217 218 /** 219 * Initialize the builder using the template; the request takes 220 * ownership of the template. 
221 * 222 * @hide 223 */ 224 public Builder(CameraMetadataNative template) { 225 mRequest = new CaptureRequest(template); 226 } 227 228 /** 229 * <p>Add a surface to the list of targets for this request</p> 230 * 231 * <p>The Surface added must be one of the surfaces included in the most 232 * recent call to {@link CameraDevice#configureOutputs}, when the 233 * request is given to the camera device.</p> 234 * 235 * <p>Adding a target more than once has no effect.</p> 236 * 237 * @param outputTarget Surface to use as an output target for this request 238 */ 239 public void addTarget(Surface outputTarget) { 240 mRequest.mSurfaceSet.add(outputTarget); 241 } 242 243 /** 244 * <p>Remove a surface from the list of targets for this request.</p> 245 * 246 * <p>Removing a target that is not currently added has no effect.</p> 247 * 248 * @param outputTarget Surface to use as an output target for this request 249 */ 250 public void removeTarget(Surface outputTarget) { 251 mRequest.mSurfaceSet.remove(outputTarget); 252 } 253 254 /** 255 * Set a capture request field to a value. The field definitions can be 256 * found in {@link CaptureRequest}. 257 * 258 * @param key The metadata field to write. 259 * @param value The value to set the field to, which must be of a matching 260 * type to the key. 261 */ 262 public <T> void set(Key<T> key, T value) { 263 mRequest.mSettings.set(key, value); 264 } 265 266 /** 267 * Get a capture request field value. The field definitions can be 268 * found in {@link CaptureRequest}. 269 * 270 * @throws IllegalArgumentException if the key was not valid 271 * 272 * @param key The metadata field to read. 273 * @return The value of that key, or {@code null} if the field is not set. 274 */ 275 public <T> T get(Key<T> key) { 276 return mRequest.mSettings.get(key); 277 } 278 279 /** 280 * Set a tag for this request. 
281 * 282 * <p>This tag is not used for anything by the camera device, but can be 283 * used by an application to easily identify a CaptureRequest when it is 284 * returned by 285 * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted} 286 * 287 * @param tag an arbitrary Object to store with this request 288 * @see CaptureRequest#getTag 289 */ 290 public void setTag(Object tag) { 291 mRequest.mUserTag = tag; 292 } 293 294 /** 295 * Build a request using the current target Surfaces and settings. 296 * 297 * @return A new capture request instance, ready for submission to the 298 * camera device. 299 */ 300 public CaptureRequest build() { 301 return new CaptureRequest(mRequest); 302 } 303 304 305 /** 306 * @hide 307 */ 308 public boolean isEmpty() { 309 return mRequest.mSettings.isEmpty(); 310 } 311 312 } 313 314 /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ 315 * The key entries below this point are generated from metadata 316 * definitions in /system/media/camera/docs. Do not modify by hand or 317 * modify the comment blocks at the start or end. 318 *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/ 319 320 /** 321 * <p> 322 * When android.control.awbMode is not OFF, TRANSFORM_MATRIX 323 * should be ignored. 324 * </p> 325 * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX 326 * @see #COLOR_CORRECTION_MODE_FAST 327 * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY 328 */ 329 public static final Key<Integer> COLOR_CORRECTION_MODE = 330 new Key<Integer>("android.colorCorrection.mode", int.class); 331 332 /** 333 * <p> 334 * A color transform matrix to use to transform 335 * from sensor RGB color space to output linear sRGB color space 336 * </p> 337 * <p> 338 * This matrix is either set by HAL when the request 339 * android.colorCorrection.mode is not TRANSFORM_MATRIX, or 340 * directly by the application in the request when the 341 * android.colorCorrection.mode is TRANSFORM_MATRIX. 
342 * </p><p> 343 * In the latter case, the HAL may round the matrix to account 344 * for precision issues; the final rounded matrix should be 345 * reported back in this matrix result metadata. 346 * </p> 347 */ 348 public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM = 349 new Key<Rational[]>("android.colorCorrection.transform", Rational[].class); 350 351 /** 352 * <p> 353 * Gains applying to Bayer color channels for 354 * white-balance 355 * </p> 356 * <p> 357 * The 4-channel white-balance gains are defined in 358 * the order of [R G_even G_odd B], where G_even is the gain 359 * for green pixels on even rows of the output, and G_odd 360 * is the gain for greenpixels on the odd rows. if a HAL 361 * does not support a separate gain for even/odd green channels, 362 * it should use the G_even value,and write G_odd equal to 363 * G_even in the output result metadata. 364 * </p><p> 365 * This array is either set by HAL when the request 366 * android.colorCorrection.mode is not TRANSFORM_MATRIX, or 367 * directly by the application in the request when the 368 * android.colorCorrection.mode is TRANSFORM_MATRIX. 369 * </p><p> 370 * The ouput should be the gains actually applied by the HAL to 371 * the current frame. 
372 * </p> 373 */ 374 public static final Key<float[]> COLOR_CORRECTION_GAINS = 375 new Key<float[]>("android.colorCorrection.gains", float[].class); 376 377 /** 378 * <p> 379 * Enum for controlling 380 * antibanding 381 * </p> 382 * @see #CONTROL_AE_ANTIBANDING_MODE_OFF 383 * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ 384 * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ 385 * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO 386 */ 387 public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE = 388 new Key<Integer>("android.control.aeAntibandingMode", int.class); 389 390 /** 391 * <p> 392 * Adjustment to AE target image 393 * brightness 394 * </p> 395 * <p> 396 * For example, if EV step is 0.333, '6' will mean an 397 * exposure compensation of +2 EV; -3 will mean an exposure 398 * compensation of -1 399 * </p> 400 */ 401 public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION = 402 new Key<Integer>("android.control.aeExposureCompensation", int.class); 403 404 /** 405 * <p> 406 * Whether AE is currently locked to its latest 407 * calculated values 408 * </p> 409 * <p> 410 * Note that even when AE is locked, the flash may be 411 * fired if the AE mode is ON_AUTO_FLASH / ON_ALWAYS_FLASH / 412 * ON_AUTO_FLASH_REDEYE. 
413 * </p> 414 */ 415 public static final Key<Boolean> CONTROL_AE_LOCK = 416 new Key<Boolean>("android.control.aeLock", boolean.class); 417 418 /** 419 * <p> 420 * Whether AE is currently updating the sensor 421 * exposure and sensitivity fields 422 * </p> 423 * <p> 424 * Only effective if android.control.mode = 425 * AUTO 426 * </p> 427 * @see #CONTROL_AE_MODE_OFF 428 * @see #CONTROL_AE_MODE_ON 429 * @see #CONTROL_AE_MODE_ON_AUTO_FLASH 430 * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH 431 * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE 432 */ 433 public static final Key<Integer> CONTROL_AE_MODE = 434 new Key<Integer>("android.control.aeMode", int.class); 435 436 /** 437 * <p> 438 * List of areas to use for 439 * metering 440 * </p> 441 * <p> 442 * Each area is a rectangle plus weight: xmin, ymin, 443 * xmax, ymax, weight. The rectangle is defined inclusive of the 444 * specified coordinates. 445 * </p><p> 446 * The coordinate system is based on the active pixel array, 447 * with (0,0) being the top-left pixel in the active pixel array, and 448 * (android.sensor.info.activeArraySize.width - 1, 449 * android.sensor.info.activeArraySize.height - 1) being the 450 * bottom-right pixel in the active pixel array. The weight 451 * should be nonnegative. 452 * </p><p> 453 * If all regions have 0 weight, then no specific metering area 454 * needs to be used by the HAL. 
If the metering region is 455 * outside the current android.scaler.cropRegion, the HAL 456 * should ignore the sections outside the region and output the 457 * used sections in the frame metadata 458 * </p> 459 */ 460 public static final Key<int[]> CONTROL_AE_REGIONS = 461 new Key<int[]>("android.control.aeRegions", int[].class); 462 463 /** 464 * <p> 465 * Range over which fps can be adjusted to 466 * maintain exposure 467 * </p> 468 * <p> 469 * Only constrains AE algorithm, not manual control 470 * of android.sensor.exposureTime 471 * </p> 472 */ 473 public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE = 474 new Key<int[]>("android.control.aeTargetFpsRange", int[].class); 475 476 /** 477 * <p> 478 * Whether the HAL must trigger precapture 479 * metering. 480 * </p> 481 * <p> 482 * This entry is normally set to IDLE, or is not 483 * included at all in the request settings. When included and 484 * set to START, the HAL must trigger the autoexposure 485 * precapture metering sequence. 486 * </p><p> 487 * The effect of AE precapture trigger depends on the current 488 * AE mode and state; see the camera HAL device v3 header for 489 * details. 
490 * </p> 491 * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE 492 * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START 493 */ 494 public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER = 495 new Key<Integer>("android.control.aePrecaptureTrigger", int.class); 496 497 /** 498 * <p> 499 * Whether AF is currently enabled, and what 500 * mode it is set to 501 * </p> 502 * @see #CONTROL_AF_MODE_OFF 503 * @see #CONTROL_AF_MODE_AUTO 504 * @see #CONTROL_AF_MODE_MACRO 505 * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO 506 * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE 507 * @see #CONTROL_AF_MODE_EDOF 508 */ 509 public static final Key<Integer> CONTROL_AF_MODE = 510 new Key<Integer>("android.control.afMode", int.class); 511 512 /** 513 * <p> 514 * List of areas to use for focus 515 * estimation 516 * </p> 517 * <p> 518 * Each area is a rectangle plus weight: xmin, ymin, 519 * xmax, ymax, weight. The rectangle is defined inclusive of the 520 * specified coordinates. 521 * </p><p> 522 * The coordinate system is based on the active pixel array, 523 * with (0,0) being the top-left pixel in the active pixel array, and 524 * (android.sensor.info.activeArraySize.width - 1, 525 * android.sensor.info.activeArraySize.height - 1) being the 526 * bottom-right pixel in the active pixel array. The weight 527 * should be nonnegative. 528 * </p><p> 529 * If all regions have 0 weight, then no specific focus area 530 * needs to be used by the HAL. If the focusing region is 531 * outside the current android.scaler.cropRegion, the HAL 532 * should ignore the sections outside the region and output the 533 * used sections in the frame metadata 534 * </p> 535 */ 536 public static final Key<int[]> CONTROL_AF_REGIONS = 537 new Key<int[]>("android.control.afRegions", int[].class); 538 539 /** 540 * <p> 541 * Whether the HAL must trigger autofocus. 542 * </p> 543 * <p> 544 * This entry is normally set to IDLE, or is not 545 * included at all in the request settings. 
546 * </p><p> 547 * When included and set to START, the HAL must trigger the 548 * autofocus algorithm. The effect of AF trigger depends on the 549 * current AF mode and state; see the camera HAL device v3 550 * header for details. When set to CANCEL, the HAL must cancel 551 * any active trigger, and return to initial AF state. 552 * </p> 553 * @see #CONTROL_AF_TRIGGER_IDLE 554 * @see #CONTROL_AF_TRIGGER_START 555 * @see #CONTROL_AF_TRIGGER_CANCEL 556 */ 557 public static final Key<Integer> CONTROL_AF_TRIGGER = 558 new Key<Integer>("android.control.afTrigger", int.class); 559 560 /** 561 * <p> 562 * Whether AWB is currently locked to its 563 * latest calculated values 564 * </p> 565 * <p> 566 * Note that AWB lock is only meaningful for AUTO 567 * mode; in other modes, AWB is already fixed to a specific 568 * setting 569 * </p> 570 */ 571 public static final Key<Boolean> CONTROL_AWB_LOCK = 572 new Key<Boolean>("android.control.awbLock", boolean.class); 573 574 /** 575 * <p> 576 * Whether AWB is currently setting the color 577 * transform fields, and what its illumination target 578 * is 579 * </p> 580 * <p> 581 * [BC - AWB lock,AWB modes] 582 * </p> 583 * @see #CONTROL_AWB_MODE_OFF 584 * @see #CONTROL_AWB_MODE_AUTO 585 * @see #CONTROL_AWB_MODE_INCANDESCENT 586 * @see #CONTROL_AWB_MODE_FLUORESCENT 587 * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT 588 * @see #CONTROL_AWB_MODE_DAYLIGHT 589 * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT 590 * @see #CONTROL_AWB_MODE_TWILIGHT 591 * @see #CONTROL_AWB_MODE_SHADE 592 */ 593 public static final Key<Integer> CONTROL_AWB_MODE = 594 new Key<Integer>("android.control.awbMode", int.class); 595 596 /** 597 * <p> 598 * List of areas to use for illuminant 599 * estimation 600 * </p> 601 * <p> 602 * Only used in AUTO mode. 603 * </p><p> 604 * Each area is a rectangle plus weight: xmin, ymin, 605 * xmax, ymax, weight. The rectangle is defined inclusive of the 606 * specified coordinates. 
607 * </p><p> 608 * The coordinate system is based on the active pixel array, 609 * with (0,0) being the top-left pixel in the active pixel array, and 610 * (android.sensor.info.activeArraySize.width - 1, 611 * android.sensor.info.activeArraySize.height - 1) being the 612 * bottom-right pixel in the active pixel array. The weight 613 * should be nonnegative. 614 * </p><p> 615 * If all regions have 0 weight, then no specific metering area 616 * needs to be used by the HAL. If the metering region is 617 * outside the current android.scaler.cropRegion, the HAL 618 * should ignore the sections outside the region and output the 619 * used sections in the frame metadata 620 * </p> 621 */ 622 public static final Key<int[]> CONTROL_AWB_REGIONS = 623 new Key<int[]>("android.control.awbRegions", int[].class); 624 625 /** 626 * <p> 627 * Information to 3A routines about the purpose 628 * of this capture, to help decide optimal 3A 629 * strategy 630 * </p> 631 * <p> 632 * Only used if android.control.mode != OFF. 633 * </p> 634 * @see #CONTROL_CAPTURE_INTENT_CUSTOM 635 * @see #CONTROL_CAPTURE_INTENT_PREVIEW 636 * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE 637 * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD 638 * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT 639 * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG 640 */ 641 public static final Key<Integer> CONTROL_CAPTURE_INTENT = 642 new Key<Integer>("android.control.captureIntent", int.class); 643 644 /** 645 * <p> 646 * Whether any special color effect is in use. 
647 * Only used if android.control.mode != OFF 648 * </p> 649 * @see #CONTROL_EFFECT_MODE_OFF 650 * @see #CONTROL_EFFECT_MODE_MONO 651 * @see #CONTROL_EFFECT_MODE_NEGATIVE 652 * @see #CONTROL_EFFECT_MODE_SOLARIZE 653 * @see #CONTROL_EFFECT_MODE_SEPIA 654 * @see #CONTROL_EFFECT_MODE_POSTERIZE 655 * @see #CONTROL_EFFECT_MODE_WHITEBOARD 656 * @see #CONTROL_EFFECT_MODE_BLACKBOARD 657 * @see #CONTROL_EFFECT_MODE_AQUA 658 */ 659 public static final Key<Integer> CONTROL_EFFECT_MODE = 660 new Key<Integer>("android.control.effectMode", int.class); 661 662 /** 663 * <p> 664 * Overall mode of 3A control 665 * routines 666 * </p> 667 * @see #CONTROL_MODE_OFF 668 * @see #CONTROL_MODE_AUTO 669 * @see #CONTROL_MODE_USE_SCENE_MODE 670 */ 671 public static final Key<Integer> CONTROL_MODE = 672 new Key<Integer>("android.control.mode", int.class); 673 674 /** 675 * <p> 676 * Which scene mode is active when 677 * android.control.mode = SCENE_MODE 678 * </p> 679 * @see #CONTROL_SCENE_MODE_UNSUPPORTED 680 * @see #CONTROL_SCENE_MODE_FACE_PRIORITY 681 * @see #CONTROL_SCENE_MODE_ACTION 682 * @see #CONTROL_SCENE_MODE_PORTRAIT 683 * @see #CONTROL_SCENE_MODE_LANDSCAPE 684 * @see #CONTROL_SCENE_MODE_NIGHT 685 * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT 686 * @see #CONTROL_SCENE_MODE_THEATRE 687 * @see #CONTROL_SCENE_MODE_BEACH 688 * @see #CONTROL_SCENE_MODE_SNOW 689 * @see #CONTROL_SCENE_MODE_SUNSET 690 * @see #CONTROL_SCENE_MODE_STEADYPHOTO 691 * @see #CONTROL_SCENE_MODE_FIREWORKS 692 * @see #CONTROL_SCENE_MODE_SPORTS 693 * @see #CONTROL_SCENE_MODE_PARTY 694 * @see #CONTROL_SCENE_MODE_CANDLELIGHT 695 * @see #CONTROL_SCENE_MODE_BARCODE 696 */ 697 public static final Key<Integer> CONTROL_SCENE_MODE = 698 new Key<Integer>("android.control.sceneMode", int.class); 699 700 /** 701 * <p> 702 * Whether video stabilization is 703 * active 704 * </p> 705 * <p> 706 * If enabled, video stabilization can modify the 707 * android.scaler.cropRegion to keep the video stream 708 * stabilized 709 * </p> 710 */ 
711 public static final Key<Boolean> CONTROL_VIDEO_STABILIZATION_MODE = 712 new Key<Boolean>("android.control.videoStabilizationMode", boolean.class); 713 714 /** 715 * <p> 716 * Operation mode for edge 717 * enhancement 718 * </p> 719 * @see #EDGE_MODE_OFF 720 * @see #EDGE_MODE_FAST 721 * @see #EDGE_MODE_HIGH_QUALITY 722 */ 723 public static final Key<Integer> EDGE_MODE = 724 new Key<Integer>("android.edge.mode", int.class); 725 726 /** 727 * <p> 728 * Select flash operation mode 729 * </p> 730 * @see #FLASH_MODE_OFF 731 * @see #FLASH_MODE_SINGLE 732 * @see #FLASH_MODE_TORCH 733 */ 734 public static final Key<Integer> FLASH_MODE = 735 new Key<Integer>("android.flash.mode", int.class); 736 737 /** 738 * <p> 739 * GPS coordinates to include in output JPEG 740 * EXIF 741 * </p> 742 */ 743 public static final Key<double[]> JPEG_GPS_COORDINATES = 744 new Key<double[]>("android.jpeg.gpsCoordinates", double[].class); 745 746 /** 747 * <p> 748 * 32 characters describing GPS algorithm to 749 * include in EXIF 750 * </p> 751 */ 752 public static final Key<String> JPEG_GPS_PROCESSING_METHOD = 753 new Key<String>("android.jpeg.gpsProcessingMethod", String.class); 754 755 /** 756 * <p> 757 * Time GPS fix was made to include in 758 * EXIF 759 * </p> 760 */ 761 public static final Key<Long> JPEG_GPS_TIMESTAMP = 762 new Key<Long>("android.jpeg.gpsTimestamp", long.class); 763 764 /** 765 * <p> 766 * Orientation of JPEG image to 767 * write 768 * </p> 769 */ 770 public static final Key<Integer> JPEG_ORIENTATION = 771 new Key<Integer>("android.jpeg.orientation", int.class); 772 773 /** 774 * <p> 775 * Compression quality of the final JPEG 776 * image 777 * </p> 778 * <p> 779 * 85-95 is typical usage range 780 * </p> 781 */ 782 public static final Key<Byte> JPEG_QUALITY = 783 new Key<Byte>("android.jpeg.quality", byte.class); 784 785 /** 786 * <p> 787 * Compression quality of JPEG 788 * thumbnail 789 * </p> 790 */ 791 public static final Key<Byte> JPEG_THUMBNAIL_QUALITY = 792 new 
Key<Byte>("android.jpeg.thumbnailQuality", byte.class); 793 794 /** 795 * <p> 796 * Resolution of embedded JPEG 797 * thumbnail 798 * </p> 799 */ 800 public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE = 801 new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class); 802 803 /** 804 * <p> 805 * Size of the lens aperture 806 * </p> 807 * <p> 808 * Will not be supported on most devices. Can only 809 * pick from supported list 810 * </p> 811 */ 812 public static final Key<Float> LENS_APERTURE = 813 new Key<Float>("android.lens.aperture", float.class); 814 815 /** 816 * <p> 817 * State of lens neutral density 818 * filter(s) 819 * </p> 820 * <p> 821 * Will not be supported on most devices. Can only 822 * pick from supported list 823 * </p> 824 */ 825 public static final Key<Float> LENS_FILTER_DENSITY = 826 new Key<Float>("android.lens.filterDensity", float.class); 827 828 /** 829 * <p> 830 * Lens optical zoom setting 831 * </p> 832 * <p> 833 * Will not be supported on most devices. 834 * </p> 835 */ 836 public static final Key<Float> LENS_FOCAL_LENGTH = 837 new Key<Float>("android.lens.focalLength", float.class); 838 839 /** 840 * <p> 841 * Distance to plane of sharpest focus, 842 * measured from frontmost surface of the lens 843 * </p> 844 * <p> 845 * 0 = infinity focus. Used value should be clamped 846 * to (0,minimum focus distance) 847 * </p> 848 */ 849 public static final Key<Float> LENS_FOCUS_DISTANCE = 850 new Key<Float>("android.lens.focusDistance", float.class); 851 852 /** 853 * <p> 854 * Whether optical image stabilization is 855 * enabled. 856 * </p> 857 * <p> 858 * Will not be supported on most devices. 
859 * </p> 860 * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF 861 * @see #LENS_OPTICAL_STABILIZATION_MODE_ON 862 */ 863 public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE = 864 new Key<Integer>("android.lens.opticalStabilizationMode", int.class); 865 866 /** 867 * <p> 868 * Mode of operation for the noise reduction 869 * algorithm 870 * </p> 871 * @see #NOISE_REDUCTION_MODE_OFF 872 * @see #NOISE_REDUCTION_MODE_FAST 873 * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY 874 */ 875 public static final Key<Integer> NOISE_REDUCTION_MODE = 876 new Key<Integer>("android.noiseReduction.mode", int.class); 877 878 /** 879 * <p> 880 * An application-specified ID for the current 881 * request. Must be maintained unchanged in output 882 * frame 883 * </p> 884 * 885 * @hide 886 */ 887 public static final Key<Integer> REQUEST_ID = 888 new Key<Integer>("android.request.id", int.class); 889 890 /** 891 * <p> 892 * (x, y, width, height). 893 * </p><p> 894 * A rectangle with the top-level corner of (x,y) and size 895 * (width, height). The region of the sensor that is used for 896 * output. Each stream must use this rectangle to produce its 897 * output, cropping to a smaller region if necessary to 898 * maintain the stream's aspect ratio. 899 * </p><p> 900 * HAL2.x uses only (x, y, width) 901 * </p> 902 * <p> 903 * Any additional per-stream cropping must be done to 904 * maximize the final pixel area of the stream. 905 * </p><p> 906 * For example, if the crop region is set to a 4:3 aspect 907 * ratio, then 4:3 streams should use the exact crop 908 * region. 16:9 streams should further crop vertically 909 * (letterbox). 910 * </p><p> 911 * Conversely, if the crop region is set to a 16:9, then 4:3 912 * outputs should crop horizontally (pillarbox), and 16:9 913 * streams should match exactly. These additional crops must 914 * be centered within the crop region. 
    /**
     * <p>
     * The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor(
     * activeArraySize.height / android.scaler.maxDigitalZoom ),
     * respectively.
     * </p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);

    /**
     * <p>
     * Duration each pixel is exposed to
     * light.
     * </p><p>
     * If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).
     * </p>
     * <p>
     * 1/10000 - 30 sec range. No bulb mode.
     * </p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>
     * Duration from start of frame exposure to
     * start of next frame exposure.
     * </p>
     * <p>
     * Exposure time has priority, so duration is set to
     * max(duration, exposure time + overhead).
     * </p>
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>
     * Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.
     * </p><p>
     * If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).
     * </p>
     * <p>
     * ISO 12232:2006 REI method.
     * </p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>
     * State of the face detector
     * unit.
     * </p>
     * <p>
     * Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * android.statistics.info.availableFaceDetectModes.
     * </p>
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>
     * Whether the HAL needs to output the lens
     * shading map in output result metadata.
     * </p>
     * <p>
     * When set to ON,
     * android.statistics.lensShadingMap must be provided in
     * the output result metadata.
     * </p>
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
     */
    public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
            new Key<Integer>("android.statistics.lensShadingMapMode", int.class);

    /**
     * <p>
     * Table mapping blue input values to output
     * values.
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the blue
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>
     * Table mapping green input values to output
     * values.
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the green
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * See android.tonemap.curveRed for more details.
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);

    /**
     * <p>
     * Table mapping red input values to output
     * values.
     * </p>
     * <p>
     * Tonemapping / contrast / gamma curve for the red
     * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
     * </p><p>
     * Since the input and output ranges may vary depending on
     * the camera pipeline, the input and output pixel values
     * are represented by normalized floating-point values
     * between 0 and 1, with 0 == black and 1 == white.
     * </p><p>
     * The curve should be linearly interpolated between the
     * defined points. The points will be listed in increasing
     * order of P_IN. For example, if the array is: [0.0, 0.0,
     * 0.3, 0.5, 1.0, 1.0], then the input-&gt;output mapping
     * for a few sample points would be: 0 -&gt; 0, 0.15 -&gt;
     * 0.25, 0.3 -&gt; 0.5, 0.5 -&gt; 0.64
     * </p>
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);

    /**
     * <p>
     * Tonemap mode to use for this request; one of the
     * TONEMAP_MODE_* values referenced below.
     * </p>
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>
     * This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.
     * </p><p>
     * In particular, the LED *must* always be on when the data could be
     * transmitted off the device. The LED *should* always be on whenever
     * data is stored locally on the device.
     * </p><p>
     * The LED *may* be off if a trusted application is using the data that
     * doesn't violate the above rules.
     * </p>
     *
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>
     * Whether black-level compensation is locked
     * to its current values, or is free to vary.
     * </p>
     * <p>
     * When set to ON, the values used for black-level
     * compensation must not change until the lock is set to
     * OFF.
     * </p><p>
     * Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the HAL must report whether setting the
     * black level lock was successful in the output result
     * metadata.
     * </p><p>
     * The black level locking must happen at the sensor, and not at the ISP.
     * If for some reason black level locking is no longer legal (for example,
     * the analog gain has changed, which forces black levels to be
     * recalculated), then the HAL is free to override this request (and it
     * must report 'OFF' when this does happen) until the next time locking
     * is legal again.
     * </p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@*/
}