CaptureRequest.java revision 49a3ca9330d213fe35280c0af78a4d21acb98234
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.Parcel;
import android.os.Parcelable;
import android.view.Surface;

import java.util.HashSet;
import java.util.Objects;


/**
 * <p>An immutable package of settings and outputs needed to capture a single
 * image from the camera device.</p>
 *
 * <p>Contains the configuration for the capture hardware (sensor, lens, flash),
 * the processing pipeline, the control algorithms, and the output buffers. Also
 * contains the list of target Surfaces to send image data to for this
 * capture.</p>
 *
 * <p>CaptureRequests can be created by using a {@link Builder} instance,
 * obtained by calling {@link CameraDevice#createCaptureRequest}.</p>
 *
 * <p>CaptureRequests are given to {@link CameraDevice#capture} or
 * {@link CameraDevice#setRepeatingRequest} to capture images from a camera.</p>
 *
 * <p>Each request can specify a different subset of target Surfaces for the
 * camera to send the captured data to. All the surfaces used in a request must
 * be part of the surface list given to the last call to
 * {@link CameraDevice#configureOutputs}, when the request is submitted to the
 * camera device.</p>
 *
 * <p>For example, a request meant for repeating preview might only include the
 * Surface for the preview SurfaceView or SurfaceTexture, while a
 * high-resolution still capture would also include a Surface from an ImageReader
 * configured for high-resolution JPEG images.</p>
 *
 * @see CameraDevice#capture
 * @see CameraDevice#setRepeatingRequest
 * @see CameraDevice#createCaptureRequest
 */
public final class CaptureRequest extends CameraMetadata implements Parcelable {

    private final HashSet<Surface> mSurfaceSet;
    private final CameraMetadataNative mSettings;

    private Object mUserTag;

    /**
     * Construct an empty request.
     *
     * Used by Binder to unparcel this object only.
     */
    private CaptureRequest() {
        mSettings = new CameraMetadataNative();
        mSurfaceSet = new HashSet<Surface>();
    }

    /**
     * Clone from a source capture request.
     *
     * Used by the Builder to create an immutable copy.
     */
    @SuppressWarnings("unchecked")
    private CaptureRequest(CaptureRequest source) {
        mSettings = new CameraMetadataNative(source.mSettings);
        mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone();
        mUserTag = source.mUserTag;
    }

    /**
     * Take ownership of the passed-in settings.
     *
     * Used by the Builder to create a mutable CaptureRequest.
     */
    private CaptureRequest(CameraMetadataNative settings) {
        mSettings = settings;
        mSurfaceSet = new HashSet<Surface>();
    }

    @SuppressWarnings("unchecked")
    @Override
    public <T> T get(Key<T> key) {
        return mSettings.get(key);
    }

    /**
     * Retrieve the tag for this request, if any.
     *
     * <p>This tag is not used for anything by the camera device, but can be
     * used by an application to easily identify a CaptureRequest when it is
     * returned by
     * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
     * </p>
     *
     * @return the last tag Object set on this request, or {@code null} if
     *     no tag has been set.
     * @see Builder#setTag
     */
    public Object getTag() {
        return mUserTag;
    }

    /**
     * Determine whether this CaptureRequest is equal to another CaptureRequest.
     *
     * <p>A request is considered equal to another if its set of key/values is equal, its
     * list of output surfaces is equal, and the user tag is equal.</p>
     *
     * @param other Another instance of CaptureRequest.
     *
     * @return True if the requests are the same, false otherwise.
     */
    @Override
    public boolean equals(Object other) {
        return other instanceof CaptureRequest
                && equals((CaptureRequest)other);
    }

    private boolean equals(CaptureRequest other) {
        return other != null
                && Objects.equals(mUserTag, other.mUserTag)
                && mSurfaceSet.equals(other.mSurfaceSet)
                && mSettings.equals(other.mSettings);
    }

    @Override
    public int hashCode() {
        return mSettings.hashCode();
    }

    public static final Parcelable.Creator<CaptureRequest> CREATOR =
            new Parcelable.Creator<CaptureRequest>() {
        @Override
        public CaptureRequest createFromParcel(Parcel in) {
            CaptureRequest request = new CaptureRequest();
            request.readFromParcel(in);

            return request;
        }

        @Override
        public CaptureRequest[] newArray(int size) {
            return new CaptureRequest[size];
        }
    };

    /**
     * Expand this object from a Parcel.
     * Hidden since this breaks the immutability of CaptureRequest, but is
     * needed to receive CaptureRequests with aidl.
     *
     * @param in The parcel from which the object should be read
     * @hide
     */
    public void readFromParcel(Parcel in) {
        mSettings.readFromParcel(in);

        mSurfaceSet.clear();

        Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader());

        if (parcelableArray == null) {
            return;
        }

        for (Parcelable p : parcelableArray) {
            Surface s = (Surface) p;
            mSurfaceSet.add(s);
        }
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        mSettings.writeToParcel(dest, flags);
        dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags);
    }

    /**
     * A builder for capture requests.
     *
     * <p>To obtain a builder instance, use the
     * {@link CameraDevice#createCaptureRequest} method, which initializes the
     * request fields to one of the templates defined in {@link CameraDevice}.</p>
     *
     * @see CameraDevice#createCaptureRequest
     * @see #TEMPLATE_PREVIEW
     * @see #TEMPLATE_RECORD
     * @see #TEMPLATE_STILL_CAPTURE
     * @see #TEMPLATE_VIDEO_SNAPSHOT
     * @see #TEMPLATE_MANUAL
     */
    public final static class Builder {

        private final CaptureRequest mRequest;

        /**
         * Initialize the builder using the template; the request takes
         * ownership of the template.
         *
         * @hide
         */
        public Builder(CameraMetadataNative template) {
            mRequest = new CaptureRequest(template);
        }

        /**
         * <p>Add a surface to the list of targets for this request.</p>
         *
         * <p>The Surface added must be one of the surfaces included in the most
         * recent call to {@link CameraDevice#configureOutputs}, when the
         * request is given to the camera device.</p>
         *
         * <p>Adding a target more than once has no effect.</p>
         *
         * @param outputTarget Surface to use as an output target for this request
         */
        public void addTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.add(outputTarget);
        }

        /**
         * <p>Remove a surface from the list of targets for this request.</p>
         *
         * <p>Removing a target that is not currently added has no effect.</p>
         *
         * @param outputTarget Surface to remove as an output target for this request
         */
        public void removeTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.remove(outputTarget);
        }

        /**
         * Set a capture request field to a value. The field definitions can be
         * found in {@link CaptureRequest}.
         *
         * @param key The metadata field to write.
         * @param value The value to set the field to, which must be of a matching
         *     type to the key.
         */
        public <T> void set(Key<T> key, T value) {
            mRequest.mSettings.set(key, value);
        }

        /**
         * Get a capture request field value. The field definitions can be
         * found in {@link CaptureRequest}.
         *
         * @throws IllegalArgumentException if the key was not valid
         *
         * @param key The metadata field to read.
         * @return The value of that key, or {@code null} if the field is not set.
         */
        public <T> T get(Key<T> key) {
            return mRequest.mSettings.get(key);
        }

        /**
         * Set a tag for this request.
         *
         * <p>This tag is not used for anything by the camera device, but can be
         * used by an application to easily identify a CaptureRequest when it is
         * returned by
         * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
         *
         * @param tag an arbitrary Object to store with this request
         * @see CaptureRequest#getTag
         */
        public void setTag(Object tag) {
            mRequest.mUserTag = tag;
        }

        /**
         * Build a request using the current target Surfaces and settings.
         *
         * @return A new capture request instance, ready for submission to the
         *     camera device.
         */
        public CaptureRequest build() {
            return new CaptureRequest(mRequest);
        }


        /**
         * @hide
         */
        public boolean isEmpty() {
            return mRequest.mSettings.isEmpty();
        }

    }
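
    /**
     * Illustrative sketch only (an editorial example, not part of the camera2
     * API): a typical way to assemble a repeating preview request with the
     * {@link Builder}. The {@code previewSurface} is assumed to have been
     * included in the last {@link CameraDevice#configureOutputs} call, and the
     * built request would then be handed to
     * {@link CameraDevice#setRepeatingRequest}.
     */
    private static CaptureRequest buildPreviewRequestExample(CameraDevice camera,
            Surface previewSurface) throws CameraAccessException {
        // Start from the preview template so 3A defaults suit a viewfinder.
        CaptureRequest.Builder builder =
                camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        // Send each capture's image data to the preview surface.
        builder.addTarget(previewSurface);
        // Example override of a template default: continuous autofocus.
        builder.set(CONTROL_AF_MODE, CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        return builder.build();
    }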

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/


    /**
     * <p>The mode control selects how the image data is converted from the
     * sensor's native color into linear sRGB color.</p>
     * <p>When auto-white balance is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
     * control is overridden by the AWB routine. When AWB is disabled, the
     * application controls how the color mapping is performed.</p>
     * <p>We define the expected processing pipeline below. For consistency
     * across devices, this is always the case with TRANSFORM_MATRIX.</p>
     * <p>When either FULL or HIGH_QUALITY is used, the camera device may
     * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
     * camera device (in the results) and be roughly correct.</p>
     * <p>Switching to TRANSFORM_MATRIX and using the data provided from
     * FAST or HIGH_QUALITY will yield a picture with the same white point
     * as what was produced by the camera device in the earlier frame.</p>
     * <p>The expected processing pipeline is as follows:</p>
     * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
     * <p>The white balance is encoded by two values, a 4-channel white-balance
     * gain vector (applied in the Bayer domain), and a 3x3 color transform
     * matrix (applied after demosaic).</p>
     * <p>The 4-channel white-balance gains are defined as:</p>
     * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
     * </code></pre>
     * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
     * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
     * These may be identical for a given camera device implementation; if
     * the camera device does not support a separate gain for even/odd green
     * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
     * <code>G_even</code> in the output result metadata.</p>
     * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
     * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
     * </code></pre>
     * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
     * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
     * <p>with colors as follows:</p>
     * <pre><code>r' = I0r + I1g + I2b
     * g' = I3r + I4g + I5b
     * b' = I6r + I7g + I8b
     * </code></pre>
     * <p>Both the input and output value ranges must match.
     * Overflow/underflow values are clipped to fit within the range.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
     * @see CaptureRequest#CONTROL_AWB_MODE
     * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
     * @see #COLOR_CORRECTION_MODE_FAST
     * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> COLOR_CORRECTION_MODE =
            new Key<Integer>("android.colorCorrection.mode", int.class);

    /**
     * <p>A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space</p>
     * <p>This matrix is either set by the camera device when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     * <p>In the latter case, the camera device may round the matrix to account
     * for precision issues; the final rounded matrix should be reported back
     * in this matrix result metadata. The transform should keep the magnitude
     * of the output color values within <code>[0, 1.0]</code> (assuming input color
     * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>Gains applying to Bayer raw color channels for
     * white-balance</p>
     * <p>The 4-channel white-balance gains are defined in
     * the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
     * for green pixels on even rows of the output, and <code>G_odd</code>
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
     * <code>G_even</code> in the output result metadata.</p>
     * <p>This array is either set by HAL when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     * <p>The output should be the gains actually applied by the HAL to
     * the current frame.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);
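
    // Illustrative sketch (not part of the generated metadata entries): using
    // TRANSFORM_MATRIX mode to supply manual white balance through the keys
    // above. The gain and transform numbers are arbitrary example values,
    // "builder" is assumed to be a CaptureRequest.Builder for this request,
    // and a Rational(numerator, denominator) constructor is assumed.
    //
    //     builder.set(CaptureRequest.CONTROL_AWB_MODE,
    //             CaptureRequest.CONTROL_AWB_MODE_OFF);
    //     builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
    //             CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
    //     // [R, G_even, G_odd, B] gains, applied in the Bayer domain
    //     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
    //             new float[] {2.0f, 1.0f, 1.0f, 1.5f});
    //     // 3x3 sensor-to-linear-sRGB transform, row-major, as 9 entries
    //     builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, new Rational[] {
    //             new Rational(1, 1), new Rational(0, 1), new Rational(0, 1),
    //             new Rational(0, 1), new Rational(1, 1), new Rational(0, 1),
    //             new Rational(0, 1), new Rational(0, 1), new Rational(1, 1)});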

    /**
     * <p>The desired setting for the camera device's auto-exposure
     * algorithm's antibanding compensation.</p>
     * <p>Some kinds of lighting fixtures, such as some fluorescent
     * lights, flicker at the rate of the power supply frequency
     * (60Hz or 50Hz, depending on country). While this is
     * typically not noticeable to a person, it can be visible to
     * a camera device. If a camera sets its exposure time to the
     * wrong value, the flicker may become visible in the
     * viewfinder, or in a final captured image as a
     * set of variable-brightness bands across the image.</p>
     * <p>Therefore, the auto-exposure routines of camera devices
     * include antibanding routines that ensure that the chosen
     * exposure value will not cause such banding. The choice of
     * exposure time depends on the rate of flicker, which the
     * camera device can detect automatically, or the expected
     * rate can be selected by the application using this
     * control.</p>
     * <p>A given camera device may not support all of the possible
     * options for the antibanding mode. The
     * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
     * the available modes for a given camera device.</p>
     * <p>The default mode is AUTO, which must be supported by all
     * camera devices.</p>
     * <p>If manual exposure control is enabled (by setting
     * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
     * then this setting has no effect, and the application must
     * ensure it selects exposure times that do not cause banding
     * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
     * the application in this.</p>
     *
     * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_MODE
     * @see CaptureResult#STATISTICS_SCENE_FLICKER
     * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
     * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
     * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
     * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
     */
    public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
            new Key<Integer>("android.control.aeAntibandingMode", int.class);

    /**
     * <p>Adjustment to AE target image
     * brightness</p>
     * <p>For example, if the EV step is 0.333, a value of '6' will mean an
     * exposure compensation of +2 EV; -3 will mean an exposure
     * compensation of -1 EV.</p>
     */
    public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
            new Key<Integer>("android.control.aeExposureCompensation", int.class);

    /**
     * <p>Whether AE is currently locked to its latest
     * calculated values.</p>
     * <p>Note that even when AE is locked, the flash may be
     * fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH / ON_ALWAYS_FLASH /
     * ON_AUTO_FLASH_REDEYE.</p>
     * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
     * when AE is already locked, the camera device will not change the exposure time
     * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
     * parameters. The flash may be fired if the android.control.aeMode
     * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark.
     * If the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.</p>
     * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
     * @see CaptureResult#CONTROL_AE_STATE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_SENSITIVITY
     */
    public static final Key<Boolean> CONTROL_AE_LOCK =
            new Key<Boolean>("android.control.aeLock", boolean.class);

    /**
     * <p>The desired mode for the camera device's
     * auto-exposure routine.</p>
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
     * AUTO.</p>
     * <p>When set to any of the ON modes, the camera device's
     * auto-exposure routine is enabled, overriding the
     * application's selected exposure time, sensor sensitivity,
     * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
     * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
     * is selected, the camera device's flash unit controls are
     * also overridden.</p>
     * <p>The FLASH modes are only available if the camera device
     * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
     * <p>If flash TORCH mode is desired, this field must be set to
     * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
     * <p>When set to any of the ON modes, the values chosen by the
     * camera device auto-exposure routine for the overridden
     * fields for a given capture will be available in its
     * CaptureResult.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
     * @see CaptureRequest#FLASH_MODE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     * @see CaptureRequest#SENSOR_SENSITIVITY
     * @see #CONTROL_AE_MODE_OFF
     * @see #CONTROL_AE_MODE_ON
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
     * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
     */
    public static final Key<Integer> CONTROL_AE_MODE =
            new Key<Integer>("android.control.aeMode", int.class);

    /**
     * <p>List of areas to use for
     * metering.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL.
     * If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);

    /**
     * <p>Range over which fps can be adjusted to
     * maintain exposure</p>
     * <p>Only constrains AE algorithm, not manual control
     * of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}</p>
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     */
    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);

    /**
     * <p>Whether the camera device will trigger a precapture
     * metering sequence when it processes this request.</p>
     * <p>This entry is normally set to IDLE, or is not
     * included at all in the request settings. When included and
     * set to START, the camera device will trigger the autoexposure
     * precapture metering sequence.</p>
     * <p>The effect of AE precapture trigger depends on the current
     * AE mode and state; see {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture
     * state transition details.</p>
     *
     * @see CaptureResult#CONTROL_AE_STATE
     * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
     * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
            new Key<Integer>("android.control.aePrecaptureTrigger", int.class);

    /**
     * <p>Whether AF is currently enabled, and what
     * mode it is set to</p>
     * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
     * <p>If the lens is controlled by the camera device auto-focus algorithm,
     * the camera device will report the current AF status in android.control.afState
     * in result metadata.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);
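
    // Illustrative sketch (not part of the generated metadata entries): the
    // region keys described above take a flat int[] of 5-tuples in
    // active-array coordinates, [xmin, ymin, xmax, ymax, weight]. The numbers
    // below are arbitrary example values for a hypothetical 4000x3000 active
    // array, and "builder" is assumed to be a CaptureRequest.Builder.
    //
    //     // One metering rectangle covering the center quarter of the
    //     // active array, with weight 1:
    //     builder.set(CaptureRequest.CONTROL_AE_REGIONS,
    //             new int[] {1000, 750, 3000, 2250, 1});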

    /**
     * <p>List of areas to use for focus
     * estimation.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);

    /**
     * <p>Whether the camera device will trigger autofocus for this request.</p>
     * <p>This entry is normally set to IDLE, or is not
     * included at all in the request settings.</p>
     * <p>When included and set to START, the camera device will trigger the
     * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
     * <p>When set to CANCEL, the camera device will cancel any active trigger,
     * and return to its initial AF state.</p>
     * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what that means for each AF mode.</p>
     *
     * @see CaptureResult#CONTROL_AF_STATE
     * @see #CONTROL_AF_TRIGGER_IDLE
     * @see #CONTROL_AF_TRIGGER_START
     * @see #CONTROL_AF_TRIGGER_CANCEL
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER =
            new Key<Integer>("android.control.afTrigger", int.class);

    /**
     * <p>Whether AWB is currently locked to its
     * latest calculated values</p>
     * <p>Note that AWB lock is only meaningful for AUTO
     * mode; in other modes, AWB is already fixed to a specific
     * setting</p>
     */
    public static final Key<Boolean> CONTROL_AWB_LOCK =
            new Key<Boolean>("android.control.awbLock", boolean.class);
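
    // Illustrative sketch (not part of the generated metadata entries): a
    // typical single-shot autofocus sequence using CONTROL_AF_TRIGGER. The
    // variables ("camera", "repeatingBuilder", "listener", "handler") and the
    // exact capture()/setRepeatingRequest() signatures are assumptions for
    // the example; see CameraDevice for the authoritative API.
    //
    //     repeatingBuilder.set(CaptureRequest.CONTROL_AF_MODE,
    //             CaptureRequest.CONTROL_AF_MODE_AUTO);
    //     camera.setRepeatingRequest(repeatingBuilder.build(), listener, handler);
    //
    //     // Submit one request with the trigger set to START, then watch
    //     // android.control.afState in the following CaptureResults.
    //     repeatingBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
    //             CaptureRequest.CONTROL_AF_TRIGGER_START);
    //     camera.capture(repeatingBuilder.build(), listener, handler);
    //
    //     // Reset the trigger so later repeating requests do not re-trigger AF.
    //     repeatingBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
    //             CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
    //     camera.setRepeatingRequest(repeatingBuilder.build(), listener, handler);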

    /**
     * <p>Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is</p>
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
     * <p>When set to the ON mode, the camera device's auto white balance
     * routine is enabled, overriding the application's selected
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
     * <p>When set to the OFF mode, the camera device's auto white balance
     * routine is disabled. The application manually controls the white
     * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, android.colorCorrection.gains
     * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
     * <p>When set to any other modes, the camera device's auto white balance
     * routine is disabled. The camera device uses each particular illumination
     * target for white balance adjustment.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>List of areas to use for illuminant
     * estimation.</p>
     * <p>Only used in AUTO mode.</p>
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);

    /**
     * <p>Information to the camera device 3A (auto-exposure,
     * auto-focus, auto-white balance) routines about the purpose
     * of this capture, to help the camera device to decide the optimal 3A
     * strategy.</p>
     * <p>This control is only effective if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code>
     * and any 3A routine is active.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_CAPTURE_INTENT_CUSTOM
     * @see #CONTROL_CAPTURE_INTENT_PREVIEW
     * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
     * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
     * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
     * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
     */
    public static final Key<Integer> CONTROL_CAPTURE_INTENT =
            new Key<Integer>("android.control.captureIntent", int.class);

    /**
     * <p>A special color effect to apply.</p>
     * <p>When this mode is set, a color effect will be applied
     * to images produced by the camera device.
     * The interpretation
     * and implementation of these color effects is left to the
     * implementor of the camera device, and should not be
     * depended on to be consistent (or present) across all
     * devices.</p>
     * <p>A color effect will only be applied if
     * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_EFFECT_MODE_OFF
     * @see #CONTROL_EFFECT_MODE_MONO
     * @see #CONTROL_EFFECT_MODE_NEGATIVE
     * @see #CONTROL_EFFECT_MODE_SOLARIZE
     * @see #CONTROL_EFFECT_MODE_SEPIA
     * @see #CONTROL_EFFECT_MODE_POSTERIZE
     * @see #CONTROL_EFFECT_MODE_WHITEBOARD
     * @see #CONTROL_EFFECT_MODE_BLACKBOARD
     * @see #CONTROL_EFFECT_MODE_AQUA
     */
    public static final Key<Integer> CONTROL_EFFECT_MODE =
            new Key<Integer>("android.control.effectMode", int.class);

    /**
     * <p>Overall mode of 3A control
     * routines</p>
     * <p>High-level 3A control. When set to OFF, all 3A control
     * by the camera device is disabled. The application must set the fields for
     * capture parameters itself.</p>
     * <p>When set to AUTO, the individual algorithm controls in
     * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
     * <p>When set to USE_SCENE_MODE, the individual controls in
     * android.control.* are mostly disabled, and the camera device implements
     * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
     * as it wishes. The camera device scene mode 3A settings are provided by
     * android.control.sceneModeOverrides.</p>
     *
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>A camera mode optimized for conditions typical in a particular
     * capture setting.</p>
     * <p>This is the mode that is active when
     * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY,
     * these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
     * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.</p>
     * <p>The interpretation and implementation of these scene modes is left
     * to the implementor of the camera device.
     * Their behavior will not be
     * consistent across all devices, and any given device may only implement
     * a subset of these modes.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see CaptureRequest#CONTROL_AWB_MODE
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_SCENE_MODE_DISABLED
     * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
     * @see #CONTROL_SCENE_MODE_ACTION
     * @see #CONTROL_SCENE_MODE_PORTRAIT
     * @see #CONTROL_SCENE_MODE_LANDSCAPE
     * @see #CONTROL_SCENE_MODE_NIGHT
     * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
     * @see #CONTROL_SCENE_MODE_THEATRE
     * @see #CONTROL_SCENE_MODE_BEACH
     * @see #CONTROL_SCENE_MODE_SNOW
     * @see #CONTROL_SCENE_MODE_SUNSET
     * @see #CONTROL_SCENE_MODE_STEADYPHOTO
     * @see #CONTROL_SCENE_MODE_FIREWORKS
     * @see #CONTROL_SCENE_MODE_SPORTS
     * @see #CONTROL_SCENE_MODE_PARTY
     * @see #CONTROL_SCENE_MODE_CANDLELIGHT
     * @see #CONTROL_SCENE_MODE_BARCODE
     */
    public static final Key<Integer> CONTROL_SCENE_MODE =
            new Key<Integer>("android.control.sceneMode", int.class);

    /**
     * <p>Whether video stabilization is
     * active</p>
     * <p>If enabled, video stabilization can modify the
     * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream
     * stabilized</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     */
    public static final Key<Boolean> CONTROL_VIDEO_STABILIZATION_MODE =
            new Key<Boolean>("android.control.videoStabilizationMode", boolean.class);

    /**
     * <p>Operation mode for edge
     * enhancement</p>
     * <p>Edge/sharpness/detail enhancement. OFF means no
     * enhancement will be applied by the HAL.</p>
     * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
     * will be applied. HIGH_QUALITY mode indicates that the
     * camera device will use the highest-quality enhancement algorithms,
     * even if it slows down capture rate. FAST means the camera device will
     * not slow down capture rate when applying edge enhancement.</p>
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);
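
    // Illustrative sketch (not part of the generated metadata entries):
    // selecting a scene mode. Scene modes only take effect when
    // android.control.mode is set to USE_SCENE_MODE; "builder" is assumed to
    // be a CaptureRequest.Builder for this request.
    //
    //     builder.set(CaptureRequest.CONTROL_MODE,
    //             CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
    //     builder.set(CaptureRequest.CONTROL_SCENE_MODE,
    //             CaptureRequest.CONTROL_SCENE_MODE_NIGHT);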

    /**
     * <p>The desired mode for the camera device's flash control.</p>
     * <p>This control is only effective when flash unit is available
     * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
     * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
     * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
     * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
     * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
     * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
     * device's auto-exposure routine's result. When used in still capture case, this
     * control should be used along with AE precapture metering sequence
     * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}), otherwise, the image may be incorrectly exposed.</p>
     * <p>When set to TORCH, the flash will be on continuously. This mode can be used
     * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
     * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
     * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
     * @see CaptureResult#FLASH_STATE
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>GPS coordinates to include in output JPEG
     * EXIF</p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>32 characters describing GPS algorithm to
     * include in EXIF</p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>Time GPS fix was made to include in
     * EXIF</p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>Orientation of JPEG image to
     * write</p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>Compression quality of the final JPEG
     * image</p>
     * <p>85-95 is typical usage range</p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>Compression quality of JPEG
     * thumbnail</p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>Resolution of embedded JPEG thumbnail</p>
     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
     * but the captured JPEG will still be a valid image.</p>
     * <p>When a jpeg image capture is issued, the thumbnail size selected should have
     * the same aspect ratio as the jpeg image.</p>
     */
    public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
            new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);
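
    // Illustrative sketch (not part of the generated metadata entries):
    // filling in the JPEG/EXIF fields above for a still capture. The concrete
    // values, the Size(width, height) constructor, the (latitude, longitude,
    // altitude) ordering of the GPS array, and "builder" (a
    // CaptureRequest.Builder) are all assumptions for the example.
    //
    //     builder.set(CaptureRequest.JPEG_ORIENTATION, 90);          // degrees
    //     builder.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
    //     builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE,
    //             new android.hardware.camera2.Size(320, 240));
    //     builder.set(CaptureRequest.JPEG_GPS_COORDINATES,
    //             new double[] {37.4220, -122.0841, 0.0});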

    /**
     * <p>The ratio of lens focal length to the effective
     * aperture diameter.</p>
     * <p>This will only be supported on the camera devices that
     * have variable aperture lens. The aperture value can only be
     * one of the values listed in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}.</p>
     * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
     * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and android.sensor.frameDuration
     * to achieve manual exposure control.</p>
     * <p>The requested aperture value may take several frames to reach the
     * requested value; the camera device will report the current (intermediate)
     * aperture size in capture result metadata while the aperture is changing.
     * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
     * the ON modes, this will be overridden by the camera device
     * auto-exposure algorithm, the overridden values are then provided
     * back to the user in the corresponding result.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
     * @see CaptureResult#LENS_STATE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_SENSITIVITY
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>State of lens neutral density filter(s).</p>
     * <p>This will not be supported on most camera devices. On devices
     * where this is supported, this may only be set to one of the
     * values included in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}.</p>
     * <p>Lens filters are typically used to lower the amount of light the
     * sensor is exposed to (measured in steps of EV). As used here, an EV
     * step is the standard logarithmic representation, which is
     * non-negative, and inversely proportional to the amount of light
     * hitting the sensor. For example, setting this to 0 would result
     * in no reduction of the incoming light, and setting this to 2 would
     * mean that the filter is set to reduce incoming light by two stops
     * (allowing 1/4 of the prior amount of light to the sensor).</p>
     * <p>It may take several frames before the lens filter density changes
     * to the requested value. While the filter density is still changing,
     * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     *
     * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>The current lens focal length; used for optical zoom.</p>
     * <p>This setting controls the physical focal length of the camera
     * device's lens. Changing the focal length changes the field of
     * view of the camera device, and is usually used for optical zoom.</p>
     * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
     * setting won't be applied instantaneously, and it may take several
     * frames before the lens can change to the requested focal length.
     * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
     * be set to MOVING.</p>
     * <p>This is expected not to be supported on most devices.</p>
     *
     * @see CaptureRequest#LENS_APERTURE
     * @see CaptureRequest#LENS_FOCUS_DISTANCE
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>Distance to plane of sharpest focus,
     * measured from frontmost surface of the lens</p>
     * <p>0 means infinity focus.
     * Used value will be clamped
     * to [0, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}].</p>
     * <p>Like {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, this setting won't be applied
     * instantaneously, and it may take several frames before the lens
     * can move to the requested focus distance. While the lens is still moving,
     * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     *
     * @see CaptureRequest#LENS_FOCAL_LENGTH
     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>Sets whether the camera device uses optical image stabilization (OIS)
     * when capturing images.</p>
     * <p>OIS is used to compensate for motion blur due to small movements of
     * the camera during capture. Unlike digital image stabilization, OIS makes
     * use of mechanical elements to stabilize the camera sensor, and thus
     * allows for longer exposure times before camera shake becomes
     * apparent.</p>
     * <p>This is not expected to be supported on most devices.</p>
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>Mode of operation for the noise reduction
     * algorithm</p>
     * <p>Noise filtering control. OFF means no noise reduction
     * will be applied by the HAL.</p>
     * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
     * will be applied. HIGH_QUALITY mode indicates that the camera device
     * will use the highest-quality noise filtering algorithms,
     * even if it slows down capture rate. FAST means the camera device should not
     * slow down capture rate when applying noise filtering.</p>
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>An application-specified ID for the current
     * request. Must be maintained unchanged in output
     * frame</p>
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>(x, y, width, height).</p>
     * <p>A rectangle with the top-left corner of (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.</p>
     * <p>HAL2.x uses only (x, y, width)</p>
     * <p>Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.</p>
     * <p>For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).</p>
     * <p>Conversely, if the crop region is set to a 16:9, then 4:3
     * outputs should crop horizontally (pillarbox), and 16:9
     * streams should match exactly.
     * These additional crops must
     * be centered within the crop region.</p>
     * <p>The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor(
     * activeArraySize.height / android.scaler.maxDigitalZoom),
     * respectively.</p>
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);

    /**
     * <p>Duration each pixel is exposed to
     * light.</p>
     * <p>If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).</p>
     * <p>1/10000 - 30 sec range. No bulb mode</p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>Duration from start of frame exposure to
     * start of next frame exposure.</p>
     * <p>The maximum frame rate that can be supported by a camera subsystem is
     * a function of many factors:</p>
     * <ul>
     * <li>Requested resolutions of output image streams</li>
     * <li>Availability of binning / skipping modes on the imager</li>
     * <li>The bandwidth of the imager interface</li>
     * <li>The bandwidth of the various ISP processing blocks</li>
     * </ul>
     * <p>Since these factors can vary greatly between different ISPs and
     * sensors, the camera abstraction tries to represent the bandwidth
     * restrictions with as simple a model as possible.</p>
     * <p>The model presented has the following characteristics:</p>
     * <ul>
     * <li>The image sensor is always configured to output the smallest
     * resolution possible given the application's requested output stream
     * sizes. The smallest resolution is defined as being at least as large
     * as the largest requested output stream size; the camera pipeline must
     * never digitally upsample sensor data when the crop region covers the
     * whole sensor. In general, this means that if only small output stream
     * resolutions are configured, the sensor can provide a higher frame
     * rate.</li>
     * <li>Since any request may use any or all the currently configured
     * output streams, the sensor and ISP must be configured to support
     * scaling a single capture to all the streams at the same time. This
     * means the camera pipeline must be ready to produce the largest
     * requested output size without any delay. Therefore, the overall
     * frame rate of a given configured stream set is governed only by the
     * largest requested stream resolution.</li>
     * <li>Using more than one output stream in a request does not affect the
     * frame duration.</li>
     * <li>JPEG streams act like processed YUV streams in requests for which
     * they are not included; in requests in which they are directly
     * referenced, they act as JPEG streams.
     * This is because supporting a
     * JPEG stream requires the underlying YUV data to always be ready for
     * use by a JPEG encoder, but the encoder will only be used (and impact
     * frame duration) on requests that actually reference a JPEG stream.</li>
     * <li>The JPEG processor can run concurrently to the rest of the camera
     * pipeline, but cannot process more than 1 capture at a time.</li>
     * </ul>
     * <p>The necessary information for the application, given the model above,
     * is provided via the android.scaler.available*MinDurations fields.
     * These are used to determine the maximum frame rate / minimum frame
     * duration that is possible for a given stream configuration.</p>
     * <p>Specifically, the application can use the following rules to
     * determine the minimum frame duration it can request from the HAL
     * device:</p>
     * <ol>
     * <li>Given the application's currently configured set of output
     * streams, <code>S</code>, divide them into three sets: streams in a JPEG format
     * <code>SJ</code>, streams in a raw sensor format <code>SR</code>, and the rest ('processed')
     * <code>SP</code>.</li>
     * <li>For each subset of streams, find the largest resolution (by pixel
     * count) in the subset. This gives (at most) three resolutions <code>RJ</code>,
     * <code>RR</code>, and <code>RP</code>.</li>
     * <li>If <code>RJ</code> is greater than <code>RP</code>, set <code>RP</code> equal to <code>RJ</code>. If there is
     * no exact match for <code>RP == RJ</code> (in particular there isn't an available
     * processed resolution at the same size as <code>RJ</code>), then set <code>RP</code> equal
     * to the smallest processed resolution that is larger than <code>RJ</code>. If
     * there are no processed resolutions larger than <code>RJ</code>, then set <code>RP</code> to
     * the processed resolution closest to <code>RJ</code>.</li>
     * <li>If <code>RP</code> is greater than <code>RR</code>, set <code>RR</code> equal to <code>RP</code>. If there is
     * no exact match for <code>RR == RP</code> (in particular there isn't an available
     * raw resolution at the same size as <code>RP</code>), then set <code>RR</code> equal
     * to the smallest raw resolution that is larger than <code>RP</code>. If
     * there are no raw resolutions larger than <code>RP</code>, then set <code>RR</code> to
     * the raw resolution closest to <code>RP</code>.</li>
     * <li>Look up the matching minimum frame durations in the property lists
     * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations},
     * android.scaler.availableRawMinDurations, and
     * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations}. This gives three
     * minimum frame durations <code>FJ</code>, <code>FR</code>, and <code>FP</code>.</li>
     * <li>If a stream of requests does not use a JPEG stream, then the minimum
     * supported frame duration for each request is <code>max(FR, FP)</code>.</li>
     * <li>If a stream of requests all use the JPEG stream, then the minimum
     * supported frame duration for each request is <code>max(FR, FP, FJ)</code>.</li>
     * <li>If a mix of JPEG-using and non-JPEG-using requests is submitted by
     * the application, then the HAL will have to delay JPEG-using requests
     * whenever the JPEG encoder is still busy processing an older capture.
     * This will happen whenever a JPEG-using request starts capture less
     * than <code>FJ</code> <em>ns</em> after a previous JPEG-using request. The minimum
     * supported frame duration will vary between the values calculated in
     * #6 and #7.</li>
     * </ol>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
     * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.</p>
     * <p>If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).</p>
     * <p>ISO 12232:2006 REI method</p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
     * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
     * <p>Each color channel is treated as an unsigned 32-bit integer.
     * The camera device then uses the most significant X bits
     * that correspond to how many bits are in its Bayer raw sensor
     * output.</p>
     * <p>For example, a sensor with RAW10 Bayer output would use the
     * 10 most significant bits from each color channel.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     *
     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
     */
    public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
            new Key<int[]>("android.sensor.testPatternData", int[].class);

    /**
     * <p>When enabled, the sensor sends a test pattern instead of
     * doing a real exposure from the camera.</p>
     * <p>When a test pattern is enabled, all manual sensor controls specified
     * by android.sensor.* should be ignored. All other controls should
     * work as normal.</p>
     * <p>For example, if manual flash is enabled, flash firing should still
     * occur (and the test pattern will remain unmodified, since the flash
     * would not actually affect it).</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     * @see #SENSOR_TEST_PATTERN_MODE_OFF
     * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
     * @see #SENSOR_TEST_PATTERN_MODE_PN9
     * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
     */
    public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
            new Key<Integer>("android.sensor.testPatternMode", int.class);
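
    // Illustrative sketch (not part of the generated metadata entries): fully
    // manual exposure using the sensor keys above. The exposure time and
    // frame duration are assumed to be in nanoseconds, the concrete values
    // are arbitrary examples, and "builder" is assumed to be a
    // CaptureRequest.Builder for this request.
    //
    //     builder.set(CaptureRequest.CONTROL_AE_MODE,
    //             CaptureRequest.CONTROL_AE_MODE_OFF);
    //     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L);  // 10 ms
    //     builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33333333L); // ~30 fps
    //     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);          // ISO 400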
    /**
     * <p>Quality of lens shading correction applied
     * to the image data.</p>
     * <p>When set to OFF mode, no lens shading correction will be applied by the
     * camera device, and identity lens shading map data will be provided
     * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for a lens
     * shading map with size specified as <code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]</code>,
     * the output {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} for this case will be an identity map
     * shown below:</p>
     * <pre><code>[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
     * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
     * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
     * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
     * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
     * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
     * </code></pre>
     * <p>When set to other modes, lens shading correction will be applied by the
     * camera device. Applications can request lens shading map data by setting
     * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide
     * lens shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}, with size specified
     * by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}.</p>
     *
     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
     * @see #SHADING_MODE_OFF
     * @see #SHADING_MODE_FAST
     * @see #SHADING_MODE_HIGH_QUALITY
     * @hide
     */
    public static final Key<Integer> SHADING_MODE =
            new Key<Integer>("android.shading.mode", int.class);

    /**
     * <p>State of the face detector
     * unit.</p>
     * <p>Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields.
     * Value must be one of the
     * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}.</p>
     *
     * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>Whether the HAL needs to output the lens
     * shading map in output result metadata.</p>
     * <p>When set to ON,
     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in
     * the output result metadata.</p>
     *
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
     */
    public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
            new Key<Integer>("android.statistics.lensShadingMapMode", int.class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the blue
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the green
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);
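
    // Illustrative usage sketch; not one of the generated key definitions.
    // It exercises the two statistics keys defined above: face detection is
    // switched to SIMPLE and a per-frame lens shading map is requested, so
    // every CaptureResult for this repeating preview will carry
    // android.statistics.lensShadingMap. The CameraDevice and the preview
    // Surface are assumed to have been set up via configureOutputs, and SIMPLE
    // is assumed to be listed in android.statistics.info.availableFaceDetectModes.
    private static CaptureRequest buildStatisticsPreviewRequest(CameraDevice device,
            Surface previewSurface) throws CameraAccessException {
        CaptureRequest.Builder builder =
                device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.set(STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_SIMPLE);
        builder.set(STATISTICS_LENS_SHADING_MAP_MODE, STATISTICS_LENS_SHADING_MAP_MODE_ON);
        builder.addTarget(previewSurface);
        return builder.build();
    }
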
    /**
     * <p>Tonemapping / contrast / gamma curve for the red
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     * <p>Each channel's curve is defined by an array of control points:</p>
     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} =
     * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
     * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
     * <p>These are sorted in order of increasing <code>Pin</code>; it is always
     * guaranteed that input values 0.0 and 1.0 are included in the list to
     * define a complete mapping. For input values between control points,
     * the camera device must linearly interpolate between the control
     * points.</p>
     * <p>Each curve can have an independent number of points, and the number
     * of points can be less than max (that is, the request doesn't have to
     * always provide a curve with number of points equivalent to
     * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
     * <p>A few examples, and their corresponding graphical mappings; these
     * only specify the red channel and the precision is limited to 4
     * digits, for conciseness.</p>
     * <p>Linear mapping:</p>
     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 0, 1.0, 1.0 ]
     * </code></pre>
     * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
     * <p>Invert mapping:</p>
     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 1.0, 1.0, 0 ]
     * </code></pre>
     * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
     * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
     * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
     * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
     * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
     * 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
     * </code></pre>
     * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
     * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
     * <pre><code>{@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
     * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
     * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
     * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
     * 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
     * </code></pre>
     * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);

    /**
     * <p>High-level global contrast/gamma/tonemapping control.</p>
     * <p>When switching to an application-defined contrast curve by setting
     * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
     * per-channel with a set of <code>(in, out)</code> points that specify the
     * mapping from input high-bit-depth pixel value to the output
     * low-bit-depth value.
     * Since the actual pixel ranges of both input
     * and output may change depending on the camera pipeline, the values
     * are specified by normalized floating-point numbers.</p>
     * <p>More-complex color mapping operations such as 3D color look-up
     * tables, selective chroma enhancement, or other non-linear color
     * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     * <p>When using either FAST or HIGH_QUALITY, the camera device will
     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed},
     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, and {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.
     * These values are always available, and are as close as possible to the
     * nonlinear/nonglobal transforms actually used.</p>
     * <p>If a request is sent with CONTRAST_CURVE, using the curve that the
     * camera device provided while in FAST or HIGH_QUALITY, the image's
     * tonemap will be roughly the same.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_BLUE
     * @see CaptureRequest#TONEMAP_CURVE_GREEN
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);
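
    // Illustrative usage sketch; not one of the generated key definitions.
    // It builds an application-defined gamma 1/2.2 tonemap curve
    // (out = in^(1/2.2)), including the required 0.0 and 1.0 endpoints, and
    // applies it to all three channels with android.tonemap.mode set to
    // CONTRAST_CURVE. pointCount must be at least 2 and should not exceed
    // android.tonemap.maxCurvePoints for the device; the Builder is assumed
    // to come from CameraDevice#createCaptureRequest.
    private static void applyGammaTonemap(CaptureRequest.Builder builder, int pointCount) {
        float[] curve = new float[pointCount * 2];
        for (int i = 0; i < pointCount; i++) {
            float in = i / (float) (pointCount - 1);
            curve[2 * i] = in;                                // Pin
            curve[2 * i + 1] = (float) Math.pow(in, 1 / 2.2); // Pout
        }
        builder.set(TONEMAP_MODE, TONEMAP_MODE_CONTRAST_CURVE);
        // The same curve is reused here; each channel may instead be given an
        // independent set of control points.
        builder.set(TONEMAP_CURVE_RED, curve);
        builder.set(TONEMAP_CURVE_GREEN, curve);
        builder.set(TONEMAP_CURVE_BLUE, curve);
    }
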
    /**
     * <p>This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.</p>
     * <p>In particular, the LED <em>must</em> always be on when the data could be
     * transmitted off the device. The LED <em>should</em> always be on whenever
     * data is stored locally on the device.</p>
     * <p>The LED <em>may</em> be off if a trusted application is using the data in a
     * way that does not violate the above rules.</p>
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>Whether black-level compensation is locked
     * to its current values, or is free to vary.</p>
     * <p>When set to ON, the values used for black-level
     * compensation will not change until the lock is set to
     * OFF.</p>
     * <p>Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the camera device must report whether setting
     * the black level lock was successful in the output result
     * metadata.</p>
     * <p>For example, if a sequence of requests is as follows:</p>
     * <ul>
     * <li>Request 1: Exposure = 10ms, Black level lock = OFF</li>
     * <li>Request 2: Exposure = 10ms, Black level lock = ON</li>
     * <li>Request 3: Exposure = 10ms, Black level lock = ON</li>
     * <li>Request 4: Exposure = 20ms, Black level lock = ON</li>
     * <li>Request 5: Exposure = 20ms, Black level lock = ON</li>
     * <li>Request 6: Exposure = 20ms, Black level lock = ON</li>
     * </ul>
     * <p>and the exposure change in Request 4 requires the camera
     * device to reset the black level offsets, then the output
     * result metadata is expected to be:</p>
     * <ul>
     * <li>Result 1: Exposure = 10ms, Black level lock = OFF</li>
     * <li>Result 2: Exposure = 10ms, Black level lock = ON</li>
     * <li>Result 3: Exposure = 10ms, Black level lock = ON</li>
     * <li>Result 4: Exposure = 20ms, Black level lock = OFF</li>
     * <li>Result 5: Exposure = 20ms, Black level lock = ON</li>
     * <li>Result 6: Exposure = 20ms, Black level lock = ON</li>
     * </ul>
     * <p>This indicates to the application that on frame 4, black
     * levels were reset due to exposure value changes, and pixel
     * values may not be consistent across captures.</p>
     * <p>The camera device will maintain the lock to the extent
     * possible, only overriding the lock to OFF when changes to
     * other request parameters require a black level recalculation
     * or reset.</p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
}