/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.Parcel;
import android.os.Parcelable;
import android.view.Surface;

import java.util.HashSet;
import java.util.Objects;

/**
 * <p>An immutable package of settings and outputs needed to capture a single
 * image from the camera device.</p>
 *
 * <p>Contains the configuration for the capture hardware (sensor, lens, flash),
 * the processing pipeline, the control algorithms, and the output buffers. Also
 * contains the list of target Surfaces to send image data to for this
 * capture.</p>
 *
 * <p>CaptureRequests can be created by using a {@link Builder} instance,
 * obtained by calling {@link CameraDevice#createCaptureRequest}.</p>
 *
 * <p>CaptureRequests are given to {@link CameraDevice#capture} or
 * {@link CameraDevice#setRepeatingRequest} to capture images from a camera.</p>
 *
 * <p>Each request can specify a different subset of target Surfaces for the
 * camera to send the captured data to. All the surfaces used in a request must
 * be part of the surface list given to the last call to
 * {@link CameraDevice#configureOutputs}, when the request is submitted to the
 * camera device.</p>
 *
 * <p>For example, a request meant for repeating preview might only include the
 * Surface for the preview SurfaceView or SurfaceTexture, while a
 * high-resolution still capture would also include a Surface from an
 * ImageReader configured for high-resolution JPEG images.</p>
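 *
 * <p>As an illustrative sketch only, building and submitting a still-capture
 * request might look like the following ({@code jpegSurface}, {@code
 * captureListener}, and {@code handler} are placeholders the application
 * would supply):</p>
 *
 * <pre>{@code
 * // Build a still-capture request targeting one configured output Surface.
 * CaptureRequest.Builder builder =
 *         cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
 * builder.addTarget(jpegSurface);            // must be a configured output
 * builder.set(CaptureRequest.JPEG_QUALITY, (byte) 90);
 * CaptureRequest request = builder.build();  // immutable snapshot of settings
 * cameraDevice.capture(request, captureListener, handler);
 * }</pre>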
 *
 * @see CameraDevice#capture
 * @see CameraDevice#setRepeatingRequest
 * @see CameraDevice#createCaptureRequest
 */
public final class CaptureRequest extends CameraMetadata implements Parcelable {

    private final HashSet<Surface> mSurfaceSet;
    private final CameraMetadataNative mSettings;

    private Object mUserTag;

    /**
     * Construct an empty request; used when expanding from a Parcel.
     */
    private CaptureRequest() {
        mSettings = new CameraMetadataNative();
        mSurfaceSet = new HashSet<Surface>();
    }

    /**
     * Clone from a source capture request; used by the Builder to create an
     * immutable copy.
     */
    @SuppressWarnings("unchecked")
    private CaptureRequest(CaptureRequest source) {
        mSettings = new CameraMetadataNative(source.mSettings);
        mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone();
        mUserTag = source.mUserTag;
    }

    /**
     * Take ownership of the passed-in settings; used by the Builder to create
     * a request from a template.
     */
    private CaptureRequest(CameraMetadataNative settings) {
        mSettings = settings;
        mSurfaceSet = new HashSet<Surface>();
    }

    /**
     * Retrieve the tag for this request, if any.
     *
     * <p>This tag is not used for anything by the camera device, but can be
     * used by an application to easily identify a CaptureRequest when it is
     * returned by
     * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
     * </p>
     *
     * @return the last tag Object set on this request, or {@code null} if
     *         no tag has been set.
     * @see Builder#setTag
     */
    public Object getTag() {
        return mUserTag;
    }

    /**
     * Determine whether this CaptureRequest is equal to another CaptureRequest.
     *
     * <p>A request is considered equal to another if its set of key/value pairs
     * is equal, its list of output surfaces is equal, and the user tag is
     * equal.</p>
     *
     * @param other Another instance of CaptureRequest.
     *
     * @return True if the requests are the same, false otherwise.
     */
    @Override
    public boolean equals(Object other) {
        return other instanceof CaptureRequest
                && equals((CaptureRequest) other);
    }

    private boolean equals(CaptureRequest other) {
        return other != null
                && Objects.equals(mUserTag, other.mUserTag)
                && mSurfaceSet.equals(other.mSurfaceSet)
                && mSettings.equals(other.mSettings);
    }

    @Override
    public int hashCode() {
        return mSettings.hashCode();
    }

    public static final Parcelable.Creator<CaptureRequest> CREATOR =
            new Parcelable.Creator<CaptureRequest>() {
        @Override
        public CaptureRequest createFromParcel(Parcel in) {
            CaptureRequest request = new CaptureRequest();
            request.readFromParcel(in);
            return request;
        }

        @Override
        public CaptureRequest[] newArray(int size) {
            return new CaptureRequest[size];
        }
    };

    /**
     * Expand this object from a Parcel.
     *
     * @param in The Parcel from which the object should be read
     * @hide
     */
    public void readFromParcel(Parcel in) {
        mSettings.readFromParcel(in);
        mSurfaceSet.clear();

        Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader());
        if (parcelableArray != null) {
            for (Parcelable p : parcelableArray) {
                mSurfaceSet.add((Surface) p);
            }
        }
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        mSettings.writeToParcel(dest, flags);
        dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags);
    }

    /**
     * A builder for capture requests.
     *
     * <p>To obtain a builder instance, use the
     * {@link CameraDevice#createCaptureRequest} method, which initializes the
     * request fields to one of the templates defined in {@link CameraDevice}.
     *
     * @see CameraDevice#createCaptureRequest
     * @see #TEMPLATE_PREVIEW
     * @see #TEMPLATE_RECORD
     * @see #TEMPLATE_STILL_CAPTURE
     * @see #TEMPLATE_VIDEO_SNAPSHOT
     * @see #TEMPLATE_MANUAL
     */
    public final static class Builder {

        private final CaptureRequest mRequest;

        /**
         * Initialize the builder using the template; the request takes
         * ownership of the template.
         *
         * @hide
         */
        public Builder(CameraMetadataNative template) {
            mRequest = new CaptureRequest(template);
        }

        /**
         * <p>Add a surface to the list of targets for this request.</p>
         *
         * <p>The Surface added must be one of the surfaces included in the most
         * recent call to {@link CameraDevice#configureOutputs}, when the
         * request is given to the camera device.</p>
         *
         * <p>Adding a target more than once has no effect.</p>
         *
         * @param outputTarget Surface to use as an output target for this request
         */
        public void addTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.add(outputTarget);
        }

        /**
         * <p>Remove a surface from the list of targets for this request.</p>
         *
         * <p>Removing a target that is not currently added has no effect.</p>
         *
         * @param outputTarget Surface to remove as an output target for this request
         */
        public void removeTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.remove(outputTarget);
        }

        /**
         * Set a capture request field to a value. The field definitions can be
         * found in {@link CaptureRequest}.
         *
         * @param key The metadata field to write.
         * @param value The value to set the field to, which must be of a matching
         * type to the key.
         */
        public <T> void set(Key<T> key, T value) {
            mRequest.mSettings.set(key, value);
        }

        /**
         * Set a tag for this request.
         *
         * <p>This tag is not used for anything by the camera device, but can be
         * used by an application to easily identify a CaptureRequest when it is
         * returned by
         * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
         *
         * @param tag an arbitrary Object to store with this request
         * @see CaptureRequest#getTag
         */
        public void setTag(Object tag) {
            mRequest.mUserTag = tag;
        }

        /**
         * Build a request using the current target Surfaces and settings.
         *
         * @return A new capture request instance, ready for submission to the
         * camera device.
         */
        public CaptureRequest build() {
            return new CaptureRequest(mRequest);
        }

        /**
         * @hide
         */
        public boolean isEmpty() {
            return mRequest.mSettings.isEmpty();
        }
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/

    /**
     * <p>The mode control selects how the image data is converted from the
     * sensor's native color into linear sRGB color.</p>
     *
     * <p>When auto-white balance is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
     * control is overridden by the AWB routine. When AWB is disabled, the
     * application controls how the color mapping is performed.</p>
     *
     * <p>We define the expected processing pipeline below. For consistency
     * across devices, this is always the case with TRANSFORM_MATRIX.</p>
     *
     * <p>When either FAST or HIGH_QUALITY is used, the camera device may
     * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
     * camera device (in the results) and be roughly correct.</p>
     *
     * <p>Switching to TRANSFORM_MATRIX and using the data provided from
     * FAST or HIGH_QUALITY will yield a picture with the same white point
     * as what was produced by the camera device in the earlier frame.</p>
     *
     * <p>The expected processing pipeline is as follows:</p>
     *
     * <p>The white balance is encoded by two values, a 4-channel white-balance
     * gain vector (applied in the Bayer domain), and a 3x3 color transform
     * matrix (applied after demosaic).</p>
     *
     * <p>The 4-channel white-balance gains are defined as:</p>
     *
     * <pre>
     * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
     * </pre>
     *
     * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
     * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
     * These may be identical for a given camera device implementation; if
     * the camera device does not support a separate gain for even/odd green
     * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
     * <code>G_even</code> in the output result metadata.</p>
     *
     * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
     *
     * <pre>
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
     * </pre>
     *
     * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
     * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>, with colors as follows:</p>
     *
     * <pre>
     * r' = I0r + I1g + I2b
     * g' = I3r + I4g + I5b
     * b' = I6r + I7g + I8b
     * </pre>
     *
     * <p>Both the input and output value ranges must match. Overflow/underflow
     * values are clipped to fit within the range.</p>
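     *
     * <p>As an illustrative sketch only (not part of the camera pipeline
     * contract; on a real device the gains are applied in the Bayer domain
     * before demosaic), the per-pixel math above could be written as:</p>
     *
     * <pre>{@code
     * // Hypothetical helper: apply white-balance gains and then a 3x3 color
     * // transform to a single RGB triple, as a simplification.
     * // 'gains' is [R, G_even, G_odd, B]; 'ccm' is the row-major 9-entry
     * // transform [I0..I8]; all values normalized to [0, 1].
     * static float[] applyColorCorrection(float r, float g, float b,
     *         boolean evenRow, float[] gains, float[] ccm) {
     *     float gGain = evenRow ? gains[1] : gains[2];
     *     float ri = r * gains[0], gi = g * gGain, bi = b * gains[3];
     *     return new float[] {
     *         ccm[0] * ri + ccm[1] * gi + ccm[2] * bi,  // r'
     *         ccm[3] * ri + ccm[4] * gi + ccm[5] * bi,  // g'
     *         ccm[6] * ri + ccm[7] * gi + ccm[8] * bi,  // b'
     *     };
     * }
     * }</pre>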
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
     * @see CaptureRequest#CONTROL_AWB_MODE
     * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
     * @see #COLOR_CORRECTION_MODE_FAST
     * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> COLOR_CORRECTION_MODE =
            new Key<Integer>("android.colorCorrection.mode", int.class);

    /**
     * <p>A color transform matrix to use to transform
     * from sensor RGB color space to output linear sRGB color space</p>
     *
     * <p>This matrix is either set by the camera device when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     *
     * <p>In the latter case, the camera device may round the matrix to account
     * for precision issues; the final rounded matrix should be reported back
     * in this matrix result metadata. The transform should keep the magnitude
     * of the output color values within <code>[0, 1.0]</code> (assuming input color
     * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
            new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);

    /**
     * <p>Gains applying to Bayer raw color channels for
     * white-balance.</p>
     *
     * <p>The 4-channel white-balance gains are defined in
     * the order of <code>[R G_even G_odd B]</code>, where <code>G_even</code> is the gain
     * for green pixels on even rows of the output, and <code>G_odd</code>
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the <code>G_even</code> value, and write <code>G_odd</code> equal to
     * <code>G_even</code> in the output result metadata.</p>
     *
     * <p>This array is either set by HAL when the request
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
     * directly by the application in the request when the
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
     *
     * <p>The output should be the gains actually applied by the HAL to
     * the current frame.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     */
    public static final Key<float[]> COLOR_CORRECTION_GAINS =
            new Key<float[]>("android.colorCorrection.gains", float[].class);

    /**
     * <p>The desired setting for the camera device's auto-exposure
     * algorithm's antibanding compensation.</p>
     *
     * <p>Some kinds of lighting fixtures, such as some fluorescent
     * lights, flicker at the rate of the power supply frequency
     * (60Hz or 50Hz, depending on country). While this is
     * typically not noticeable to a person, it can be visible to
     * a camera device. If a camera sets its exposure time to the
     * wrong value, the flicker may become visible in the
     * viewfinder as flicker or in a final captured image, as a
     * set of variable-brightness bands across the image.</p>
     *
     * <p>Therefore, the auto-exposure routines of camera devices
     * include antibanding routines that ensure that the chosen
     * exposure value will not cause such banding. The choice of
     * exposure time depends on the rate of flicker, which the
     * camera device can detect automatically, or the expected
     * rate can be selected by the application using this
     * control.</p>
     *
     * <p>A given camera device may not support all of the possible
     * options for the antibanding mode. The
     * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
     * the available modes for a given camera device.</p>
     *
     * <p>The default mode is AUTO, which must be supported by all
     * camera devices.</p>
     *
     * <p>If manual exposure control is enabled (by setting
     * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
     * then this setting has no effect, and the application must
     * ensure it selects exposure times that do not cause banding
     * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
     * the application in this.</p>
     *
     * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_MODE
     * @see CaptureResult#STATISTICS_SCENE_FLICKER
     * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
     * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
     * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
     * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
     */
    public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
            new Key<Integer>("android.control.aeAntibandingMode", int.class);

    /**
     * <p>Adjustment to AE target image
     * brightness.</p>
     *
     * <p>For example, if the EV step is 0.333, a setting of 6 means an
     * exposure compensation of +2 EV, and -3 means an exposure
     * compensation of -1 EV.</p>
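     *
     * <p>A small sketch of that arithmetic (illustrative only; the step
     * size would come from the device's reported characteristics):</p>
     *
     * <pre>{@code
     * // Convert a desired exposure compensation in EV into the integer
     * // index expected by this control. 'stepNum/stepDen' is the EV step
     * // (e.g. 1/3).
     * static int evToCompensationIndex(double desiredEv, int stepNum, int stepDen) {
     *     double step = (double) stepNum / stepDen;   // e.g. 0.333
     *     return (int) Math.round(desiredEv / step);  // +2 EV with 1/3 step -> 6
     * }
     * }</pre>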
     */
    public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
            new Key<Integer>("android.control.aeExposureCompensation", int.class);

    /**
     * <p>Whether AE is currently locked to its latest
     * calculated values.</p>
     *
     * <p>Note that even when AE is locked, the flash may be
     * fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH / ON_ALWAYS_FLASH /
     * ON_AUTO_FLASH_REDEYE.</p>
     *
     * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
     * when AE is already locked, the camera device will not change the exposure time
     * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
     * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
     * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
     * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.</p>
     *
     * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
     * @see CaptureResult#CONTROL_AE_STATE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_SENSITIVITY
     */
    public static final Key<Boolean> CONTROL_AE_LOCK =
            new Key<Boolean>("android.control.aeLock", boolean.class);

    /**
     * <p>The desired mode for the camera device's
     * auto-exposure routine.</p>
     *
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
     * AUTO.</p>
     *
     * <p>When set to any of the ON modes, the camera device's
     * auto-exposure routine is enabled, overriding the
     * application's selected exposure time, sensor sensitivity,
     * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
     * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
     * is selected, the camera device's flash unit controls are
     * also overridden.</p>
     *
     * <p>The FLASH modes are only available if the camera device
     * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
     *
     * <p>If flash TORCH mode is desired, this field must be set to
     * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
     *
     * <p>When set to any of the ON modes, the values chosen by the
     * camera device auto-exposure routine for the overridden
     * fields for a given capture will be available in its
     * CaptureResult.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
     * @see CaptureRequest#FLASH_MODE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     * @see CaptureRequest#SENSOR_SENSITIVITY
     * @see #CONTROL_AE_MODE_OFF
     * @see #CONTROL_AE_MODE_ON
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
     * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
     * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
     */
    public static final Key<Integer> CONTROL_AE_MODE =
            new Key<Integer>("android.control.aeMode", int.class);

    /**
     * <p>List of areas to use for
     * metering.</p>
     *
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     *
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     *
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
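     *
     * <p>An illustrative sketch of building one centered metering region
     * (assuming the 5-int-per-region layout described above; the 4000x3000
     * active array size is a placeholder):</p>
     *
     * <pre>{@code
     * // One region covering the middle half of a hypothetical 4000x3000
     * // active array, with weight 1: {xmin, ymin, xmax, ymax, weight}.
     * int[] aeRegion = new int[] {
     *     1000, 750,   // xmin, ymin
     *     2999, 2249,  // xmax, ymax (inclusive)
     *     1            // nonnegative weight
     * };
     * builder.set(CaptureRequest.CONTROL_AE_REGIONS, aeRegion);
     * }</pre>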
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AE_REGIONS =
            new Key<int[]>("android.control.aeRegions", int[].class);

    /**
     * <p>Range over which fps can be adjusted to
     * maintain exposure.</p>
     *
     * <p>Only constrains the AE algorithm, not manual control
     * of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}.</p>
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     */
    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);

    /**
     * <p>Whether the camera device will trigger a precapture
     * metering sequence when it processes this request.</p>
     *
     * <p>This entry is normally set to IDLE, or is not
     * included at all in the request settings. When included and
     * set to START, the camera device will trigger the auto-exposure
     * precapture metering sequence.</p>
     *
     * <p>The effect of AE precapture trigger depends on the current
     * AE mode and state; see {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture
     * state transition details.</p>
     *
     * @see CaptureResult#CONTROL_AE_STATE
     * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
     * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
     */
    public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
            new Key<Integer>("android.control.aePrecaptureTrigger", int.class);

    /**
     * <p>Whether AF is currently enabled, and what
     * mode it is set to.</p>
     *
     * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO.</p>
     *
     * <p>If the lens is controlled by the camera device auto-focus algorithm,
     * the camera device will report the current AF status in android.control.afState
     * in result metadata.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AF_MODE_OFF
     * @see #CONTROL_AF_MODE_AUTO
     * @see #CONTROL_AF_MODE_MACRO
     * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
     * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
     * @see #CONTROL_AF_MODE_EDOF
     */
    public static final Key<Integer> CONTROL_AF_MODE =
            new Key<Integer>("android.control.afMode", int.class);

    /**
     * <p>List of areas to use for focus
     * estimation.</p>
     *
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     *
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     *
     * <p>If all regions have 0 weight, then no specific focus area
     * needs to be used by the HAL. If the focusing region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AF_REGIONS =
            new Key<int[]>("android.control.afRegions", int[].class);

    /**
     * <p>Whether the camera device will trigger autofocus for this request.</p>
     *
     * <p>This entry is normally set to IDLE, or is not
     * included at all in the request settings.</p>
     *
     * <p>When included and set to START, the camera device will trigger the
     * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
     *
     * <p>When set to CANCEL, the camera device will cancel any active trigger,
     * and return to its initial AF state.</p>
     *
     * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what that means for each AF mode.</p>
     *
     * @see CaptureResult#CONTROL_AF_STATE
     * @see #CONTROL_AF_TRIGGER_IDLE
     * @see #CONTROL_AF_TRIGGER_START
     * @see #CONTROL_AF_TRIGGER_CANCEL
     */
    public static final Key<Integer> CONTROL_AF_TRIGGER =
            new Key<Integer>("android.control.afTrigger", int.class);

    /**
     * <p>Whether AWB is currently locked to its
     * latest calculated values.</p>
     *
     * <p>Note that AWB lock is only meaningful for AUTO
     * mode; in other modes, AWB is already fixed to a specific
     * setting.</p>
     */
    public static final Key<Boolean> CONTROL_AWB_LOCK =
            new Key<Boolean>("android.control.awbLock", boolean.class);

    /**
     * <p>Whether AWB is currently setting the color
     * transform fields, and what its illumination target
     * is.</p>
     *
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
     *
     * <p>When set to the ON mode, the camera device's auto white balance
     * routine is enabled, overriding the application's selected
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
     * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
     *
     * <p>When set to the OFF mode, the camera device's auto white balance
     * routine is disabled. The application manually controls the white
     * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
     * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
     *
     * <p>When set to any other modes, the camera device's auto white balance
     * routine is disabled. The camera device uses each particular illumination
     * target for white balance adjustment.</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_AWB_MODE_OFF
     * @see #CONTROL_AWB_MODE_AUTO
     * @see #CONTROL_AWB_MODE_INCANDESCENT
     * @see #CONTROL_AWB_MODE_FLUORESCENT
     * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
     * @see #CONTROL_AWB_MODE_DAYLIGHT
     * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
     * @see #CONTROL_AWB_MODE_TWILIGHT
     * @see #CONTROL_AWB_MODE_SHADE
     */
    public static final Key<Integer> CONTROL_AWB_MODE =
            new Key<Integer>("android.control.awbMode", int.class);

    /**
     * <p>List of areas to use for illuminant
     * estimation.</p>
     *
     * <p>Only used in AUTO mode.</p>
     *
     * <p>Each area is a rectangle plus weight: xmin, ymin,
     * xmax, ymax, weight. The rectangle is defined to be inclusive of the
     * specified coordinates.</p>
     *
     * <p>The coordinate system is based on the active pixel array,
     * with (0,0) being the top-left pixel in the active pixel array, and
     * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
     * bottom-right pixel in the active pixel array. The weight
     * should be nonnegative.</p>
     *
     * <p>If all regions have 0 weight, then no specific metering area
     * needs to be used by the HAL. If the metering region is
     * outside the current {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, the HAL
     * should ignore the sections outside the region and output the
     * used sections in the frame metadata.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<int[]> CONTROL_AWB_REGIONS =
            new Key<int[]>("android.control.awbRegions", int[].class);

    /**
     * <p>Information to the camera device 3A (auto-exposure,
     * auto-focus, auto-white balance) routines about the purpose
     * of this capture, to help the camera device to decide optimal 3A
     * strategy.</p>
     *
     * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF
     * and any 3A routine is active.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_CAPTURE_INTENT_CUSTOM
     * @see #CONTROL_CAPTURE_INTENT_PREVIEW
     * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
     * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
     * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
     * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
     */
    public static final Key<Integer> CONTROL_CAPTURE_INTENT =
            new Key<Integer>("android.control.captureIntent", int.class);

    /**
     * <p>A special color effect to apply.</p>
     *
     * <p>When this mode is set, a color effect will be applied
     * to images produced by the camera device. The interpretation
     * and implementation of these color effects is left to the
     * implementor of the camera device, and should not be
     * depended on to be consistent (or present) across all
     * devices.</p>
     *
     * <p>A color effect will only be applied if
     * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF.</p>
     *
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_EFFECT_MODE_OFF
     * @see #CONTROL_EFFECT_MODE_MONO
     * @see #CONTROL_EFFECT_MODE_NEGATIVE
     * @see #CONTROL_EFFECT_MODE_SOLARIZE
     * @see #CONTROL_EFFECT_MODE_SEPIA
     * @see #CONTROL_EFFECT_MODE_POSTERIZE
     * @see #CONTROL_EFFECT_MODE_WHITEBOARD
     * @see #CONTROL_EFFECT_MODE_BLACKBOARD
     * @see #CONTROL_EFFECT_MODE_AQUA
     */
    public static final Key<Integer> CONTROL_EFFECT_MODE =
            new Key<Integer>("android.control.effectMode", int.class);

    /**
     * <p>Overall mode of 3A control
     * routines.</p>
     *
     * <p>High-level 3A control. When set to OFF, all 3A control
     * by the camera device is disabled. The application must set the fields for
     * capture parameters itself.</p>
     *
     * <p>When set to AUTO, the individual algorithm controls in
     * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
     *
     * <p>When set to USE_SCENE_MODE, the individual controls in
     * android.control.* are mostly disabled, and the camera device implements
     * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
     * as it wishes. The camera device scene mode 3A settings are provided by
     * android.control.sceneModeOverrides.</p>
     *
     * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference
     * is that this frame will not be used by the camera device's background 3A statistics
     * update, as if this frame had never been captured. This mode can be used in the scenario
     * where the application doesn't want a 3A manual control capture to affect
     * the subsequent auto 3A capture results.</p>
     *
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see #CONTROL_MODE_OFF
     * @see #CONTROL_MODE_AUTO
     * @see #CONTROL_MODE_USE_SCENE_MODE
     * @see #CONTROL_MODE_OFF_KEEP_STATE
     */
    public static final Key<Integer> CONTROL_MODE =
            new Key<Integer>("android.control.mode", int.class);

    /**
     * <p>A camera mode optimized for conditions typical in a particular
     * capture setting.</p>
     *
     * <p>This is the mode that is active when
     * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY,
     * these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
     * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.</p>
     *
     * <p>The interpretation and implementation of these scene modes is left
     * to the implementor of the camera device. Their behavior will not be
     * consistent across all devices, and any given device may only implement
     * a subset of these modes.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see CaptureRequest#CONTROL_AWB_MODE
     * @see CaptureRequest#CONTROL_MODE
     * @see #CONTROL_SCENE_MODE_DISABLED
     * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
     * @see #CONTROL_SCENE_MODE_ACTION
     * @see #CONTROL_SCENE_MODE_PORTRAIT
     * @see #CONTROL_SCENE_MODE_LANDSCAPE
     * @see #CONTROL_SCENE_MODE_NIGHT
     * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
     * @see #CONTROL_SCENE_MODE_THEATRE
     * @see #CONTROL_SCENE_MODE_BEACH
     * @see #CONTROL_SCENE_MODE_SNOW
     * @see #CONTROL_SCENE_MODE_SUNSET
     * @see #CONTROL_SCENE_MODE_STEADYPHOTO
     * @see #CONTROL_SCENE_MODE_FIREWORKS
     * @see #CONTROL_SCENE_MODE_SPORTS
     * @see #CONTROL_SCENE_MODE_PARTY
     * @see #CONTROL_SCENE_MODE_CANDLELIGHT
     * @see #CONTROL_SCENE_MODE_BARCODE
     */
    public static final Key<Integer> CONTROL_SCENE_MODE =
            new Key<Integer>("android.control.sceneMode", int.class);

    /**
     * <p>Whether video stabilization is
     * active.</p>
     *
     * <p>If enabled, video stabilization can modify the
     * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream
     * stabilized.</p>
     *
     * @see CaptureRequest#SCALER_CROP_REGION
     */
    public static final Key<Integer> CONTROL_VIDEO_STABILIZATION_MODE =
            new Key<Integer>("android.control.videoStabilizationMode", int.class);

    /**
     * <p>Operation mode for edge
     * enhancement.</p>
     *
     * <p>Edge/sharpness/detail enhancement. OFF means no
     * enhancement will be applied by the HAL.</p>
     *
     * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
     * will be applied. HIGH_QUALITY mode indicates that the
     * camera device will use the highest-quality enhancement algorithms,
     * even if it slows down capture rate. FAST means the camera device will
     * not slow down capture rate when applying edge enhancement.</p>
     *
     * @see #EDGE_MODE_OFF
     * @see #EDGE_MODE_FAST
     * @see #EDGE_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> EDGE_MODE =
            new Key<Integer>("android.edge.mode", int.class);

    /**
     * <p>The desired mode for the camera device's flash control.</p>
     *
     * <p>This control is only effective when a flash unit is available
     * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
     *
     * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
     * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
     * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
     *
     * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
     *
     * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
     * device's auto-exposure routine's result. When used in the still capture case, this
     * control should be used along with the AE precapture metering sequence
     * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}); otherwise, the image may be incorrectly exposed.</p>
     *
     * <p>When set to TORCH, the flash will be on continuously. This mode can be used
     * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
     *
     * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
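     *
     * <p>For example, turning the torch on for a repeating preview request
     * might look like the sketch below ({@code builder}, {@code listener},
     * and {@code handler} are placeholders; illustrative only):</p>
     *
     * <pre>{@code
     * // Per the contract above, TORCH requires AE mode ON or OFF so the
     * // auto-exposure flash modes cannot override the flash control.
     * builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
     * builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
     * cameraDevice.setRepeatingRequest(builder.build(), listener, handler);
     * }</pre>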
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
     * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
     * @see CaptureResult#FLASH_STATE
     * @see #FLASH_MODE_OFF
     * @see #FLASH_MODE_SINGLE
     * @see #FLASH_MODE_TORCH
     */
    public static final Key<Integer> FLASH_MODE =
            new Key<Integer>("android.flash.mode", int.class);

    /**
     * <p>GPS coordinates to include in output JPEG
     * EXIF</p>
     */
    public static final Key<double[]> JPEG_GPS_COORDINATES =
            new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);

    /**
     * <p>32 characters describing GPS algorithm to
     * include in EXIF</p>
     */
    public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
            new Key<String>("android.jpeg.gpsProcessingMethod", String.class);

    /**
     * <p>Time GPS fix was made to include in
     * EXIF</p>
     */
    public static final Key<Long> JPEG_GPS_TIMESTAMP =
            new Key<Long>("android.jpeg.gpsTimestamp", long.class);

    /**
     * <p>Orientation of JPEG image to
     * write</p>
     */
    public static final Key<Integer> JPEG_ORIENTATION =
            new Key<Integer>("android.jpeg.orientation", int.class);

    /**
     * <p>Compression quality of the final JPEG
     * image</p>
     *
     * <p>85-95 is typical usage range</p>
     */
    public static final Key<Byte> JPEG_QUALITY =
            new Key<Byte>("android.jpeg.quality", byte.class);

    /**
     * <p>Compression quality of JPEG
     * thumbnail</p>
     */
    public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
            new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);

    /**
     * <p>Resolution of embedded JPEG thumbnail</p>
     *
     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
     * but the captured JPEG will still be a valid image.</p>
     *
     * <p>When a jpeg image capture is issued, the thumbnail size selected should have
     * the same aspect ratio as the jpeg image.</p>
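     *
     * <p>A sketch of picking a thumbnail size whose aspect ratio matches the
     * final JPEG (illustrative only; the candidate list would come from the
     * device's reported available thumbnail sizes):</p>
     *
     * <pre>{@code
     * // Return the first candidate whose aspect ratio matches the JPEG's.
     * static Size matchThumbnail(Size jpeg, Size[] candidates) {
     *     for (Size s : candidates) {
     *         if (s.getWidth() == 0) continue; // (0, 0) disables the thumbnail
     *         if (s.getWidth() * jpeg.getHeight()
     *                 == s.getHeight() * jpeg.getWidth()) {
     *             return s;
     *         }
     *     }
     *     return new Size(0, 0); // fall back to no thumbnail
     * }
     * }</pre>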
     */
    public static final Key<Size> JPEG_THUMBNAIL_SIZE =
            new Key<Size>("android.jpeg.thumbnailSize", Size.class);

    /**
     * <p>The ratio of lens focal length to the effective
     * aperture diameter.</p>
     *
     * <p>This will only be supported on the camera devices that
     * have a variable aperture lens. The aperture value can only be
     * one of the values listed in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}.</p>
     *
     * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
     * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and android.sensor.frameDuration
     * to achieve manual exposure control.</p>
     *
     * <p>The requested aperture value may take several frames to reach the
     * requested value; the camera device will report the current (intermediate)
     * aperture size in capture result metadata while the aperture is changing.
     * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     *
     * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
     * the ON modes, this will be overridden by the camera device
     * auto-exposure algorithm; the overridden values are then provided
     * back to the user in the corresponding result.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
     * @see CaptureResult#LENS_STATE
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     * @see CaptureRequest#SENSOR_SENSITIVITY
     */
    public static final Key<Float> LENS_APERTURE =
            new Key<Float>("android.lens.aperture", float.class);

    /**
     * <p>State of lens neutral density filter(s).</p>
     *
     * <p>This will not be supported on most camera devices. On devices
     * where this is supported, this may only be set to one of the
     * values included in {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}.</p>
     *
     * <p>Lens filters are typically used to lower the amount of light the
     * sensor is exposed to (measured in steps of EV). As used here, an EV
     * step is the standard logarithmic representation, which is
     * non-negative and inversely proportional to the amount of light
     * hitting the sensor. For example, setting this to 0 would result
     * in no reduction of the incoming light, and setting this to 2 would
     * mean that the filter is set to reduce incoming light by two stops
     * (allowing 1/4 of the prior amount of light to the sensor).</p>
     *
     * <p>It may take several frames before the lens filter density changes
     * to the requested value. While the filter density is still changing,
     * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     *
     * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FILTER_DENSITY =
            new Key<Float>("android.lens.filterDensity", float.class);

    /**
     * <p>The current lens focal length; used for optical zoom.</p>
     *
     * <p>This setting controls the physical focal length of the camera
     * device's lens. Changing the focal length changes the field of
     * view of the camera device, and is usually used for optical zoom.</p>
     *
     * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
     * setting won't be applied instantaneously, and it may take several
     * frames before the lens can change to the requested focal length.
     * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
     * be set to MOVING.</p>
     *
     * <p>This is expected not to be supported on most devices.</p>
     *
     * @see CaptureRequest#LENS_APERTURE
     * @see CaptureRequest#LENS_FOCUS_DISTANCE
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FOCAL_LENGTH =
            new Key<Float>("android.lens.focalLength", float.class);

    /**
     * <p>Distance to plane of sharpest focus,
     * measured from frontmost surface of the lens.</p>
     *
     * <p>0 means infinity focus. The used value will be clamped
     * to [0, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}].</p>
     *
     * <p>Like {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, this setting won't be applied
     * instantaneously, and it may take several frames before the lens
     * can move to the requested focus distance. While the lens is still moving,
     * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
     *
     * @see CaptureRequest#LENS_FOCAL_LENGTH
     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
     * @see CaptureResult#LENS_STATE
     */
    public static final Key<Float> LENS_FOCUS_DISTANCE =
            new Key<Float>("android.lens.focusDistance", float.class);

    /**
     * <p>Sets whether the camera device uses optical image stabilization (OIS)
     * when capturing images.</p>
     *
     * <p>OIS is used to compensate for motion blur due to small movements of
     * the camera during capture. Unlike digital image stabilization, OIS makes
     * use of mechanical elements to stabilize the camera sensor, and thus
     * allows for longer exposure times before camera shake becomes
     * apparent.</p>
     *
     * <p>This is not expected to be supported on most devices.</p>
     *
     * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
     * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
     */
    public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
            new Key<Integer>("android.lens.opticalStabilizationMode", int.class);

    /**
     * <p>Mode of operation for the noise reduction
     * algorithm.</p>
     *
     * <p>Noise filtering control. OFF means no noise reduction
     * will be applied by the HAL.</p>
     *
     * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
     * will be applied. HIGH_QUALITY mode indicates that the camera device
     * will use the highest-quality noise filtering algorithms,
     * even if it slows down capture rate. FAST means the camera device should not
     * slow down capture rate when applying noise filtering.</p>
     *
     * @see #NOISE_REDUCTION_MODE_OFF
     * @see #NOISE_REDUCTION_MODE_FAST
     * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> NOISE_REDUCTION_MODE =
            new Key<Integer>("android.noiseReduction.mode", int.class);

    /**
     * <p>An application-specified ID for the current
     * request. Must be maintained unchanged in output
     * frame</p>
     *
     * @hide
     */
    public static final Key<Integer> REQUEST_ID =
            new Key<Integer>("android.request.id", int.class);

    /**
     * <p>(x, y, width, height).</p>
     *
     * <p>A rectangle with the top-left corner of (x,y) and size
     * (width, height). The region of the sensor that is used for
     * output. Each stream must use this rectangle to produce its
     * output, cropping to a smaller region if necessary to
     * maintain the stream's aspect ratio.</p>
     *
     * <p>HAL2.x uses only (x, y, width)</p>
     *
     * <p>Any additional per-stream cropping must be done to
     * maximize the final pixel area of the stream.</p>
     *
     * <p>For example, if the crop region is set to a 4:3 aspect
     * ratio, then 4:3 streams should use the exact crop
     * region. 16:9 streams should further crop vertically
     * (letterbox).</p>
     *
     * <p>Conversely, if the crop region is set to a 16:9, then 4:3
     * outputs should crop horizontally (pillarbox), and 16:9
     * streams should match exactly. These additional crops must
     * be centered within the crop region.</p>
     *
     * <p>The output streams must maintain square pixels at all
     * times, no matter what the relative aspect ratios of the
     * crop region and the stream are. Negative values for
     * corner are allowed for raw output if full pixel array is
     * larger than active pixel array. Width and height may be
     * rounded to nearest larger supportable width, especially
     * for raw output, where only a few fixed scales may be
     * possible. The width and height of the crop region cannot
     * be set to be smaller than floor( activeArraySize.width /
     * android.scaler.maxDigitalZoom ) and floor( activeArraySize.height /
     * android.scaler.maxDigitalZoom ), respectively.</p>
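     *
     * <p>A sketch of the per-stream letterbox/pillarbox rule above
     * (an illustration only; the camera device performs this cropping
     * internally):</p>
     *
     * <pre>{@code
     * // Largest centered sub-rectangle of 'crop' matching the stream's
     * // aspect ratio (streamW:streamH).
     * static Rect streamCrop(Rect crop, int streamW, int streamH) {
     *     if (crop.width() * streamH > crop.height() * streamW) {
     *         // Crop region is wider than the stream: crop horizontally (pillarbox).
     *         int w = crop.height() * streamW / streamH;
     *         int dx = (crop.width() - w) / 2;
     *         return new Rect(crop.left + dx, crop.top, crop.left + dx + w, crop.bottom);
     *     } else {
     *         // Crop region is taller than the stream: crop vertically (letterbox).
     *         int h = crop.width() * streamH / streamW;
     *         int dy = (crop.height() - h) / 2;
     *         return new Rect(crop.left, crop.top + dy, crop.right, crop.top + dy + h);
     *     }
     * }
     * }</pre>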
     */
    public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
            new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);

    /**
     * <p>Duration each pixel is exposed to
     * light.</p>
     *
     * <p>If the sensor can't expose this exact duration, it should shorten the
     * duration exposed to the nearest possible value (rather than expose longer).</p>
     *
     * <p>1/10000 - 30 sec range. No bulb mode</p>
     */
    public static final Key<Long> SENSOR_EXPOSURE_TIME =
            new Key<Long>("android.sensor.exposureTime", long.class);

    /**
     * <p>Duration from start of frame exposure to
     * start of next frame exposure.</p>
     *
     * <p>The maximum frame rate that can be supported by a camera subsystem is
     * a function of many factors:</p>
     *
     * <ul>
     * <li>Requested resolutions of output image streams</li>
     * <li>Availability of binning / skipping modes on the imager</li>
     * <li>The bandwidth of the imager interface</li>
     * <li>The bandwidth of the various ISP processing blocks</li>
     * </ul>
     *
     * <p>Since these factors can vary greatly between different ISPs and
     * sensors, the camera abstraction tries to represent the bandwidth
     * restrictions with as simple a model as possible.</p>
     *
     * <p>The model presented has the following characteristics:</p>
     *
     * <ul>
     * <li>The image sensor is always configured to output the smallest
     * resolution possible given the application's requested output stream
     * sizes. The smallest resolution is defined as being at least as large
     * as the largest requested output stream size.</li>
     * <li>Since any request may use any or all the currently configured
     * output streams, the sensor and ISP must be configured to support
     * scaling a single capture to all the streams at the same time. Therefore,
     * the overall frame rate of a given configured stream set is governed only
     * by the largest requested stream resolution.</li>
     * <li>Using more than one output stream in a request does not affect the
     * frame duration.</li>
     * </ul>
     *
     * <p>The necessary information for the application, given the model above,
     * is provided via the android.scaler.available*MinDurations fields.
     * These are used to determine the maximum frame rate / minimum frame
     * duration that is possible for a given stream configuration.</p>
     *
     * <p>Specifically, the application can use the following rules to
     * determine the minimum frame duration it can request from the HAL
     * device (a sketch of rules #6 and #7 follows this list):</p>
     *
     * <ol>
     * <li>Let the set of currently configured input/output streams
     * be called <code>S</code>; divide them into three sets: streams in a JPEG format
     * <code>SJ</code>, streams in a raw sensor format <code>SR</code>, and the rest ('processed')
     * <code>SP</code>.</li>
     * <li>Find the largest resolution in each of these sets; call them
     * <code>RJ</code>, <code>RR</code>, and <code>RP</code>.</li>
     * <li>If <code>RJ</code> is greater than <code>RP</code>, set <code>RP</code> equal to <code>RJ</code>. If there is
     * no exact match for <code>RP == RJ</code> (in particular there isn't an available
     * processed resolution at the same size as <code>RJ</code>), then set <code>RP</code> equal
     * to the smallest processed resolution that is larger than <code>RJ</code>. If
     * there are no processed resolutions larger than <code>RJ</code>, then set <code>RP</code> to
     * the processed resolution closest to <code>RJ</code>.</li>
     * <li>If <code>RP</code> is greater than <code>RR</code>, set <code>RR</code> equal to <code>RP</code>. If there is
     * no exact match for <code>RR == RP</code> (in particular there isn't an available
     * raw resolution at the same size as <code>RP</code>), then set <code>RR</code> equal
     * to the smallest raw resolution that is larger than <code>RP</code>. If
     * there are no raw resolutions larger than <code>RP</code>, then set <code>RR</code> to
     * the raw resolution closest to <code>RP</code>.</li>
     * <li>Look up the corresponding minimum frame durations for <code>RJ</code>, <code>RR</code>,
     * and <code>RP</code> in the android.scaler.available*MinDurations fields; call
     * them <code>FJ</code>, <code>FR</code>, and <code>FP</code>.</li>
     * <li>If a request contains no JPEG stream, the minimum supported frame
     * duration for the request is <code>max(FR, FP)</code>.</li>
     * <li>If a request contains a JPEG stream, the minimum supported frame
     * duration for the request is <code>max(FR, FP, FJ)</code>.</li>
     * <li>A request that includes a JPEG stream may be able to run at a
     * shorter frame duration if it is submitted at least <code>FJ</code> ns after a
     * previous JPEG-using request. The minimum supported frame duration will
     * vary between the values calculated in #6 and #7.</li>
     * </ol>
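     *
     * <p>An illustrative sketch of rules #6 and #7 (parameter names are
     * placeholders; the real <code>FJ</code>, <code>FR</code>, <code>FP</code> values come from
     * the lookup in step 5):</p>
     *
     * <pre>{@code
     * // Minimum frame duration (ns) for a request, given the per-format
     * // minimum durations computed in steps 1-5.
     * static long minFrameDuration(boolean requestHasJpeg,
     *         long fj, long fr, long fp) {
     *     long noJpegMin = Math.max(fr, fp);                    // rule #6
     *     return requestHasJpeg ? Math.max(noJpegMin, fj)       // rule #7
     *                           : noJpegMin;
     * }
     * }</pre>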
     */
    public static final Key<Long> SENSOR_FRAME_DURATION =
            new Key<Long>("android.sensor.frameDuration", long.class);

    /**
     * <p>Gain applied to image data. Must be
     * implemented through analog gain only if set to values
     * below 'maximum analog sensitivity'.</p>
     *
     * <p>If the sensor can't apply this exact gain, it should lessen the
     * gain to the nearest possible value (rather than gain more).</p>
     *
     * <p>ISO 12232:2006 REI method</p>
     */
    public static final Key<Integer> SENSOR_SENSITIVITY =
            new Key<Integer>("android.sensor.sensitivity", int.class);

    /**
     * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
     * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
     *
     * <p>Each color channel is treated as an unsigned 32-bit integer.
     * The camera device then uses the most significant X bits
     * that correspond to how many bits are in its Bayer raw sensor
     * output.</p>
     *
     * <p>For example, a sensor with RAW10 Bayer output would use the
     * 10 most significant bits from each color channel.</p>
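     *
     * <p>A small sketch of that bit selection (illustrative only):</p>
     *
     * <pre>{@code
     * // For a sensor with 'rawBits' bits per Bayer sample (e.g. 10 for
     * // RAW10), keep the most significant bits of the 32-bit channel value.
     * static int testPatternSample(int channelValue, int rawBits) {
     *     return channelValue >>> (32 - rawBits); // unsigned shift
     * }
     * }</pre>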
     *
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     *
     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
     */
    public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
            new Key<int[]>("android.sensor.testPatternData", int[].class);

    /**
     * <p>When enabled, the sensor sends a test pattern instead of
     * doing a real exposure from the camera.</p>
     *
     * <p>When a test pattern is enabled, all manual sensor controls specified
     * by android.sensor.* should be ignored. All other controls should
     * work as normal.</p>
     *
     * <p>For example, if manual flash is enabled, flash firing should still
     * occur (and the test pattern should remain unmodified, since the flash
     * would not actually affect it).</p>
     *
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     *
     * @see #SENSOR_TEST_PATTERN_MODE_OFF
     * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
     * @see #SENSOR_TEST_PATTERN_MODE_PN9
     * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
     */
    public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
            new Key<Integer>("android.sensor.testPatternMode", int.class);

    /**
     * <p>Quality of lens shading correction applied
     * to the image data.</p>
     *
     * <p>When set to OFF mode, no lens shading correction will be applied by the
     * camera device, and an identity lens shading map data will be provided
     * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for lens
     * shading map with size specified as <code>{@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize} = [ 4, 3 ]</code>,
     * the output {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} for this case will be an identity map
     * shown below:</p>
     *
     * <pre>
     * [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
     *   1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
     *   1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
     *   1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
     *   1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
     *   1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
     * </pre>
     *
     * <p>When set to other modes, lens shading correction will be applied by the
     * camera device. Applications can request lens shading map data by setting
     * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide
     * lens shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap}, with size specified
     * by {@link CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE android.lens.info.shadingMapSize}.</p>
     *
     * @see CameraCharacteristics#LENS_INFO_SHADING_MAP_SIZE
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
     * @see #SHADING_MODE_OFF
     * @see #SHADING_MODE_FAST
     * @see #SHADING_MODE_HIGH_QUALITY
     * @hide
     */
    public static final Key<Integer> SHADING_MODE =
            new Key<Integer>("android.shading.mode", int.class);

    /**
     * <p>State of the face detector
     * unit.</p>
     *
     * <p>Whether face detection is enabled, and whether it
     * should output just the basic fields or the full set of
     * fields. Value must be one of the
     * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}.</p>
     *
     * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
     * @see #STATISTICS_FACE_DETECT_MODE_OFF
     * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
     * @see #STATISTICS_FACE_DETECT_MODE_FULL
     */
    public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
            new Key<Integer>("android.statistics.faceDetectMode", int.class);

    /**
     * <p>Whether the HAL needs to output the lens
     * shading map in output result metadata.</p>
     *
     * <p>When set to ON,
     * {@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} must be provided in
     * the output result metadata.</p>
     *
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
     * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
     */
    public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
            new Key<Integer>("android.statistics.lensShadingMapMode", int.class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the blue
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     *
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_BLUE =
            new Key<float[]>("android.tonemap.curveBlue", float[].class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the green
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     *
     * <p>See {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} for more details.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_GREEN =
            new Key<float[]>("android.tonemap.curveGreen", float[].class);

    /**
     * <p>Tonemapping / contrast / gamma curve for the red
     * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     *
     * <p>Each channel's curve is defined by an array of control points:</p>
     *
     * <pre>
     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} =
     *     [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
     * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}
     * </pre>
     *
     * <p>These are sorted in order of increasing <code>Pin</code>; it is always
     * guaranteed that input values 0.0 and 1.0 are included in the list to
     * define a complete mapping. For input values between control points,
     * the camera device must linearly interpolate between the control
     * points.</p>
     *
     * <p>Each curve can have an independent number of points, and the number
     * of points can be less than max (that is, the request doesn't have to
     * always provide a curve with number of points equivalent to
     * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
     *
     * <p>A few examples, and their corresponding graphical mappings; these
     * only specify the red channel and the precision is limited to 4
     * digits, for conciseness.</p>
     *
     * <p>Linear mapping:</p>
     *
     * <pre>
     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 0, 1.0, 1.0 ]
     * </pre>
     *
     * <p>Invert mapping:</p>
     *
     * <pre>
     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [ 0, 1.0, 1.0, 0 ]
     * </pre>
     *
     * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
     *
     * <pre>
     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
     *     0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
     *     0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
     *     0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
     *     0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
     * </pre>
     *
     * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
     *
     * <pre>
     * {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed} = [
     *     0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
     *     0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
     *     0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
     *     0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
     * </pre>
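     *
     * <p>A sketch of how such a gamma table could be generated (illustrative
     * only; the 16-point gamma 1/2.2 values above follow this formula):</p>
     *
     * <pre>{@code
     * // Build an N-point gamma curve in the [P0in, P0out, P1in, P1out, ...]
     * // layout described above, with evenly spaced input values.
     * static float[] gammaCurve(int points, double gamma) {
     *     float[] curve = new float[points * 2];
     *     for (int i = 0; i < points; i++) {
     *         double in = (double) i / (points - 1);
     *         curve[2 * i] = (float) in;                      // Pin
     *         curve[2 * i + 1] = (float) Math.pow(in, gamma); // Pout
     *     }
     *     return curve;
     * }
     * // gammaCurve(16, 1 / 2.2) reproduces the 16-point table above.
     * }</pre>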
     *
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
     * @see CaptureRequest#TONEMAP_MODE
     */
    public static final Key<float[]> TONEMAP_CURVE_RED =
            new Key<float[]>("android.tonemap.curveRed", float[].class);

    /**
     * <p>High-level global contrast/gamma/tonemapping control.</p>
     *
     * <p>When switching to an application-defined contrast curve by setting
     * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
     * per-channel with a set of <code>(in, out)</code> points that specify the
     * mapping from input high-bit-depth pixel value to the output
     * low-bit-depth value. Since the actual pixel ranges of both input
     * and output may change depending on the camera pipeline, the values
     * are specified by normalized floating-point numbers.</p>
     *
     * <p>More-complex color mapping operations such as 3D color look-up
     * tables, selective chroma enhancement, or other non-linear color
     * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
     * CONTRAST_CURVE.</p>
     *
     * <p>When using either FAST or HIGH_QUALITY, the camera device will
     * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed},
     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, and {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.
     * These values are always available, and as close as possible to the
     * actually used nonlinear/nonglobal transforms.</p>
     *
     * <p>If a request is sent with CONTRAST_CURVE with the camera device's
     * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
     * roughly the same.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_BLUE
     * @see CaptureRequest#TONEMAP_CURVE_GREEN
     * @see CaptureRequest#TONEMAP_CURVE_RED
     * @see CaptureRequest#TONEMAP_MODE
     * @see #TONEMAP_MODE_CONTRAST_CURVE
     * @see #TONEMAP_MODE_FAST
     * @see #TONEMAP_MODE_HIGH_QUALITY
     */
    public static final Key<Integer> TONEMAP_MODE =
            new Key<Integer>("android.tonemap.mode", int.class);

    /**
     * <p>This LED is nominally used to indicate to the user
     * that the camera is powered on and may be streaming images back to the
     * Application Processor. In certain rare circumstances, the OS may
     * disable this when video is processed locally and not transmitted to
     * any untrusted applications.</p>
     *
     * <p>In particular, the LED must always be on when the data could be
     * transmitted off the device. The LED should always be on whenever
     * data is stored locally on the device.</p>
     *
     * <p>The LED may be off if a trusted application is using the data that
     * doesn't violate the above rules.</p>
     *
     * @hide
     */
    public static final Key<Boolean> LED_TRANSMIT =
            new Key<Boolean>("android.led.transmit", boolean.class);

    /**
     * <p>Whether black-level compensation is locked
     * to its current values, or is free to vary.</p>
     *
     * <p>When set to ON, the values used for black-level
     * compensation will not change until the lock is set to
     * OFF.</p>
     *
     * <p>Since changes to certain capture parameters (such as
     * exposure time) may require resetting of black level
     * compensation, the camera device must report whether setting
     * the black level lock was successful in the output result
     * metadata.</p>
     *
     * <p>For example, if a sequence of requests is as follows:</p>
     *
     * <ul>
     * <li>Request 1: Exposure = x, Black level lock = OFF</li>
     * <li>Request 2: Exposure = x, Black level lock = ON</li>
     * <li>Request 3: Exposure = x, Black level lock = ON</li>
     * <li>Request 4: Exposure = y, Black level lock = ON</li>
     * <li>Request 5: Exposure = y, Black level lock = ON</li>
     * <li>Request 6: Exposure = y, Black level lock = ON</li>
     * </ul>
     *
     * <p>And the exposure change in Request 4 requires the camera
     * device to reset the black level offsets, then the output
     * result metadata is expected to be:</p>
     *
     * <ul>
     * <li>Result 1: Exposure = x, Black level lock = OFF</li>
     * <li>Result 2: Exposure = x, Black level lock = ON</li>
     * <li>Result 3: Exposure = x, Black level lock = ON</li>
     * <li>Result 4: Exposure = y, Black level lock = OFF</li>
     * <li>Result 5: Exposure = y, Black level lock = ON</li>
     * <li>Result 6: Exposure = y, Black level lock = ON</li>
     * </ul>
     *
     * <p>This indicates to the application that on frame 4, black
     * levels were reset due to exposure value changes, and pixel
     * values may not be consistent across captures.</p>
     *
     * <p>The camera device will maintain the lock to the extent
     * possible, only overriding the lock to OFF when changes to
     * other request parameters require a black level recalculation
     * or reset.</p>
     */
    public static final Key<Boolean> BLACK_LEVEL_LOCK =
            new Key<Boolean>("android.blackLevel.lock", boolean.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
}