CameraCharacteristics.java revision a23ffb5f50d5bf72bde9b8fdbcbd0cea037135b3
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.hardware.camera2.impl.CameraMetadataNative;

import java.util.Collections;
import java.util.List;

/**
 * <p>The properties describing a
 * {@link CameraDevice CameraDevice}.</p>
 *
 * <p>These properties are fixed for a given CameraDevice, and can be queried
 * through the {@link CameraManager CameraManager}
 * interface as well as through the CameraDevice interface.</p>
 *
 * @see CameraDevice
 * @see CameraManager
 */
public final class CameraCharacteristics extends CameraMetadata {

    private final CameraMetadataNative mProperties;
    private List<Key<?>> mAvailableRequestKeys;
    private List<Key<?>> mAvailableResultKeys;

    /**
     * Takes ownership of the passed-in properties object
     * @hide
     */
    public CameraCharacteristics(CameraMetadataNative properties) {
        mProperties = properties;
    }

    @Override
    public <T> T get(Key<T> key) {
        return mProperties.get(key);
    }

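    // Illustrative usage sketch (not part of this class): reading a single
    // characteristic through get(). The "manager" and "cameraId" names below are
    // placeholders for this example; a CameraCharacteristics instance is normally
    // obtained from CameraManager.
    //
    //     CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
    //     Integer facing = chars.get(CameraCharacteristics.LENS_FACING);
    //     if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
    //         // this is the back-facing camera
    //     }
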
    /**
     * Returns the list of keys supported by this {@link CameraDevice} for querying
     * with a {@link CaptureRequest}.
     *
     * <p>The list returned is not modifiable, so any attempts to modify it will throw
     * a {@code UnsupportedOperationException}.</p>
     *
     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
     *
     * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
     * {@link #getKeys()} instead.</p>
     *
     * @return List of keys supported by this CameraDevice for CaptureRequests.
     */
    public List<Key<?>> getAvailableCaptureRequestKeys() {
        if (mAvailableRequestKeys == null) {
            mAvailableRequestKeys = getAvailableKeyList(CaptureRequest.class);
        }
        return mAvailableRequestKeys;
    }

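    // Illustrative usage sketch (not part of this class): enumerating the request
    // keys a device supports before building a CaptureRequest. "characteristics",
    // "Log", and "TAG" are placeholders for this example.
    //
    //     for (Key<?> key : characteristics.getAvailableCaptureRequestKeys()) {
    //         Log.d(TAG, "Supported CaptureRequest key: " + key);
    //     }
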
    /**
     * Returns the list of keys supported by this {@link CameraDevice} for querying
     * with a {@link CaptureResult}.
     *
     * <p>The list returned is not modifiable, so any attempts to modify it will throw
     * a {@code UnsupportedOperationException}.</p>
     *
     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
     *
     * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
     * {@link #getKeys()} instead.</p>
     *
     * @return List of keys supported by this CameraDevice for CaptureResults.
     */
    public List<Key<?>> getAvailableCaptureResultKeys() {
        if (mAvailableResultKeys == null) {
            mAvailableResultKeys = getAvailableKeyList(CaptureResult.class);
        }
        return mAvailableResultKeys;
    }

    /**
     * Returns the list of keys supported by this {@link CameraDevice} by metadataClass.
     *
     * <p>The list returned is not modifiable, so any attempts to modify it will throw
     * a {@code UnsupportedOperationException}.</p>
     *
     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
     *
     * @param metadataClass The subclass of CameraMetadata that you want to get the keys for.
     *
     * @return List of keys supported by this CameraDevice for metadataClass.
     *
     * @throws AssertionError if metadataClass is CameraMetadata itself or is not a
     *         subclass of CameraMetadata
     */
    private <T extends CameraMetadata> List<Key<?>> getAvailableKeyList(Class<T> metadataClass) {

        if (metadataClass.equals(CameraMetadata.class)) {
            throw new AssertionError(
                    "metadataClass must be a strict subclass of CameraMetadata");
        } else if (!CameraMetadata.class.isAssignableFrom(metadataClass)) {
            throw new AssertionError(
                    "metadataClass must be a subclass of CameraMetadata");
        }

        return Collections.unmodifiableList(getKeysStatic(metadataClass, /*instance*/null));
    }

    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * The key entries below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/


    /**
     * <p>The set of auto-exposure antibanding modes that are
     * supported by this camera device.</p>
     * <p>Not all of the auto-exposure antibanding modes may be
     * supported by a given camera device. This field lists the
     * valid antibanding modes that the application may request
     * for this camera device; they must include AUTO.</p>
     */
    public static final Key<byte[]> CONTROL_AE_AVAILABLE_ANTIBANDING_MODES =
            new Key<byte[]>("android.control.aeAvailableAntibandingModes", byte[].class);

    /**
     * <p>The set of auto-exposure modes that are supported by this
     * camera device.</p>
     * <p>Not all the auto-exposure modes may be supported by a
     * given camera device, especially if no flash unit is
     * available. This entry lists the valid modes for
     * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} for this camera device.</p>
     * <p>All camera devices support ON, and all camera devices with
     * flash units support ON_AUTO_FLASH and
     * ON_ALWAYS_FLASH.</p>
     * <p>Full-capability camera devices always support OFF mode,
     * which enables application control of camera exposure time,
     * sensitivity, and frame duration.</p>
     *
     * @see CaptureRequest#CONTROL_AE_MODE
     */
    public static final Key<byte[]> CONTROL_AE_AVAILABLE_MODES =
            new Key<byte[]>("android.control.aeAvailableModes", byte[].class);

    /**
     * <p>List of frame rate ranges supported by the
     * AE algorithm/hardware</p>
     */
    public static final Key<int[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
            new Key<int[]>("android.control.aeAvailableTargetFpsRanges", int[].class);

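    // Illustrative sketch (assumption: the int[] is a flattened list of
    // (min, max) FPS pairs). "characteristics", "Log", and "TAG" are placeholders.
    //
    //     int[] fpsRanges = characteristics.get(
    //             CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
    //     for (int i = 0; i + 1 < fpsRanges.length; i += 2) {
    //         Log.d(TAG, "AE target FPS range: [" + fpsRanges[i] + ", " + fpsRanges[i + 1] + "]");
    //     }
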
    /**
     * <p>Maximum and minimum exposure compensation
     * setting, in counts of
     * android.control.aeCompensationStep</p>
     */
    public static final Key<int[]> CONTROL_AE_COMPENSATION_RANGE =
            new Key<int[]>("android.control.aeCompensationRange", int[].class);

    /**
     * <p>Smallest step by which exposure compensation
     * can be changed</p>
     */
    public static final Key<Rational> CONTROL_AE_COMPENSATION_STEP =
            new Key<Rational>("android.control.aeCompensationStep", Rational.class);

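    // Illustrative sketch (assumptions: Rational exposes getNumerator() and
    // getDenominator(); "characteristics" is a placeholder): converting the
    // compensation range into EV units using the step size.
    //
    //     int[] range = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
    //     Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
    //     double stepEv = (double) step.getNumerator() / step.getDenominator();
    //     double minEv = range[0] * stepEv;   // e.g. -3.0 EV
    //     double maxEv = range[1] * stepEv;   // e.g. +3.0 EV
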
    /**
     * <p>List of AF modes that can be
     * selected with {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
     * <p>Not all the auto-focus modes may be supported by a
     * given camera device. This entry lists the valid modes for
     * {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} for this camera device.</p>
     * <p>All camera devices will support OFF mode, and all camera devices with
     * adjustable focuser units (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>)
     * will support AUTO mode.</p>
     *
     * @see CaptureRequest#CONTROL_AF_MODE
     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
     */
    public static final Key<byte[]> CONTROL_AF_AVAILABLE_MODES =
            new Key<byte[]>("android.control.afAvailableModes", byte[].class);

    /**
     * <p>List containing the subset of color effects
     * specified in {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode} that is supported by
     * this device.</p>
     * <p>This list contains the color effect modes that can be applied to
     * images produced by the camera device. Only modes that have
     * been fully implemented for the current device may be included here.
     * Implementations are not expected to be consistent across all devices.
     * If no color effect modes are available for a device, this should
     * simply be set to OFF.</p>
     * <p>A color effect will only be applied if
     * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF.</p>
     *
     * @see CaptureRequest#CONTROL_EFFECT_MODE
     * @see CaptureRequest#CONTROL_MODE
     */
    public static final Key<byte[]> CONTROL_AVAILABLE_EFFECTS =
            new Key<byte[]>("android.control.availableEffects", byte[].class);

    /**
     * <p>List containing a subset of scene modes
     * specified in {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode}.</p>
     * <p>This list contains scene modes that can be set for the camera device.
     * Only scene modes that have been fully implemented for the
     * camera device may be included here. Implementations are not expected
     * to be consistent across all devices. If no scene modes are supported
     * by the camera device, this will be set to <code>[DISABLED]</code>.</p>
     *
     * @see CaptureRequest#CONTROL_SCENE_MODE
     */
    public static final Key<byte[]> CONTROL_AVAILABLE_SCENE_MODES =
            new Key<byte[]>("android.control.availableSceneModes", byte[].class);

    /**
     * <p>List of video stabilization modes that can
     * be supported</p>
     */
    public static final Key<byte[]> CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES =
            new Key<byte[]>("android.control.availableVideoStabilizationModes", byte[].class);

    /**
     * <p>The set of auto-white-balance modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode})
     * that are supported by this camera device.</p>
     * <p>Not all the auto-white-balance modes may be supported by a
     * given camera device. This entry lists the valid modes for
     * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} for this camera device.</p>
     * <p>All camera devices will support ON mode.</p>
     * <p>Full-capability camera devices will always support OFF mode,
     * which enables application control of white balance, by using
     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} ({@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} must be set to TRANSFORM_MATRIX).</p>
     *
     * @see CaptureRequest#COLOR_CORRECTION_GAINS
     * @see CaptureRequest#COLOR_CORRECTION_MODE
     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
     * @see CaptureRequest#CONTROL_AWB_MODE
     */
    public static final Key<byte[]> CONTROL_AWB_AVAILABLE_MODES =
            new Key<byte[]>("android.control.awbAvailableModes", byte[].class);

    /**
     * <p>List of the maximum number of regions that can be used for metering in
     * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
     * this corresponds to the maximum number of elements in
     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}, {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions},
     * and {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
     *
     * @see CaptureRequest#CONTROL_AE_REGIONS
     * @see CaptureRequest#CONTROL_AF_REGIONS
     * @see CaptureRequest#CONTROL_AWB_REGIONS
     */
    public static final Key<int[]> CONTROL_MAX_REGIONS =
            new Key<int[]>("android.control.maxRegions", int[].class);

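    // Illustrative sketch (assumption: the array entries follow the AE, AWB, AF
    // order described in the documentation above). "characteristics" is a placeholder.
    //
    //     int[] maxRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS);
    //     int maxAeRegions  = maxRegions[0];
    //     int maxAwbRegions = maxRegions[1];
    //     int maxAfRegions  = maxRegions[2];
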
    /**
     * <p>Whether this camera device has a
     * flash.</p>
     * <p>If no flash, none of the flash controls do
     * anything. All other metadata should return 0.</p>
     */
    public static final Key<Boolean> FLASH_INFO_AVAILABLE =
            new Key<Boolean>("android.flash.info.available", boolean.class);

    /**
     * <p>Supported resolutions for the JPEG thumbnail</p>
     * <p>The following conditions will be satisfied for this size list:</p>
     * <ul>
     * <li>The sizes will be sorted by increasing pixel area (width x height).
     * If several resolutions have the same area, they will be sorted by increasing width.</li>
     * <li>The aspect ratio of the largest thumbnail size will be the same as the
     * aspect ratio of the largest JPEG output size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
     * The largest size is defined as the size that has the largest pixel area
     * in a given size list.</li>
     * <li>Each output JPEG size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} will have at least
     * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
     * and vice versa.</li>
     * <li>All non-(0, 0) sizes will have non-zero widths and heights.</li>
     * </ul>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
     */
    public static final Key<android.hardware.camera2.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
            new Key<android.hardware.camera2.Size[]>("android.jpeg.availableThumbnailSizes", android.hardware.camera2.Size[].class);

    /**
     * <p>List of supported aperture
     * values.</p>
     * <p>If the camera device doesn't support variable apertures,
     * the listed value will be the fixed aperture.</p>
     * <p>If the camera device supports variable apertures, the aperture values
     * in this list will be sorted in ascending order.</p>
     */
    public static final Key<float[]> LENS_INFO_AVAILABLE_APERTURES =
            new Key<float[]>("android.lens.info.availableApertures", float[].class);

    /**
     * <p>List of supported neutral density filter values for
     * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity}.</p>
     * <p>If changing {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} is not supported,
     * availableFilterDensities must contain only 0. Otherwise, this
     * list contains only the exact filter density values available on
     * this camera device.</p>
     *
     * @see CaptureRequest#LENS_FILTER_DENSITY
     */
    public static final Key<float[]> LENS_INFO_AVAILABLE_FILTER_DENSITIES =
            new Key<float[]>("android.lens.info.availableFilterDensities", float[].class);

    /**
     * <p>The available focal lengths for this device for use with
     * {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}.</p>
     * <p>If optical zoom is not supported, this will only report
     * a single value corresponding to the static focal length of the
     * device. Otherwise, this will report every focal length supported
     * by the device.</p>
     *
     * @see CaptureRequest#LENS_FOCAL_LENGTH
     */
    public static final Key<float[]> LENS_INFO_AVAILABLE_FOCAL_LENGTHS =
            new Key<float[]>("android.lens.info.availableFocalLengths", float[].class);

    /**
     * <p>List containing a subset of the optical image
     * stabilization (OIS) modes specified in
     * {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}.</p>
     * <p>If OIS is not implemented for a given camera device, this should
     * contain only OFF.</p>
     *
     * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
     */
    public static final Key<byte[]> LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION =
            new Key<byte[]>("android.lens.info.availableOpticalStabilization", byte[].class);

    /**
     * <p>Optional. Hyperfocal distance for this lens.</p>
     * <p>If the lens is fixed focus, the camera device will report 0.</p>
     * <p>If the lens is not fixed focus, the camera device will report this
     * field when {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} is APPROXIMATE or CALIBRATED.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     *
     * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
     */
    public static final Key<Float> LENS_INFO_HYPERFOCAL_DISTANCE =
            new Key<Float>("android.lens.info.hyperfocalDistance", float.class);

    /**
     * <p>Shortest distance from frontmost surface
     * of the lens that can be focused correctly.</p>
     * <p>If the lens is fixed-focus, this should be
     * 0.</p>
     */
    public static final Key<Float> LENS_INFO_MINIMUM_FOCUS_DISTANCE =
            new Key<Float>("android.lens.info.minimumFocusDistance", float.class);

    /**
     * <p>Dimensions of lens shading map.</p>
     * <p>The map should be on the order of 30-40 rows and columns, and
     * must be smaller than 64x64.</p>
     */
    public static final Key<android.hardware.camera2.Size> LENS_INFO_SHADING_MAP_SIZE =
            new Key<android.hardware.camera2.Size>("android.lens.info.shadingMapSize", android.hardware.camera2.Size.class);

    /**
     * <p>The lens focus distance calibration quality.</p>
     * <p>The lens focus distance calibration quality determines the reliability of
     * focus related metadata entries, i.e. {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
     * {@link CaptureResult#LENS_FOCUS_RANGE android.lens.focusRange}, {@link CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE android.lens.info.hyperfocalDistance}, and
     * {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}.</p>
     *
     * @see CaptureRequest#LENS_FOCUS_DISTANCE
     * @see CaptureResult#LENS_FOCUS_RANGE
     * @see CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE
     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED
     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE
     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED
     */
    public static final Key<Integer> LENS_INFO_FOCUS_DISTANCE_CALIBRATION =
            new Key<Integer>("android.lens.info.focusDistanceCalibration", int.class);

    /**
     * <p>Direction the camera faces relative to
     * device screen</p>
     * @see #LENS_FACING_FRONT
     * @see #LENS_FACING_BACK
     */
    public static final Key<Integer> LENS_FACING =
            new Key<Integer>("android.lens.facing", int.class);

    /**
     * <p>If set to 1, the HAL will always split result
     * metadata for a single capture into multiple buffers,
     * returned using multiple process_capture_result calls.</p>
     * <p>Does not need to be listed in static
     * metadata. Support for partial results will be reworked in
     * future versions of camera service. This quirk will stop
     * working at that point; DO NOT USE without careful
     * consideration of future support.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     * @hide
     */
    public static final Key<Byte> QUIRKS_USE_PARTIAL_RESULT =
            new Key<Byte>("android.quirks.usePartialResult", byte.class);

    /**
     * <p>The maximum numbers of different types of output streams
     * that can be configured and used simultaneously by a camera device.</p>
     * <p>This is a 3-element tuple that contains the maximum number of simultaneous
     * output streams for the raw sensor, processed (and uncompressed), and JPEG formats, respectively.
     * For example, if the maximum number of raw sensor output streams is 1, the maximum number of YUV
     * streams is 3, and the maximum number of JPEG streams is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
     * <p>This lists the upper bound of the number of output streams supported by
     * the camera device. Using more streams simultaneously may require more hardware and
     * CPU resources that will consume more power. The image format for an output stream can
     * be any supported format provided by {@link CameraCharacteristics#SCALER_AVAILABLE_FORMATS android.scaler.availableFormats}. The formats
     * defined in {@link CameraCharacteristics#SCALER_AVAILABLE_FORMATS android.scaler.availableFormats} can be categorized into the 3 stream types
     * as below:</p>
     * <ul>
     * <li>JPEG-compressed format: BLOB.</li>
     * <li>Raw formats: RAW_SENSOR and RAW_OPAQUE.</li>
     * <li>Processed, uncompressed formats: YCbCr_420_888, YCrCb_420_SP, YV12.</li>
     * </ul>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_FORMATS
     */
    public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
            new Key<int[]>("android.request.maxNumOutputStreams", int[].class);

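    // Illustrative sketch (assumption: the tuple order is (raw, processed, JPEG),
    // matching the documentation above). "characteristics" is a placeholder.
    //
    //     int[] maxOutputStreams = characteristics.get(
    //             CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);
    //     int maxRawStreams       = maxOutputStreams[0];
    //     int maxProcessedStreams = maxOutputStreams[1];
    //     int maxJpegStreams      = maxOutputStreams[2];
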
    /**
     * <p>The maximum number of input streams of any type
     * that can be configured and used simultaneously by a camera device.</p>
     * <p>When set to 0, it means no input stream is supported.</p>
     * <p>The image format for an input stream can be any supported format provided
     * by android.scaler.availableInputFormats. When using an input stream, there must be
     * at least one output stream configured to receive the reprocessed images.</p>
     * <p>For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
     * stream image format will be RAW_OPAQUE, and the associated output stream image format
     * should be JPEG.</p>
     */
    public static final Key<Integer> REQUEST_MAX_NUM_INPUT_STREAMS =
            new Key<Integer>("android.request.maxNumInputStreams", int.class);

    /**
     * <p>Specifies the maximum number of pipeline stages a frame
     * has to go through from when it's exposed to when it's available
     * to the framework.</p>
     * <p>A typical minimum value for this is 2 (one stage to expose,
     * one stage to read out) from the sensor. The ISP then usually adds
     * its own stages to do custom HW processing. Further stages may be
     * added by SW processing.</p>
     * <p>Depending on what settings are used (e.g. YUV, JPEG) and what
     * processing is enabled (e.g. face detection), the actual pipeline
     * depth (specified by {@link CaptureResult#REQUEST_PIPELINE_DEPTH android.request.pipelineDepth}) may be less than
     * the max pipeline depth.</p>
     * <p>A pipeline depth of X stages is equivalent to a pipeline latency of
     * X frame intervals.</p>
     * <p>This value will be 8 or less.</p>
     *
     * @see CaptureResult#REQUEST_PIPELINE_DEPTH
     */
    public static final Key<Byte> REQUEST_PIPELINE_MAX_DEPTH =
            new Key<Byte>("android.request.pipelineMaxDepth", byte.class);

    /**
     * <p>Optional. Defaults to 1. Defines how many sub-components
     * a result will be composed of.</p>
     * <p>In order to combat the pipeline latency, partial results
     * may be delivered to the application layer from the camera device as
     * soon as they are available.</p>
     * <p>A value of 1 means that partial results are not supported.</p>
     * <p>A typical use case for this might be: after requesting an AF lock the
     * new AF state might be available 50% of the way through the pipeline.
     * The camera device could then immediately dispatch this state via a
     * partial result to the framework/application layer, and the rest of
     * the metadata via later partial results.</p>
     */
    public static final Key<Integer> REQUEST_PARTIAL_RESULT_COUNT =
            new Key<Integer>("android.request.partialResultCount", int.class);

    /**
     * <p>List of capabilities that the camera device
     * advertises as fully supporting.</p>
     * <p>A capability is a contract that the camera device makes in order
     * to be able to satisfy one or more use cases.</p>
     * <p>Listing a capability guarantees that the whole set of features
     * required to support a common use case will all be available.</p>
     * <p>Using a subset of the functionality provided by an unsupported
     * capability may be possible on a specific camera device implementation;
     * to do this, query each of android.request.availableRequestKeys,
     * android.request.availableResultKeys, and
     * android.request.availableCharacteristicsKeys.</p>
     * <p>XX: Maybe these should go into android.info.supportedHardwareLevel
     * as a table instead?</p>
     * <p>The following capabilities are guaranteed to be available on
     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL devices:</p>
     * <ul>
     * <li>MANUAL_SENSOR</li>
     * <li>ZSL</li>
     * </ul>
     * <p>Other capabilities may be available on either FULL or LIMITED
     * devices, but the application should query this field to be sure.</p>
     *
     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
     * @see #REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
     * @see #REQUEST_AVAILABLE_CAPABILITIES_OPTIONAL
     * @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR
     * @see #REQUEST_AVAILABLE_CAPABILITIES_GCAM
     * @see #REQUEST_AVAILABLE_CAPABILITIES_ZSL
     * @see #REQUEST_AVAILABLE_CAPABILITIES_DNG
     */
    public static final Key<Integer> REQUEST_AVAILABLE_CAPABILITIES =
            new Key<Integer>("android.request.availableCapabilities", int.class);

    /**
     * <p>A list of all keys that the camera device has available
     * to use with CaptureRequest.</p>
     * <p>Attempting to set a key into a CaptureRequest that is not
     * listed here will result in an invalid request and will be rejected
     * by the camera device.</p>
     * <p>This field can be used to query the feature set of a camera device
     * at a more granular level than capabilities. This is especially
     * important for optional keys that are not listed under any capability
     * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
     * <p>TODO: This should be used by #getAvailableCaptureRequestKeys.</p>
     *
     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * @hide
     */
    public static final Key<int[]> REQUEST_AVAILABLE_REQUEST_KEYS =
            new Key<int[]>("android.request.availableRequestKeys", int[].class);

    /**
     * <p>A list of all keys that the camera device has available
     * to use with CaptureResult.</p>
     * <p>Attempting to get a key from a CaptureResult that is not
     * listed here will always return a <code>null</code> value. Getting a key from
     * a CaptureResult that is listed here must never return a <code>null</code>
     * value.</p>
     * <p>The following keys may return <code>null</code> unless they are enabled:</p>
     * <ul>
     * <li>{@link CaptureResult#STATISTICS_LENS_SHADING_MAP android.statistics.lensShadingMap} (non-null iff {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON)</li>
     * </ul>
     * <p>(Those sometimes-null keys should nevertheless be listed here
     * if they are available.)</p>
     * <p>This field can be used to query the feature set of a camera device
     * at a more granular level than capabilities. This is especially
     * important for optional keys that are not listed under any capability
     * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
     * <p>TODO: This should be used by #getAvailableCaptureResultKeys.</p>
     *
     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
     * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
     * @hide
     */
    public static final Key<int[]> REQUEST_AVAILABLE_RESULT_KEYS =
            new Key<int[]>("android.request.availableResultKeys", int[].class);

    /**
     * <p>A list of all keys that the camera device has available
     * to use with CameraCharacteristics.</p>
     * <p>This entry follows the same rules as
     * android.request.availableResultKeys (except that it applies for
     * CameraCharacteristics instead of CaptureResult). See above for more
     * details.</p>
     * <p>TODO: This should be used by CameraCharacteristics#getKeys.</p>
     * @hide
     */
    public static final Key<int[]> REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =
            new Key<int[]>("android.request.availableCharacteristicsKeys", int[].class);

    /**
     * <p>The list of image formats that are supported by this
     * camera device for output streams.</p>
     * <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
     * <p>When set to YUV_420_888, the application can access the YUV420 data directly.</p>
     */
    public static final Key<int[]> SCALER_AVAILABLE_FORMATS =
            new Key<int[]>("android.scaler.availableFormats", int[].class);

    /**
     * <p>The minimum frame duration that is supported
     * for each resolution in {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES android.scaler.availableJpegSizes}.</p>
     * <p>This corresponds to the minimum steady-state frame duration when only
     * that JPEG stream is active and captured in a burst, with all
     * processing (typically in android.*.mode) set to FAST.</p>
     * <p>When multiple streams are configured, the minimum
     * frame duration will be &gt;= max(individual stream min
     * durations).</p>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES
     */
    public static final Key<long[]> SCALER_AVAILABLE_JPEG_MIN_DURATIONS =
            new Key<long[]>("android.scaler.availableJpegMinDurations", long[].class);

    /**
     * <p>The JPEG resolutions that are supported by this camera device.</p>
     * <p>The resolutions are listed as <code>(width, height)</code> pairs. All camera devices will support
     * sensor maximum resolution (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}).</p>
     *
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     */
    public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_JPEG_SIZES =
            new Key<android.hardware.camera2.Size[]>("android.scaler.availableJpegSizes", android.hardware.camera2.Size[].class);

    /**
     * <p>The maximum ratio between active area width
     * and crop region width, or between active area height and
     * crop region height, if the crop region height is larger
     * than width</p>
     */
    public static final Key<Float> SCALER_AVAILABLE_MAX_DIGITAL_ZOOM =
            new Key<Float>("android.scaler.availableMaxDigitalZoom", float.class);

    /**
     * <p>For each available processed output size (defined in
     * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES android.scaler.availableProcessedSizes}), this property lists the
     * minimum supportable frame duration for that size.</p>
     * <p>This should correspond to the frame duration when only that processed
     * stream is active, with all processing (typically in android.*.mode)
     * set to FAST.</p>
     * <p>When multiple streams are configured, the minimum frame duration will
     * be &gt;= max(individual stream min durations).</p>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES
     */
    public static final Key<long[]> SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS =
            new Key<long[]>("android.scaler.availableProcessedMinDurations", long[].class);

    /**
     * <p>The resolutions available for use with
     * processed output streams, such as YV12, NV12, and
     * platform opaque YUV/RGB streams to the GPU or video
     * encoders.</p>
     * <p>The resolutions are listed as <code>(width, height)</code> pairs.</p>
     * <p>For a given use case, the actual maximum supported resolution
     * may be lower than what is listed here, depending on the destination
     * Surface for the image data. For example, for recording video,
     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
     * can provide.</p>
     * <p>Please reference the documentation for the image data destination to
     * check if it limits the maximum size for image data.</p>
     */
    public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_PROCESSED_SIZES =
            new Key<android.hardware.camera2.Size[]>("android.scaler.availableProcessedSizes", android.hardware.camera2.Size[].class);

    /**
     * <p>The mapping of image formats that are supported by this
     * camera device for input streams, to their corresponding output formats.</p>
     * <p>All camera devices with at least 1
     * android.request.maxNumInputStreams will have at least one
     * available input format.</p>
     * <p>The camera device will support the following map of formats,
     * if its dependent capability is supported:</p>
     * <table>
     * <thead>
     * <tr>
     * <th align="left">Input Format</th>
     * <th align="left">Output Format</th>
     * <th align="left">Capability</th>
     * </tr>
     * </thead>
     * <tbody>
     * <tr>
     * <td align="left">RAW_OPAQUE</td>
     * <td align="left">JPEG</td>
     * <td align="left">ZSL</td>
     * </tr>
     * <tr>
     * <td align="left">RAW_OPAQUE</td>
     * <td align="left">YUV_420_888</td>
     * <td align="left">ZSL</td>
     * </tr>
     * <tr>
     * <td align="left">RAW_OPAQUE</td>
     * <td align="left">RAW16</td>
     * <td align="left">DNG</td>
     * </tr>
     * <tr>
     * <td align="left">RAW16</td>
     * <td align="left">YUV_420_888</td>
     * <td align="left">DNG</td>
     * </tr>
     * <tr>
     * <td align="left">RAW16</td>
     * <td align="left">JPEG</td>
     * <td align="left">DNG</td>
     * </tr>
     * </tbody>
     * </table>
     * <p>For ZSL-capable camera devices, using the RAW_OPAQUE format
     * as either input or output will never hurt maximum frame rate (i.e.
     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} will not have RAW_OPAQUE).</p>
     * <p>Attempting to configure an input stream with output streams not
     * listed as available in this map is not valid.</p>
     * <p>TODO: Add java type mapping for this property.</p>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
     */
    public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
            new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);

    /**
     * <p>The available stream configurations that this
     * camera device supports
     * (i.e. format, width, height, output/input stream).</p>
     * <p>The configurations are listed as <code>(format, width, height, input?)</code>
     * tuples.</p>
     * <p>All camera devices will support sensor maximum resolution (defined by
     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
     * <p>For a given use case, the actual maximum supported resolution
     * may be lower than what is listed here, depending on the destination
     * Surface for the image data. For example, for recording video,
     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
     * can provide.</p>
     * <p>Please reference the documentation for the image data destination to
     * check if it limits the maximum size for image data.</p>
     * <p>Not all output formats may be supported in a configuration with
     * an input stream of a particular format. For more details, see
     * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}.</p>
     * <p>The following table describes the minimum required output stream
     * configurations based on the hardware level
     * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
     * <table>
     * <thead>
     * <tr>
     * <th align="center">Format</th>
     * <th align="center">Size</th>
     * <th align="center">Hardware Level</th>
     * <th align="center">Notes</th>
     * </tr>
     * </thead>
     * <tbody>
     * <tr>
     * <td align="center">JPEG</td>
     * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
     * <td align="center">Any</td>
     * <td align="center"></td>
     * </tr>
     * <tr>
     * <td align="center">JPEG</td>
     * <td align="center">1920x1080 (1080p)</td>
     * <td align="center">Any</td>
     * <td align="center">if 1080p &lt;= activeArraySize</td>
     * </tr>
     * <tr>
     * <td align="center">JPEG</td>
     * <td align="center">1280x720 (720p)</td>
     * <td align="center">Any</td>
     * <td align="center">if 720p &lt;= activeArraySize</td>
     * </tr>
     * <tr>
     * <td align="center">JPEG</td>
     * <td align="center">640x480 (480p)</td>
     * <td align="center">Any</td>
     * <td align="center">if 480p &lt;= activeArraySize</td>
     * </tr>
     * <tr>
     * <td align="center">JPEG</td>
     * <td align="center">320x240 (240p)</td>
     * <td align="center">Any</td>
     * <td align="center">if 240p &lt;= activeArraySize</td>
     * </tr>
     * <tr>
     * <td align="center">YUV_420_888</td>
     * <td align="center">all output sizes available for JPEG</td>
     * <td align="center">FULL</td>
     * <td align="center"></td>
     * </tr>
     * <tr>
     * <td align="center">YUV_420_888</td>
     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
     * <td align="center">LIMITED</td>
     * <td align="center"></td>
     * </tr>
     * <tr>
     * <td align="center">IMPLEMENTATION_DEFINED</td>
     * <td align="center">same as YUV_420_888</td>
     * <td align="center">Any</td>
     * <td align="center"></td>
     * </tr>
     * </tbody>
     * </table>
     * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
     * mandatory stream configurations on a per-capability basis.</p>
     *
     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
     * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
     * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT
     */
    public static final Key<int[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
            new Key<int[]>("android.scaler.availableStreamConfigurations", int[].class);

    /**
     * <p>This lists the minimum frame duration for each
     * format/size combination.</p>
     * <p>This should correspond to the frame duration when only that
     * stream is active, with all processing (typically in android.*.mode)
     * set to either OFF or FAST.</p>
     * <p>When multiple streams are used in a request, the minimum frame
     * duration will be max(individual stream min durations).</p>
     * <p>The minimum frame duration of a stream (of a particular format, size)
     * is the same regardless of whether the stream is input or output.</p>
     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
     * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} for more details about
     * calculating the max frame rate.</p>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     */
    public static final Key<long[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
            new Key<long[]>("android.scaler.availableMinFrameDurations", long[].class);

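    // Illustrative sketch (assumption: frame durations for this key are expressed
    // in nanoseconds, matching android.sensor.frameDuration): converting a minimum
    // frame duration into an upper bound on the achievable frame rate.
    //
    //     long minFrameDurationNs = ...; // entry for the chosen (format, size)
    //     double maxFps = 1e9 / minFrameDurationNs;
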
    /**
     * <p>This lists the maximum stall duration for each
     * format/size combination.</p>
     * <p>A stall duration is how much extra time would get added
     * to the normal minimum frame duration for a repeating request
     * that has streams with non-zero stall.</p>
     * <p>For example, consider JPEG captures which have the following
     * characteristics:</p>
     * <ul>
     * <li>JPEG streams act like processed YUV streams in requests for which
     * they are not included; in requests in which they are directly
     * referenced, they act as JPEG streams. This is because supporting a
     * JPEG stream requires the underlying YUV data to always be ready for
     * use by a JPEG encoder, but the encoder will only be used (and impact
     * frame duration) on requests that actually reference a JPEG stream.</li>
     * <li>The JPEG processor can run concurrently to the rest of the camera
     * pipeline, but cannot process more than 1 capture at a time.</li>
     * </ul>
     * <p>In other words, using a repeating YUV request would result
     * in a steady frame rate (let's say it's 30 FPS). If a single
     * JPEG request is submitted periodically, the frame rate will stay
     * at 30 FPS (as long as we wait for the previous JPEG to return each
     * time). If we try to submit a repeating YUV + JPEG request, then
     * the frame rate will drop from 30 FPS.</p>
     * <p>In general, submitting a new request with a non-zero stall time
     * stream will <em>not</em> cause a frame rate drop unless there are still
     * outstanding buffers for that stream from previous requests.</p>
     * <p>Submitting a repeating request with a set of streams (call this <code>S</code>)
     * is equivalent to setting the minimum frame duration to
     * the normal minimum frame duration for <code>S</code> plus
     * the maximum stall duration for <code>S</code>.</p>
     * <p>If interleaving requests with and without a stall duration,
     * a request will stall by the maximum of the remaining times
     * for each stream that can stall and still has outstanding buffers.</p>
     * <p>This means that a stalling request will not have an exposure start
     * until the stall has completed.</p>
     * <p>This should correspond to the stall duration when only that stream is
     * active, with all processing (typically in android.*.mode) set to FAST
     * or OFF. Setting any of the processing modes to HIGH_QUALITY
     * effectively results in an indeterminate stall duration for all
     * streams in a request (the regular stall calculation rules are
     * ignored).</p>
     * <p>The following formats may always have a stall duration:</p>
     * <ul>
     * <li>JPEG</li>
     * <li>RAW16</li>
     * </ul>
     * <p>The following formats will never have a stall duration:</p>
     * <ul>
     * <li>YUV_420_888</li>
     * <li>IMPLEMENTATION_DEFINED</li>
     * </ul>
     * <p>All other formats may or may not have an allowed stall duration on
     * a per-capability basis; refer to android.request.availableCapabilities
     * for more details.</p>
     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
     * calculating the max frame rate (absent stalls).</p>
     *
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     */
    public static final Key<long[]> SCALER_AVAILABLE_STALL_DURATIONS =
            new Key<long[]>("android.scaler.availableStallDurations", long[].class);

    /**
     * <p>Area of raw data which corresponds to only
     * active pixels.</p>
     * <p>It is smaller than or equal to the
     * sensor's full pixel array, which may include the black calibration pixels.</p>
     */
    public static final Key<android.graphics.Rect> SENSOR_INFO_ACTIVE_ARRAY_SIZE =
            new Key<android.graphics.Rect>("android.sensor.info.activeArraySize", android.graphics.Rect.class);

    /**
     * <p>Range of valid sensitivities</p>
     */
    public static final Key<int[]> SENSOR_INFO_SENSITIVITY_RANGE =
            new Key<int[]>("android.sensor.info.sensitivityRange", int[].class);

    /**
     * <p>Range of valid exposure
     * times used by {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}.</p>
     *
     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
     */
    public static final Key<long[]> SENSOR_INFO_EXPOSURE_TIME_RANGE =
            new Key<long[]>("android.sensor.info.exposureTimeRange", long[].class);

    /**
     * <p>Maximum possible frame duration (minimum frame
     * rate).</p>
     * <p>The largest possible android.sensor.frameDuration
     * that will be accepted by the camera device. Attempting to use
     * frame durations beyond the maximum will result in the frame duration
     * being clipped to the maximum. See that control
     * for a full definition of frame durations.</p>
     * <p>Refer to
     * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations},
     * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations}, and
     * android.scaler.availableRawMinDurations for the minimum
     * frame duration values.</p>
     *
     * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
     * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
     */
    public static final Key<Long> SENSOR_INFO_MAX_FRAME_DURATION =
            new Key<Long>("android.sensor.info.maxFrameDuration", long.class);

    /**
     * <p>The physical dimensions of the full pixel
     * array</p>
     * <p>Needed for FOV calculation for old API</p>
     */
    public static final Key<float[]> SENSOR_INFO_PHYSICAL_SIZE =
            new Key<float[]>("android.sensor.info.physicalSize", float[].class);

    /**
     * <p>Gain factor from electrons to raw units when
     * ISO=100</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     * <p><b>Full capability</b> -
     * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
     *
     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
     */
    public static final Key<Rational> SENSOR_BASE_GAIN_FACTOR =
            new Key<Rational>("android.sensor.baseGainFactor", Rational.class);

    /**
     * <p>A fixed black level offset for each of the color filter arrangement
     * (CFA) mosaic channels.</p>
     * <p>This tag specifies the zero light value for each of the CFA mosaic
     * channels in the camera sensor.</p>
     * <p>The values are given in row-column scan order, with the first value
     * corresponding to the element of the CFA in row=0, column=0.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     */
    public static final Key<int[]> SENSOR_BLACK_LEVEL_PATTERN =
            new Key<int[]>("android.sensor.blackLevelPattern", int[].class);

    /**
     * <p>Maximum sensitivity that is implemented
     * purely through analog gain.</p>
     * <p>For {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} values less than or
     * equal to this, all applied gain must be analog. For
     * values above this, the gain applied can be a mix of analog and
     * digital.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     * <p><b>Full capability</b> -
     * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
     *
     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
     * @see CaptureRequest#SENSOR_SENSITIVITY
     */
    public static final Key<Integer> SENSOR_MAX_ANALOG_SENSITIVITY =
            new Key<Integer>("android.sensor.maxAnalogSensitivity", int.class);

    /**
     * <p>Clockwise angle through which the output
     * image needs to be rotated to be upright on the device
     * screen in its native orientation. Also defines the
     * direction of rolling shutter readout, which is from top
     * to bottom in the sensor's coordinate system</p>
     */
    public static final Key<Integer> SENSOR_ORIENTATION =
            new Key<Integer>("android.sensor.orientation", int.class);

    /**
     * <p>Optional. Defaults to [OFF]. Lists the supported test
     * pattern modes for android.sensor.testPatternMode.</p>
     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
     */
    public static final Key<Byte> SENSOR_AVAILABLE_TEST_PATTERN_MODES =
            new Key<Byte>("android.sensor.availableTestPatternModes", byte.class);

    /**
     * <p>Which face detection modes are available,
     * if any</p>
     * <p>OFF means face detection is disabled; it must
     * be included in the list.</p>
     * <p>SIMPLE means the device supports the
     * android.statistics.faceRectangles and
     * android.statistics.faceScores outputs.</p>
     * <p>FULL means the device additionally supports the
     * android.statistics.faceIds and
     * android.statistics.faceLandmarks outputs.</p>
     */
    public static final Key<byte[]> STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
            new Key<byte[]>("android.statistics.info.availableFaceDetectModes", byte[].class);

    /**
     * <p>Maximum number of simultaneously detectable
     * faces</p>
     */
    public static final Key<Integer> STATISTICS_INFO_MAX_FACE_COUNT =
            new Key<Integer>("android.statistics.info.maxFaceCount", int.class);

    /**
     * <p>Maximum number of supported points in the
     * tonemap curve that can be used for {@link CaptureRequest#TONEMAP_CURVE_RED android.tonemap.curveRed},
     * {@link CaptureRequest#TONEMAP_CURVE_GREEN android.tonemap.curveGreen}, or {@link CaptureRequest#TONEMAP_CURVE_BLUE android.tonemap.curveBlue}.</p>
     * <p>If the actual number of points provided by the application (in
     * android.tonemap.curve*) is less than this maximum, the camera device will
     * resample the curve to its internal representation, using linear
     * interpolation.</p>
     * <p>The output curves in the result metadata may have a different number
     * of points than the input curves, and will represent the actual
     * hardware curves used as closely as possible when linearly interpolated.</p>
     *
     * @see CaptureRequest#TONEMAP_CURVE_BLUE
     * @see CaptureRequest#TONEMAP_CURVE_GREEN
     * @see CaptureRequest#TONEMAP_CURVE_RED
     */
    public static final Key<Integer> TONEMAP_MAX_CURVE_POINTS =
            new Key<Integer>("android.tonemap.maxCurvePoints", int.class);

    /**
     * <p>A list of camera LEDs that are available on this system.</p>
     * @see #LED_AVAILABLE_LEDS_TRANSMIT
     * @hide
     */
    public static final Key<int[]> LED_AVAILABLE_LEDS =
            new Key<int[]>("android.led.availableLeds", int[].class);

    /**
     * <p>Generally classifies the overall set of the camera device functionality.</p>
     * <p>Camera devices will come in two flavors: LIMITED and FULL.</p>
     * <p>A FULL device has the most support possible and will enable the
     * widest range of use cases such as:</p>
     * <ul>
     * <li>30 FPS at maximum resolution (== sensor resolution)</li>
     * <li>Per frame control</li>
     * <li>Manual sensor control</li>
     * <li>Zero Shutter Lag (ZSL)</li>
     * </ul>
     * <p>A LIMITED device may have some or none of the above characteristics.
     * To find out more, refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
     *
     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
     * @see #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
     * @see #INFO_SUPPORTED_HARDWARE_LEVEL_FULL
     */
    public static final Key<Integer> INFO_SUPPORTED_HARDWARE_LEVEL =
            new Key<Integer>("android.info.supportedHardwareLevel", int.class);

    /**
     * <p>The maximum number of frames that can occur after a request
     * (different than the previous) has been submitted, and before the
     * result's state becomes synchronized (by setting
     * android.sync.frameNumber to a non-negative value).</p>
     * <p>This defines the maximum distance (in number of metadata results)
     * between android.sync.frameNumber and the equivalent
     * android.request.frameCount.</p>
     * <p>In other words, this acts as an upper boundary for how many frames
     * must occur before the camera device knows for a fact that the newly
     * submitted camera settings have been applied in outgoing frames.</p>
     * <p>For example, if the distance was 2,</p>
     * <pre><code>initial request = X (repeating)
     * request1 = X
     * request2 = Y
     * request3 = Y
     * request4 = Y
     *
     * where requestN has frameNumber N, and the first capture of the repeating
     * initial request has frameNumber F (and F &lt; 1).
     *
     * initial result = X' + { android.sync.frameNumber == F }
     * result1 = X' + { android.sync.frameNumber == F }
     * result2 = X' + { android.sync.frameNumber == CONVERGING }
     * result3 = X' + { android.sync.frameNumber == CONVERGING }
     * result4 = X' + { android.sync.frameNumber == 2 }
     *
     * where resultN has frameNumber N.
     * </code></pre>
     * <p>Since <code>result4</code> has a <code>frameNumber == 4</code> and
     * <code>android.sync.frameNumber == 2</code>, the distance is clearly
     * <code>4 - 2 = 2</code>.</p>
     * @see #SYNC_MAX_LATENCY_PER_FRAME_CONTROL
     * @see #SYNC_MAX_LATENCY_UNKNOWN
     */
    public static final Key<Integer> SYNC_MAX_LATENCY =
            new Key<Integer>("android.sync.maxLatency", int.class);

    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
     * End generated code
     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
}