CameraCharacteristics.java revision 6f387098c2c9006a1fecfe18c7052b2cc40a1941
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.hardware.camera2;
18
19import android.hardware.camera2.impl.CameraMetadataNative;
20import android.hardware.camera2.impl.PublicKey;
21import android.hardware.camera2.impl.SyntheticKey;
22import android.hardware.camera2.utils.TypeReference;
23import android.util.Rational;
24
25import java.util.Collections;
26import java.util.List;
27
28/**
29 * <p>The properties describing a
30 * {@link CameraDevice CameraDevice}.</p>
31 *
32 * <p>These properties are fixed for a given CameraDevice, and can be queried
33 * through the {@link CameraManager CameraManager}
34 * interface with {@link CameraManager#getCameraCharacteristics}.</p>
35 *
36 * <p>{@link CameraCharacteristics} objects are immutable.</p>
37 *
38 * @see CameraDevice
39 * @see CameraManager
40 */
41public final class CameraCharacteristics extends CameraMetadata<CameraCharacteristics.Key<?>> {
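    /*
     * Illustrative sketch (not part of this class): how an application typically
     * obtains a CameraCharacteristics instance. A Context named "context" is assumed
     * to be available.
     *
     *     CameraManager manager =
     *             (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
     *     try {
     *         for (String cameraId : manager.getCameraIdList()) {
     *             CameraCharacteristics characteristics =
     *                     manager.getCameraCharacteristics(cameraId);
     *             Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
     *             // facing is LENS_FACING_FRONT or LENS_FACING_BACK for this device.
     *         }
     *     } catch (CameraAccessException e) {
     *         // The camera service is unavailable or the device has been disconnected.
     *     }
     */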
42
43    /**
44     * A {@code Key} is used to do camera characteristics field lookups with
45     * {@link CameraCharacteristics#get}.
46     *
47     * <p>For example, to get the stream configuration map:
48     * <code><pre>
49     * StreamConfigurationMap map = cameraCharacteristics.get(
50     *      CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
51     * </pre></code>
52     * </p>
53     *
54     * <p>To enumerate over all possible keys for {@link CameraCharacteristics}, see
55     * {@link CameraCharacteristics#getKeys()}.</p>
56     *
57     * @see CameraCharacteristics#get
58     * @see CameraCharacteristics#getKeys()
59     */
60    public static final class Key<T> {
61        private final CameraMetadataNative.Key<T> mKey;
62
63        /**
64         * Visible for testing and vendor extensions only.
65         *
66         * @hide
67         */
68        public Key(String name, Class<T> type) {
69            mKey = new CameraMetadataNative.Key<T>(name,  type);
70        }
71
72        /**
73         * Visible for testing and vendor extensions only.
74         *
75         * @hide
76         */
77        public Key(String name, TypeReference<T> typeReference) {
78            mKey = new CameraMetadataNative.Key<T>(name,  typeReference);
79        }
80
81        /**
82         * Return a camelCase, period separated name formatted like:
83         * {@code "root.section[.subsections].name"}.
84         *
85         * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
86         * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
87         *
88         * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
89         * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
90         * specific key might look like {@code "com.google.nexus.data.private"}.</p>
91         *
92         * @return String representation of the key name
93         */
94        public String getName() {
95            return mKey.getName();
96        }
97
98        /**
99         * {@inheritDoc}
100         */
101        @Override
102        public final int hashCode() {
103            return mKey.hashCode();
104        }
105
106        /**
107         * {@inheritDoc}
108         */
109        @SuppressWarnings("unchecked")
110        @Override
111        public final boolean equals(Object o) {
112            return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
113        }
114
115        /**
116         * Visible for CameraMetadataNative implementation only; do not use.
117         *
118         * TODO: Make this private or remove it altogether.
119         *
120         * @hide
121         */
122        public CameraMetadataNative.Key<T> getNativeKey() {
123            return mKey;
124        }
125
126        @SuppressWarnings({
127                "unused", "unchecked"
128        })
129        private Key(CameraMetadataNative.Key<?> nativeKey) {
130            mKey = (CameraMetadataNative.Key<T>) nativeKey;
131        }
132    }
133
134    private final CameraMetadataNative mProperties;
135    private List<CameraCharacteristics.Key<?>> mKeys;
136    private List<CaptureRequest.Key<?>> mAvailableRequestKeys;
137    private List<CaptureResult.Key<?>> mAvailableResultKeys;
138
139    /**
140     * Takes ownership of the passed-in properties object
141     * @hide
142     */
143    public CameraCharacteristics(CameraMetadataNative properties) {
144        mProperties = CameraMetadataNative.move(properties);
145    }
146
147    /**
148     * Returns a copy of the underlying {@link CameraMetadataNative}.
149     * @hide
150     */
151    public CameraMetadataNative getNativeCopy() {
152        return new CameraMetadataNative(mProperties);
153    }
154
155    /**
156     * Get a camera characteristics field value.
157     *
158     * <p>The field definitions can be
159     * found in {@link CameraCharacteristics}.</p>
160     *
161     * <p>Querying the value for the same key more than once will return a value
162     * which is equal to the previously queried value.</p>
163     *
164     * @throws IllegalArgumentException if the key was not valid
165     *
166     * @param key The characteristics field to read.
167     * @return The value of that key, or {@code null} if the field is not set.
168     */
169    public <T> T get(Key<T> key) {
170        return mProperties.get(key);
171    }
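    /*
     * Illustrative sketch (not part of the implementation): reading a mandatory field
     * and an optional field. A "characteristics" instance is assumed to be in scope.
     *
     *     StreamConfigurationMap map =
     *             characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     *     android.util.Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
     *
     *     // Optional fields may be null and must be checked before use.
     *     Float hyperfocal =
     *             characteristics.get(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE);
     *     if (hyperfocal != null) {
     *         // The device reports an approximate or calibrated hyperfocal distance.
     *     }
     */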
172
173    /**
174     * {@inheritDoc}
175     * @hide
176     */
177    @SuppressWarnings("unchecked")
178    @Override
179    protected <T> T getProtected(Key<?> key) {
180        return (T) mProperties.get(key);
181    }
182
183    /**
184     * {@inheritDoc}
185     * @hide
186     */
187    @SuppressWarnings("unchecked")
188    @Override
189    protected Class<Key<?>> getKeyClass() {
190        Object thisClass = Key.class;
191        return (Class<Key<?>>)thisClass;
192    }
193
194    /**
195     * {@inheritDoc}
196     */
197    @Override
198    public List<Key<?>> getKeys() {
199        // List of keys is immutable; cache the results after we calculate them
200        if (mKeys != null) {
201            return mKeys;
202        }
203
204        int[] filterTags = get(REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
205        if (filterTags == null) {
206            throw new AssertionError("android.request.availableCharacteristicsKeys must be non-null"
207                    + " in the characteristics");
208        }
209
210        mKeys = Collections.unmodifiableList(
211                getKeysStatic(getClass(), getKeyClass(), this, filterTags));
212        return mKeys;
213    }
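    /*
     * Illustrative sketch (not part of the implementation): enumerating every
     * characteristics field advertised by a device. The "characteristics" instance and
     * the log tag "TAG" are assumed to be defined elsewhere.
     *
     *     for (CameraCharacteristics.Key<?> key : characteristics.getKeys()) {
     *         Log.d(TAG, "Supported characteristic: " + key.getName());
     *     }
     */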
214
215    /**
216     * Returns the list of keys supported by this {@link CameraDevice} for querying
217     * with a {@link CaptureRequest}.
218     *
219     * <p>The list returned is not modifiable, so any attempts to modify it will throw
220     * a {@code UnsupportedOperationException}.</p>
221     *
222     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
223     *
224     * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
225     * {@link #getKeys()} instead.</p>
226     *
227     * @return List of keys supported by this CameraDevice for CaptureRequests.
228     */
229    @SuppressWarnings({"unchecked"})
230    public List<CaptureRequest.Key<?>> getAvailableCaptureRequestKeys() {
231        if (mAvailableRequestKeys == null) {
232            Object crKey = CaptureRequest.Key.class;
233            Class<CaptureRequest.Key<?>> crKeyTyped = (Class<CaptureRequest.Key<?>>)crKey;
234
235            int[] filterTags = get(REQUEST_AVAILABLE_REQUEST_KEYS);
236            if (filterTags == null) {
237                throw new AssertionError("android.request.availableRequestKeys must be non-null "
238                        + "in the characteristics");
239            }
240            mAvailableRequestKeys =
241                    getAvailableKeyList(CaptureRequest.class, crKeyTyped, filterTags);
242        }
243        return mAvailableRequestKeys;
244    }
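    /*
     * Illustrative sketch (not part of the implementation): confirming that a request
     * field is advertised before setting it. The "characteristics" instance and a
     * CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     List<CaptureRequest.Key<?>> requestKeys =
     *             characteristics.getAvailableCaptureRequestKeys();
     *     if (requestKeys.contains(CaptureRequest.LENS_APERTURE)) {
     *         // A real application would pick a value from LENS_INFO_AVAILABLE_APERTURES.
     *         builder.set(CaptureRequest.LENS_APERTURE, 2.0f);
     *     }
     */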
245
246    /**
247     * Returns the list of keys supported by this {@link CameraDevice} for querying
248     * with a {@link CaptureResult}.
249     *
250     * <p>The list returned is not modifiable, so any attempts to modify it will throw
251     * a {@code UnsupportedOperationException}.</p>
252     *
253     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
254     *
255     * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
256     * {@link #getKeys()} instead.</p>
257     *
258     * @return List of keys supported by this CameraDevice for CaptureResults.
259     */
260    @SuppressWarnings({"unchecked"})
261    public List<CaptureResult.Key<?>> getAvailableCaptureResultKeys() {
262        if (mAvailableResultKeys == null) {
263            Object crKey = CaptureResult.Key.class;
264            Class<CaptureResult.Key<?>> crKeyTyped = (Class<CaptureResult.Key<?>>)crKey;
265
266            int[] filterTags = get(REQUEST_AVAILABLE_RESULT_KEYS);
267            if (filterTags == null) {
268                throw new AssertionError("android.request.availableResultKeys must be non-null "
269                        + "in the characteristics");
270            }
271            mAvailableResultKeys = getAvailableKeyList(CaptureResult.class, crKeyTyped, filterTags);
272        }
273        return mAvailableResultKeys;
274    }
275
276    /**
277     * Returns the list of keys supported by this {@link CameraDevice} for the given metadataClass.</p>
278     *
279     * <p>The list returned is not modifiable, so any attempts to modify it will throw
280     * a {@code UnsupportedOperationException}.</p>
281     *
282     * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
283     *
284     * @param metadataClass The subclass of CameraMetadata that you want to get the keys for.
285     * @param keyClass The class of the metadata key, e.g. CaptureRequest.Key.class
286     *
287     * @return List of keys supported by this CameraDevice for metadataClass.
288     *
289     * @throws IllegalArgumentException if metadataClass is not a subclass of CameraMetadata
290     */
291    private <TKey> List<TKey>
292    getAvailableKeyList(Class<?> metadataClass, Class<TKey> keyClass, int[] filterTags) {
293
294        if (metadataClass.equals(CameraMetadata.class)) {
295            throw new AssertionError(
296                    "metadataClass must be a strict subclass of CameraMetadata");
297        } else if (!CameraMetadata.class.isAssignableFrom(metadataClass)) {
298            throw new AssertionError(
299                    "metadataClass must be a subclass of CameraMetadata");
300        }
301
302        List<TKey> staticKeyList = CameraCharacteristics.<TKey>getKeysStatic(
303                metadataClass, keyClass, /*instance*/null, filterTags);
304        return Collections.unmodifiableList(staticKeyList);
305    }
306
307    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
308     * The key entries below this point are generated from metadata
309     * definitions in /system/media/camera/docs. Do not modify by hand or
310     * modify the comment blocks at the start or end.
311     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
312
313
314    /**
315     * <p>The set of aberration correction modes supported by this camera device.</p>
316     * <p>This metadata lists the valid modes for {@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode}.
317     * If no aberration correction modes are available for a device, this list will solely include
318     * OFF mode.</p>
319     * <p>For a FULL capability device ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL), OFF must be
320     * included.</p>
321     *
322     * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
323     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
324     */
325    @PublicKey
326    public static final Key<int[]> COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES =
327            new Key<int[]>("android.colorCorrection.availableAberrationModes", int[].class);
328
329    /**
330     * <p>The set of auto-exposure antibanding modes that are
331     * supported by this camera device.</p>
332     * <p>Not all of the auto-exposure anti-banding modes may be
333     * supported by a given camera device. This field lists the
334     * valid anti-banding modes that the application may request
335     * for this camera device; they must include AUTO.</p>
336     */
337    @PublicKey
338    public static final Key<int[]> CONTROL_AE_AVAILABLE_ANTIBANDING_MODES =
339            new Key<int[]>("android.control.aeAvailableAntibandingModes", int[].class);
340
341    /**
342     * <p>The set of auto-exposure modes that are supported by this
343     * camera device.</p>
344     * <p>Not all the auto-exposure modes may be supported by a
345     * given camera device, especially if no flash unit is
346     * available. This entry lists the valid modes for
347     * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} for this camera device.</p>
348     * <p>All camera devices support ON, and all camera devices with
349     * flash units support ON_AUTO_FLASH and
350     * ON_ALWAYS_FLASH.</p>
351     * <p>FULL mode camera devices always support OFF mode,
352     * which enables application control of camera exposure time,
353     * sensitivity, and frame duration.</p>
354     *
355     * @see CaptureRequest#CONTROL_AE_MODE
356     */
357    @PublicKey
358    public static final Key<int[]> CONTROL_AE_AVAILABLE_MODES =
359            new Key<int[]>("android.control.aeAvailableModes", int[].class);
360
361    /**
362     * <p>List of frame rate ranges supported by the
363     * auto-exposure (AE) algorithm/hardware</p>
364     */
365    @PublicKey
366    public static final Key<android.util.Range<Integer>[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
367            new Key<android.util.Range<Integer>[]>("android.control.aeAvailableTargetFpsRanges", new TypeReference<android.util.Range<Integer>[]>() {{ }});
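    /*
     * Illustrative sketch (not part of the generated metadata): picking the range with
     * the highest maximum frame rate and applying it to a request. The "characteristics"
     * instance and a CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     Range<Integer>[] fpsRanges =
     *             characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
     *     Range<Integer> best = fpsRanges[0];
     *     for (Range<Integer> range : fpsRanges) {
     *         if (range.getUpper() > best.getUpper()) {
     *             best = range;   // prefer the highest maximum frame rate
     *         }
     *     }
     *     builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, best);
     */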
368
369    /**
370     * <p>Maximum and minimum exposure compensation
371     * setting, in counts of
372     * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep}.</p>
373     *
374     * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
375     */
376    @PublicKey
377    public static final Key<android.util.Range<Integer>> CONTROL_AE_COMPENSATION_RANGE =
378            new Key<android.util.Range<Integer>>("android.control.aeCompensationRange", new TypeReference<android.util.Range<Integer>>() {{ }});
379
380    /**
381     * <p>Smallest step by which exposure compensation
382     * can be changed</p>
383     */
384    @PublicKey
385    public static final Key<Rational> CONTROL_AE_COMPENSATION_STEP =
386            new Key<Rational>("android.control.aeCompensationStep", Rational.class);
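    /*
     * Illustrative sketch (not part of the generated metadata): converting a desired
     * exposure compensation in EV into the index expected by
     * CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION. The "characteristics" instance
     * and a CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     Range<Integer> range =
     *             characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
     *     Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
     *     float desiredEv = 1.0f;   // +1 EV, chosen purely for illustration
     *     int index = Math.round(desiredEv / step.floatValue());
     *     index = Math.max(range.getLower(), Math.min(range.getUpper(), index));
     *     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, index);
     */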
387
388    /**
389     * <p>List of auto-focus (AF) modes that can be
390     * selected with {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
391     * <p>Not all the auto-focus modes may be supported by a
392     * given camera device. This entry lists the valid modes for
393     * {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} for this camera device.</p>
394     * <p>All LIMITED and FULL mode camera devices will support OFF mode, and all
395     * camera devices with adjustable focuser units
396     * (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>) will support AUTO mode.</p>
397     *
398     * @see CaptureRequest#CONTROL_AF_MODE
399     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
400     */
401    @PublicKey
402    public static final Key<int[]> CONTROL_AF_AVAILABLE_MODES =
403            new Key<int[]>("android.control.afAvailableModes", int[].class);
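    /*
     * Illustrative sketch (not part of the generated metadata): choosing a focus mode
     * with a fallback when continuous autofocus is not advertised. The "characteristics"
     * instance and a CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
     *     int afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
     *     for (int mode : afModes) {
     *         if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
     *             afMode = mode;
     *             break;
     *         } else if (mode == CameraMetadata.CONTROL_AF_MODE_AUTO) {
     *             afMode = mode;   // keep looking for CONTINUOUS_PICTURE
     *         }
     *     }
     *     builder.set(CaptureRequest.CONTROL_AF_MODE, afMode);
     */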
404
405    /**
406     * <p>List containing the subset of color effects
407     * specified in {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode} that is supported by
408     * this device.</p>
409     * <p>This list contains the color effect modes that can be applied to
410     * images produced by the camera device. Only modes that have
411     * been fully implemented for the current device may be included here.
412     * Implementations are not expected to be consistent across all devices.
413     * If no color effect modes are available for a device, this should
414     * simply be set to OFF.</p>
415     * <p>A color effect will only be applied if
416     * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF.</p>
417     *
418     * @see CaptureRequest#CONTROL_EFFECT_MODE
419     * @see CaptureRequest#CONTROL_MODE
420     */
421    @PublicKey
422    public static final Key<int[]> CONTROL_AVAILABLE_EFFECTS =
423            new Key<int[]>("android.control.availableEffects", int[].class);
424
425    /**
426     * <p>List containing a subset of scene modes
427     * specified in {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode}.</p>
428     * <p>This list contains scene modes that can be set for the camera device.
429     * Only scene modes that have been fully implemented for the
430     * camera device may be included here. Implementations are not expected
431     * to be consistent across all devices. If no scene modes are supported
432     * by the camera device, this will be set to <code>[DISABLED]</code>.</p>
433     *
434     * @see CaptureRequest#CONTROL_SCENE_MODE
435     */
436    @PublicKey
437    public static final Key<int[]> CONTROL_AVAILABLE_SCENE_MODES =
438            new Key<int[]>("android.control.availableSceneModes", int[].class);
439
440    /**
441     * <p>List of video stabilization modes that can
442     * <p>List of video stabilization modes that are
443     * supported by this camera device.</p>
444    @PublicKey
445    public static final Key<int[]> CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES =
446            new Key<int[]>("android.control.availableVideoStabilizationModes", int[].class);
447
448    /**
449     * <p>The set of auto-white-balance modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode})
450     * that are supported by this camera device.</p>
451     * <p>Not all the auto-white-balance modes may be supported by a
452     * given camera device. This entry lists the valid modes for
453     * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} for this camera device.</p>
454     * <p>All camera devices will support ON mode.</p>
455     * <p>FULL mode camera devices will always support OFF mode,
456     * which enables application control of white balance, by using
457     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}({@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} must be set to TRANSFORM_MATRIX).</p>
458     *
459     * @see CaptureRequest#COLOR_CORRECTION_GAINS
460     * @see CaptureRequest#COLOR_CORRECTION_MODE
461     * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
462     * @see CaptureRequest#CONTROL_AWB_MODE
463     */
464    @PublicKey
465    public static final Key<int[]> CONTROL_AWB_AVAILABLE_MODES =
466            new Key<int[]>("android.control.awbAvailableModes", int[].class);
467
468    /**
469     * <p>List of the maximum number of regions that can be used for metering in
470     * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
471     * this corresponds to the maximum number of elements in
472     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}, {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions},
473     * and {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
474     *
475     * @see CaptureRequest#CONTROL_AE_REGIONS
476     * @see CaptureRequest#CONTROL_AF_REGIONS
477     * @see CaptureRequest#CONTROL_AWB_REGIONS
478     * @hide
479     */
480    public static final Key<int[]> CONTROL_MAX_REGIONS =
481            new Key<int[]>("android.control.maxRegions", int[].class);
482
483    /**
484     * <p>List of the maximum number of regions that can be used for metering in
485     * auto-exposure (AE);
486     * this corresponds to the maximum number of elements in
487     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}.</p>
488     *
489     * @see CaptureRequest#CONTROL_AE_REGIONS
490     */
491    @PublicKey
492    @SyntheticKey
493    public static final Key<Integer> CONTROL_MAX_REGIONS_AE =
494            new Key<Integer>("android.control.maxRegionsAe", int.class);
495
496    /**
497     * <p>List of the maximum number of regions that can be used for metering in
498     * auto-white balance (AWB);
499     * this corresponds to the maximum number of elements in
500     * {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}.</p>
501     *
502     * @see CaptureRequest#CONTROL_AWB_REGIONS
503     */
504    @PublicKey
505    @SyntheticKey
506    public static final Key<Integer> CONTROL_MAX_REGIONS_AWB =
507            new Key<Integer>("android.control.maxRegionsAwb", int.class);
508
509    /**
510     * <p>List of the maximum number of regions that can be used for metering in
511     * auto-focus (AF);
512     * this corresponds to the maximum number of elements in
513     * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
514     *
515     * @see CaptureRequest#CONTROL_AF_REGIONS
516     */
517    @PublicKey
518    @SyntheticKey
519    public static final Key<Integer> CONTROL_MAX_REGIONS_AF =
520            new Key<Integer>("android.control.maxRegionsAf", int.class);
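    /*
     * Illustrative sketch (not part of the generated metadata): only setting focus
     * metering regions when at least one AF region is supported. The "characteristics"
     * instance, a CaptureRequest.Builder named "builder", and a sensor-coordinate Rect
     * named "focusRect" are assumed to be in scope.
     *
     *     Integer maxAfRegions = characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
     *     if (maxAfRegions != null && maxAfRegions > 0) {
     *         MeteringRectangle region =
     *                 new MeteringRectangle(focusRect, MeteringRectangle.METERING_WEIGHT_MAX);
     *         builder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { region });
     *     }
     */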
521
522    /**
523     * <p>List of available high speed video size and fps range configurations
524     * supported by the camera device, in the format of (width, height, fps_min, fps_max).</p>
525     * <p>When HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes},
526     * this metadata will list the supported high speed video size and fps range
527     * configurations. All the sizes listed in this configuration will be a subset
528     * of the sizes reported by StreamConfigurationMap#getOutputSizes for processed
529     * non-stalling formats.</p>
530     * <p>For the high speed video use case, where the application will set
531     * {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} to HIGH_SPEED_VIDEO in capture requests, the application must
532     * select the video size and fps range from this metadata to configure the recording and
533     * preview streams and setup the recording requests. For example, if the application intends
534     * to do high speed recording, it can select the maximum size reported by this metadata to
535     * configure output streams. Once the size is selected, the application can filter this metadata
536     * by the selected size and get the supported fps ranges, and use these fps ranges to set up the
537     * recording requests. Note that for the use case of multiple output streams, the application
538     * must select one unique size from this metadata to use. Otherwise a request error might
539     * occur.</p>
540     * <p>For the normal video recording use case, where the application will NOT set
541     * {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} to HIGH_SPEED_VIDEO in capture requests, the fps ranges
542     * reported in this metadata must not be used to set up capture requests, or a request
543     * error will occur.</p>
544     *
545     * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
546     * @see CaptureRequest#CONTROL_SCENE_MODE
547     * @hide
548     */
549    public static final Key<android.hardware.camera2.params.HighSpeedVideoConfiguration[]> CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS =
550            new Key<android.hardware.camera2.params.HighSpeedVideoConfiguration[]>("android.control.availableHighSpeedVideoConfigurations", android.hardware.camera2.params.HighSpeedVideoConfiguration[].class);
551
552    /**
553     * <p>The set of edge enhancement modes supported by this camera device.</p>
554     * <p>This tag lists the valid modes for {@link CaptureRequest#EDGE_MODE android.edge.mode}.</p>
555     * <p>Full-capability camera devices must always support OFF and FAST.</p>
556     *
557     * @see CaptureRequest#EDGE_MODE
558     */
559    @PublicKey
560    public static final Key<int[]> EDGE_AVAILABLE_EDGE_MODES =
561            new Key<int[]>("android.edge.availableEdgeModes", int[].class);
562
563    /**
564     * <p>Whether this camera device has a
565     * flash.</p>
566     * <p>If no flash, none of the flash controls do
567     * anything. All other metadata should return 0.</p>
568     */
569    @PublicKey
570    public static final Key<Boolean> FLASH_INFO_AVAILABLE =
571            new Key<Boolean>("android.flash.info.available", boolean.class);
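    /*
     * Illustrative sketch (not part of the generated metadata): falling back to plain
     * auto-exposure when no flash unit is present. The "characteristics" instance and a
     * CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
     *     int aeMode = (hasFlash != null && hasFlash)
     *             ? CameraMetadata.CONTROL_AE_MODE_ON_AUTO_FLASH
     *             : CameraMetadata.CONTROL_AE_MODE_ON;
     *     builder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
     */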
572
573    /**
574     * <p>The set of hot pixel correction modes that are supported by this
575     * camera device.</p>
576     * <p>This tag lists valid modes for {@link CaptureRequest#HOT_PIXEL_MODE android.hotPixel.mode}.</p>
577     * <p>FULL mode camera devices will always support FAST.</p>
578     *
579     * @see CaptureRequest#HOT_PIXEL_MODE
580     */
581    @PublicKey
582    public static final Key<int[]> HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES =
583            new Key<int[]>("android.hotPixel.availableHotPixelModes", int[].class);
584
585    /**
586     * <p>Supported resolutions for the JPEG thumbnail.</p>
587     * <p>The following conditions will be satisfied for this size list:</p>
588     * <ul>
589     * <li>The sizes will be sorted by increasing pixel area (width x height).
590     * If several resolutions have the same area, they will be sorted by increasing width.</li>
591     * <li>The aspect ratio of the largest thumbnail size will be the same as the
592     * aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
593     * The largest size is defined as the size that has the largest pixel area
594     * in a given size list.</li>
595     * <li>Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
596     * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
597     * and vice versa.</li>
598     * <li>All non (0, 0) sizes will have non-zero widths and heights.</li>
599     * </ul>
600     */
601    @PublicKey
602    public static final Key<android.util.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
603            new Key<android.util.Size[]>("android.jpeg.availableThumbnailSizes", android.util.Size[].class);
604
605    /**
606     * <p>List of supported aperture
607     * values.</p>
608     * <p>If the camera device doesn't support variable apertures,
609     * the listed value will be the fixed aperture.</p>
610     * <p>If the camera device supports variable apertures, the aperture values
611     * in this list will be sorted in ascending order.</p>
612     */
613    @PublicKey
614    public static final Key<float[]> LENS_INFO_AVAILABLE_APERTURES =
615            new Key<float[]>("android.lens.info.availableApertures", float[].class);
616
617    /**
618     * <p>List of supported neutral density filter values for
619     * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity}.</p>
620     * <p>If changing {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} is not supported,
621     * availableFilterDensities must contain only 0. Otherwise, this
622     * list contains only the exact filter density values available on
623     * this camera device.</p>
624     *
625     * @see CaptureRequest#LENS_FILTER_DENSITY
626     */
627    @PublicKey
628    public static final Key<float[]> LENS_INFO_AVAILABLE_FILTER_DENSITIES =
629            new Key<float[]>("android.lens.info.availableFilterDensities", float[].class);
630
631    /**
632     * <p>The available focal lengths for this device for use with
633     * {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}.</p>
634     * <p>If optical zoom is not supported, this will only report
635     * a single value corresponding to the static focal length of the
636     * device. Otherwise, this will report every focal length supported
637     * by the device.</p>
638     *
639     * @see CaptureRequest#LENS_FOCAL_LENGTH
640     */
641    @PublicKey
642    public static final Key<float[]> LENS_INFO_AVAILABLE_FOCAL_LENGTHS =
643            new Key<float[]>("android.lens.info.availableFocalLengths", float[].class);
644
645    /**
646     * <p>List containing a subset of the optical image
647     * stabilization (OIS) modes specified in
648     * {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}.</p>
649     * <p>If OIS is not implemented for a given camera device, this will
650     * contain only OFF.</p>
651     *
652     * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
653     */
654    @PublicKey
655    public static final Key<int[]> LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION =
656            new Key<int[]>("android.lens.info.availableOpticalStabilization", int[].class);
657
658    /**
659     * <p>Optional. Hyperfocal distance for this lens.</p>
660     * <p>If the lens is not fixed focus, the camera device will report this
661     * field when {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} is APPROXIMATE or CALIBRATED.</p>
662     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
663     *
664     * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
665     */
666    @PublicKey
667    public static final Key<Float> LENS_INFO_HYPERFOCAL_DISTANCE =
668            new Key<Float>("android.lens.info.hyperfocalDistance", float.class);
669
670    /**
671     * <p>Shortest distance from frontmost surface
672     * of the lens that can be focused correctly.</p>
673     * <p>If the lens is fixed-focus, this should be
674     * 0.</p>
675     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
676     */
677    @PublicKey
678    public static final Key<Float> LENS_INFO_MINIMUM_FOCUS_DISTANCE =
679            new Key<Float>("android.lens.info.minimumFocusDistance", float.class);
680
681    /**
682     * <p>Dimensions of lens shading map.</p>
683     * <p>The map should be on the order of 30-40 rows and columns, and
684     * must be smaller than 64x64.</p>
685     * @hide
686     */
687    public static final Key<android.util.Size> LENS_INFO_SHADING_MAP_SIZE =
688            new Key<android.util.Size>("android.lens.info.shadingMapSize", android.util.Size.class);
689
690    /**
691     * <p>The lens focus distance calibration quality.</p>
692     * <p>The lens focus distance calibration quality determines the reliability of
693     * focus related metadata entries, i.e. {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
694     * {@link CaptureResult#LENS_FOCUS_RANGE android.lens.focusRange}, {@link CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE android.lens.info.hyperfocalDistance}, and
695     * {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}.</p>
696     *
697     * @see CaptureRequest#LENS_FOCUS_DISTANCE
698     * @see CaptureResult#LENS_FOCUS_RANGE
699     * @see CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE
700     * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
701     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED
702     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE
703     * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED
704     */
705    @PublicKey
706    public static final Key<Integer> LENS_INFO_FOCUS_DISTANCE_CALIBRATION =
707            new Key<Integer>("android.lens.info.focusDistanceCalibration", int.class);
708
709    /**
710     * <p>Direction the camera faces relative to
711     * device screen.</p>
712     * @see #LENS_FACING_FRONT
713     * @see #LENS_FACING_BACK
714     */
715    @PublicKey
716    public static final Key<Integer> LENS_FACING =
717            new Key<Integer>("android.lens.facing", int.class);
718
719    /**
720     * <p>The set of noise reduction modes supported by this camera device.</p>
721     * <p>This tag lists the valid modes for {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}.</p>
722     * <p>Full-capability camera devices must always support OFF and FAST.</p>
723     *
724     * @see CaptureRequest#NOISE_REDUCTION_MODE
725     */
726    @PublicKey
727    public static final Key<int[]> NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES =
728            new Key<int[]>("android.noiseReduction.availableNoiseReductionModes", int[].class);
729
730    /**
731     * <p>If set to 1, the HAL will always split result
732     * metadata for a single capture into multiple buffers,
733     * returned using multiple process_capture_result calls.</p>
734     * <p>Does not need to be listed in static
735     * metadata. Support for partial results will be reworked in
736     * future versions of camera service. This quirk will stop
737     * working at that point; DO NOT USE without careful
738     * consideration of future support.</p>
739     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
740     * @deprecated
741     * @hide
742     */
743    @Deprecated
744    public static final Key<Byte> QUIRKS_USE_PARTIAL_RESULT =
745            new Key<Byte>("android.quirks.usePartialResult", byte.class);
746
747    /**
748     * <p>The maximum numbers of different types of output streams
749     * that can be configured and used simultaneously by a camera device.</p>
750     * <p>This is a 3 element tuple that contains the max number of output simultaneous
751     * streams for raw sensor, processed (but not stalling), and processed (and stalling)
752     * formats respectively. For example, assuming that JPEG is typically a processed and
753     * stalling stream, if max raw sensor format output stream number is 1, max YUV streams
754     * number is 3, and max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
755     * <p>This lists the upper bound of the number of output streams supported by
756     * the camera device. Using more streams simultaneously may require more hardware and
757     * CPU resources that will consume more power. The image format for an output stream can
758     * be any supported format provided by android.scaler.availableStreamConfigurations.
759     * The formats defined in android.scaler.availableStreamConfigurations can be categorized
760     * into the 3 stream types as below:</p>
761     * <ul>
762     * <li>Processed (and stalling): any non-RAW format with a stallDurations &gt; 0.
763     * Typically JPEG format (ImageFormat#JPEG).</li>
764     * <li>Raw formats: ImageFormat#RAW_SENSOR, ImageFormat#RAW10 and ImageFormat#RAW_OPAQUE.</li>
765     * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
766     * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
767     * </ul>
768     * @hide
769     */
770    public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
771            new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
772
773    /**
774     * <p>The maximum numbers of different types of output streams
775     * that can be configured and used simultaneously by a camera device
776     * for any <code>RAW</code> formats.</p>
777     * <p>This value contains the max number of output simultaneous
778     * streams from the raw sensor.</p>
779     * <p>This lists the upper bound of the number of output streams supported by
780     * the camera device. Using more streams simultaneously may require more hardware and
781     * CPU resources that will consume more power. The image format for this kind of output stream can
782     * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
783     * <p>In particular, a <code>RAW</code> format is typically one of:</p>
784     * <ul>
785     * <li>ImageFormat#RAW_SENSOR</li>
786     * <li>ImageFormat#RAW10</li>
787     * <li>Opaque <code>RAW</code></li>
788     * </ul>
789     *
790     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
791     */
792    @PublicKey
793    @SyntheticKey
794    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_RAW =
795            new Key<Integer>("android.request.maxNumOutputRaw", int.class);
796
797    /**
798     * <p>The maximum numbers of different types of output streams
799     * that can be configured and used simultaneously by a camera device
800     * for any processed (but not-stalling) formats.</p>
801     * <p>This value contains the max number of output simultaneous
802     * streams for any processed (but not-stalling) formats.</p>
803     * <p>This lists the upper bound of the number of output streams supported by
804     * the camera device. Using more streams simultaneously may require more hardware and
805     * CPU resources that will consume more power. The image format for this kind of output stream can
806     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
807     * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
808     * Typically:</p>
809     * <ul>
810     * <li>ImageFormat#YUV_420_888</li>
811     * <li>ImageFormat#NV21</li>
812     * <li>ImageFormat#YV12</li>
813     * <li>Implementation-defined formats, i.e. StreamConfiguration#isOutputSupportedFor(Class)</li>
814     * </ul>
815     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
816     * a processed format -- it will return 0 for a non-stalling stream.</p>
817     *
818     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
819     */
820    @PublicKey
821    @SyntheticKey
822    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC =
823            new Key<Integer>("android.request.maxNumOutputProc", int.class);
824
825    /**
826     * <p>The maximum numbers of different types of output streams
827     * that can be configured and used simultaneously by a camera device
828     * for any processed (and stalling) formats.</p>
829     * <p>This value contains the max number of output simultaneous
830     * streams for any processed (and stalling) formats.</p>
831     * <p>This lists the upper bound of the number of output streams supported by
832     * the camera device. Using more streams simultaneously may require more hardware and
833     * CPU resources that will consume more power. The image format for this kind of output stream can
834     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
835     * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations &gt; 0.
836     * Typically this is only the <code>JPEG</code> format (ImageFormat#JPEG).</p>
837     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
838     * a processed format -- it will return a non-0 value for a stalling stream.</p>
839     *
840     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
841     */
842    @PublicKey
843    @SyntheticKey
844    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC_STALLING =
845            new Key<Integer>("android.request.maxNumOutputProcStalling", int.class);
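    /*
     * Illustrative sketch (not part of the generated metadata): checking whether a
     * planned combination of two YUV streams and one JPEG stream fits within the
     * per-type output stream limits. The "characteristics" instance is assumed to be
     * in scope.
     *
     *     int maxProc = characteristics.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
     *     int maxProcStalling =
     *             characteristics.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
     *     boolean combinationFits = (2 <= maxProc)            // two YUV_420_888 streams
     *             && (1 <= maxProcStalling);                  // one JPEG (stalling) stream
     */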
846
847    /**
848     * <p>The maximum numbers of any type of input streams
849     * that can be configured and used simultaneously by a camera device.</p>
850     * <p>When set to 0, it means no input stream is supported.</p>
851     * <p>The image format for an input stream can be any supported
852     * format provided by
853     * android.scaler.availableInputOutputFormatsMap. When using an
854     * input stream, there must be at least one output stream
855     * configured to receive the reprocessed images.</p>
856     * <p>For example, for Zero Shutter Lag (ZSL) still capture use case, the input
857     * stream image format will be RAW_OPAQUE, the associated output stream image format
858     * should be JPEG.</p>
859     * @hide
860     */
861    public static final Key<Integer> REQUEST_MAX_NUM_INPUT_STREAMS =
862            new Key<Integer>("android.request.maxNumInputStreams", int.class);
863
864    /**
865     * <p>Specifies the maximum number of pipeline stages a frame
866     * has to go through from when it's exposed to when it's available
867     * to the framework.</p>
868     * <p>A typical minimum value for this is 2 (one stage to expose,
869     * one stage to readout) from the sensor. The ISP then usually adds
870     * its own stages to do custom HW processing. Further stages may be
871     * added by SW processing.</p>
872     * <p>Depending on what settings are used (e.g. YUV, JPEG) and what
873     * processing is enabled (e.g. face detection), the actual pipeline
874     * depth (specified by {@link CaptureResult#REQUEST_PIPELINE_DEPTH android.request.pipelineDepth}) may be less than
875     * the max pipeline depth.</p>
876     * <p>A pipeline depth of X stages is equivalent to a pipeline latency of
877     * X frame intervals.</p>
878     * <p>This value will be 8 or less.</p>
879     *
880     * @see CaptureResult#REQUEST_PIPELINE_DEPTH
881     */
882    @PublicKey
883    public static final Key<Byte> REQUEST_PIPELINE_MAX_DEPTH =
884            new Key<Byte>("android.request.pipelineMaxDepth", byte.class);
885
886    /**
887     * <p>Defines how many sub-components
888     * a result will be composed of.</p>
889     * <p>In order to combat the pipeline latency, partial results
890     * may be delivered to the application layer from the camera device as
891     * soon as they are available.</p>
892     * <p>Optional; defaults to 1. A value of 1 means that partial
893     * results are not supported, and only the final TotalCaptureResult will
894     * be produced by the camera device.</p>
895     * <p>A typical use case for this might be: after requesting an
896     * auto-focus (AF) lock the new AF state might be available 50%
897     * of the way through the pipeline.  The camera device could
898     * then immediately dispatch this state via a partial result to
899     * the application, and the rest of the metadata via later
900     * partial results.</p>
901     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
902     */
903    @PublicKey
904    public static final Key<Integer> REQUEST_PARTIAL_RESULT_COUNT =
905            new Key<Integer>("android.request.partialResultCount", int.class);
906
907    /**
908     * <p>List of capabilities that the camera device
909     * advertises as fully supporting.</p>
910     * <p>A capability is a contract that the camera device makes in order
911     * to be able to satisfy one or more use cases.</p>
912     * <p>Listing a capability guarantees that the whole set of features
913     * required to support a common use case will all be available.</p>
914     * <p>Using a subset of the functionality provided by an unsupported
915     * capability may be possible on a specific camera device implementation;
916     * to do this, query each of android.request.availableRequestKeys,
917     * android.request.availableResultKeys,
918     * android.request.availableCharacteristicsKeys.</p>
919     * <p>The following capabilities are guaranteed to be available on
920     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL devices:</p>
921     * <ul>
922     * <li>MANUAL_SENSOR</li>
923     * <li>MANUAL_POST_PROCESSING</li>
924     * </ul>
925     * <p>Other capabilities may be available on either FULL or LIMITED
926     * devices, but the application should query this field to be sure.</p>
927     *
928     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
929     * @see #REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
930     * @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR
931     * @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING
932     * @see #REQUEST_AVAILABLE_CAPABILITIES_RAW
933     */
934    @PublicKey
935    public static final Key<int[]> REQUEST_AVAILABLE_CAPABILITIES =
936            new Key<int[]>("android.request.availableCapabilities", int[].class);
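    /*
     * Illustrative sketch (not part of the generated metadata): gating manual exposure
     * controls on the MANUAL_SENSOR capability. The "characteristics" instance is
     * assumed to be in scope.
     *
     *     boolean manualSensor = false;
     *     int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
     *     for (int cap : caps) {
     *         if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
     *             manualSensor = true;
     *             break;
     *         }
     *     }
     *     // Only set SENSOR_EXPOSURE_TIME / SENSOR_SENSITIVITY when manualSensor is true.
     */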
937
938    /**
939     * <p>A list of all keys that the camera device has available
940     * to use with CaptureRequest.</p>
941     * <p>Attempting to set a key into a CaptureRequest that is not
942     * listed here will result in an invalid request and will be rejected
943     * by the camera device.</p>
944     * <p>This field can be used to query the feature set of a camera device
945     * at a more granular level than capabilities. This is especially
946     * important for optional keys that are not listed under any capability
947     * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
948     *
949     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
950     * @hide
951     */
952    public static final Key<int[]> REQUEST_AVAILABLE_REQUEST_KEYS =
953            new Key<int[]>("android.request.availableRequestKeys", int[].class);
954
955    /**
956     * <p>A list of all keys that the camera device has available
957     * to use with CaptureResult.</p>
958     * <p>Attempting to get a key from a CaptureResult that is not
959     * listed here will always return a <code>null</code> value. Getting a key from
960     * a CaptureResult that is listed here must never return a <code>null</code>
961     * value.</p>
962     * <p>The following keys may return <code>null</code> unless they are enabled:</p>
963     * <ul>
964     * <li>android.statistics.lensShadingMap (non-null iff {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON)</li>
965     * </ul>
966     * <p>(Those sometimes-null keys should nevertheless be listed here
967     * if they are available.)</p>
968     * <p>This field can be used to query the feature set of a camera device
969     * at a more granular level than capabilities. This is especially
970     * important for optional keys that are not listed under any capability
971     * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
972     *
973     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
974     * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
975     * @hide
976     */
977    public static final Key<int[]> REQUEST_AVAILABLE_RESULT_KEYS =
978            new Key<int[]>("android.request.availableResultKeys", int[].class);
979
980    /**
981     * <p>A list of all keys that the camera device has available
982     * to use with CameraCharacteristics.</p>
983     * <p>This entry follows the same rules as
984     * android.request.availableResultKeys (except that it applies for
985     * CameraCharacteristics instead of CaptureResult). See above for more
986     * details.</p>
987     * @hide
988     */
989    public static final Key<int[]> REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =
990            new Key<int[]>("android.request.availableCharacteristicsKeys", int[].class);
991
992    /**
993     * <p>The list of image formats that are supported by this
994     * camera device for output streams.</p>
995     * <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
996     * <p>When set to YUV_420_888, the application can access the YUV420 data directly.</p>
997     * @deprecated
998     * @hide
999     */
1000    @Deprecated
1001    public static final Key<int[]> SCALER_AVAILABLE_FORMATS =
1002            new Key<int[]>("android.scaler.availableFormats", int[].class);
1003
1004    /**
1005     * <p>The minimum frame duration that is supported
1006     * for each resolution in android.scaler.availableJpegSizes.</p>
1007     * <p>This corresponds to the minimum steady-state frame duration when only
1008     * that JPEG stream is active and captured in a burst, with all
1009     * processing (typically in android.*.mode) set to FAST.</p>
1010     * <p>When multiple streams are configured, the minimum
1011     * frame duration will be &gt;= max(individual stream min
1012     * durations)</p>
1013     * @deprecated
1014     * @hide
1015     */
1016    @Deprecated
1017    public static final Key<long[]> SCALER_AVAILABLE_JPEG_MIN_DURATIONS =
1018            new Key<long[]>("android.scaler.availableJpegMinDurations", long[].class);
1019
1020    /**
1021     * <p>The JPEG resolutions that are supported by this camera device.</p>
1022     * <p>The resolutions are listed as <code>(width, height)</code> pairs. All camera devices will support
1023     * the sensor maximum resolution (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}).</p>
1024     *
1025     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1026     * @deprecated
1027     * @hide
1028     */
1029    @Deprecated
1030    public static final Key<android.util.Size[]> SCALER_AVAILABLE_JPEG_SIZES =
1031            new Key<android.util.Size[]>("android.scaler.availableJpegSizes", android.util.Size[].class);
1032
1033    /**
1034     * <p>The maximum ratio between both active area width
1035     * and crop region width, and active area height and
1036     * crop region height.</p>
1037     * <p>This represents the maximum amount of zooming possible by
1038     * the camera device, or equivalently, the minimum cropping
1039     * window size.</p>
1040     * <p>Crop regions that have a width or height that is smaller
1041     * than this ratio allows will be rounded up to the minimum
1042     * allowed size by the camera device.</p>
1043     */
1044    @PublicKey
1045    public static final Key<Float> SCALER_AVAILABLE_MAX_DIGITAL_ZOOM =
1046            new Key<Float>("android.scaler.availableMaxDigitalZoom", float.class);
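    /*
     * Illustrative sketch (not part of the generated metadata): deriving a centered crop
     * region for a requested digital zoom factor. The "characteristics" instance and a
     * CaptureRequest.Builder named "builder" are assumed to be in scope.
     *
     *     Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     *     float maxZoom =
     *             characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
     *     float zoom = Math.min(2.0f, maxZoom);   // request 2x zoom, clamped to the maximum
     *     int cropW = (int) (active.width() / zoom);
     *     int cropH = (int) (active.height() / zoom);
     *     // Crop region coordinates are relative to the top-left of the active array.
     *     int left = (active.width() - cropW) / 2;
     *     int top = (active.height() - cropH) / 2;
     *     builder.set(CaptureRequest.SCALER_CROP_REGION,
     *             new Rect(left, top, left + cropW, top + cropH));
     */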
1047
1048    /**
1049     * <p>For each available processed output size (defined in
1050     * android.scaler.availableProcessedSizes), this property lists the
1051     * minimum supportable frame duration for that size.</p>
1052     * <p>This should correspond to the frame duration when only that processed
1053     * stream is active, with all processing (typically in android.*.mode)
1054     * set to FAST.</p>
1055     * <p>When multiple streams are configured, the minimum frame duration will
1056     * be &gt;= max(individual stream min durations).</p>
1057     * @deprecated
1058     * @hide
1059     */
1060    @Deprecated
1061    public static final Key<long[]> SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS =
1062            new Key<long[]>("android.scaler.availableProcessedMinDurations", long[].class);
1063
1064    /**
1065     * <p>The resolutions available for use with
1066     * processed output streams, such as YV12, NV12, and
1067     * platform opaque YUV/RGB streams to the GPU or video
1068     * encoders.</p>
1069     * <p>The resolutions are listed as <code>(width, height)</code> pairs.</p>
1070     * <p>For a given use case, the actual maximum supported resolution
1071     * may be lower than what is listed here, depending on the destination
1072     * Surface for the image data. For example, for recording video,
1073     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
1074     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
1075     * can provide.</p>
1076     * <p>Please reference the documentation for the image data destination to
1077     * check if it limits the maximum size for image data.</p>
1078     * @deprecated
1079     * @hide
1080     */
1081    @Deprecated
1082    public static final Key<android.util.Size[]> SCALER_AVAILABLE_PROCESSED_SIZES =
1083            new Key<android.util.Size[]>("android.scaler.availableProcessedSizes", android.util.Size[].class);
1084
1085    /**
1086     * <p>The mapping of image formats that are supported by this
1087     * camera device for input streams, to their corresponding output formats.</p>
1088     * <p>All camera devices with an android.request.maxNumInputStreams of at least 1
1089     * will have at least one
1090     * available input format.</p>
1091     * <p>The camera device will support the following map of formats,
1092     * if its dependent capability is supported:</p>
1093     * <table>
1094     * <thead>
1095     * <tr>
1096     * <th align="left">Input Format</th>
1097     * <th align="left">Output Format</th>
1098     * <th align="left">Capability</th>
1099     * </tr>
1100     * </thead>
1101     * <tbody>
1102     * <tr>
1103     * <td align="left">RAW_OPAQUE</td>
1104     * <td align="left">JPEG</td>
1105     * <td align="left">ZSL</td>
1106     * </tr>
1107     * <tr>
1108     * <td align="left">RAW_OPAQUE</td>
1109     * <td align="left">YUV_420_888</td>
1110     * <td align="left">ZSL</td>
1111     * </tr>
1112     * <tr>
1113     * <td align="left">RAW_OPAQUE</td>
1114     * <td align="left">RAW16</td>
1115     * <td align="left">RAW</td>
1116     * </tr>
1117     * <tr>
1118     * <td align="left">RAW16</td>
1119     * <td align="left">YUV_420_888</td>
1120     * <td align="left">RAW</td>
1121     * </tr>
1122     * <tr>
1123     * <td align="left">RAW16</td>
1124     * <td align="left">JPEG</td>
1125     * <td align="left">RAW</td>
1126     * </tr>
1127     * </tbody>
1128     * </table>
1129     * <p>For ZSL-capable camera devices, using the RAW_OPAQUE format
1130     * as either input or output will never hurt maximum frame rate (i.e.
1131     * StreamConfigurationMap#getOutputStallDuration(int,Size)
1132     * for a <code>format =</code> RAW_OPAQUE is always 0).</p>
1133     * <p>Attempting to configure an input stream with output streams not
1134     * listed as available in this map is not valid.</p>
1135     * <p>TODO: typedef to ReprocessFormatMap</p>
1136     * @hide
1137     */
1138    public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
1139            new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);
1140
1141    /**
1142     * <p>The available stream configurations that this
1143     * camera device supports
1144     * (i.e. format, width, height, output/input stream).</p>
1145     * <p>The configurations are listed as <code>(format, width, height, input?)</code>
1146     * tuples.</p>
1147     * <p>For a given use case, the actual maximum supported resolution
1148     * may be lower than what is listed here, depending on the destination
1149     * Surface for the image data. For example, for recording video,
1150     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
1151     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
1152     * can provide.</p>
1153     * <p>Please reference the documentation for the image data destination to
1154     * check if it limits the maximum size for image data.</p>
1155     * <p>Not all output formats may be supported in a configuration with
1156     * an input stream of a particular format. For more details, see
1157     * android.scaler.availableInputOutputFormatsMap.</p>
1158     * <p>The following table describes the minimum required output stream
1159     * configurations based on the hardware level
1160     * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
1161     * <table>
1162     * <thead>
1163     * <tr>
1164     * <th align="center">Format</th>
1165     * <th align="center">Size</th>
1166     * <th align="center">Hardware Level</th>
1167     * <th align="center">Notes</th>
1168     * </tr>
1169     * </thead>
1170     * <tbody>
1171     * <tr>
1172     * <td align="center">JPEG</td>
1173     * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
1174     * <td align="center">Any</td>
1175     * <td align="center"></td>
1176     * </tr>
1177     * <tr>
1178     * <td align="center">JPEG</td>
1179     * <td align="center">1920x1080 (1080p)</td>
1180     * <td align="center">Any</td>
1181     * <td align="center">if 1080p &lt;= activeArraySize</td>
1182     * </tr>
1183     * <tr>
1184     * <td align="center">JPEG</td>
1185     * <td align="center">1280x720 (720p)</td>
1186     * <td align="center">Any</td>
1187     * <td align="center">if 720p &lt;= activeArraySize</td>
1188     * </tr>
1189     * <tr>
1190     * <td align="center">JPEG</td>
1191     * <td align="center">640x480 (480p)</td>
1192     * <td align="center">Any</td>
1193     * <td align="center">if 480p &lt;= activeArraySize</td>
1194     * </tr>
1195     * <tr>
1196     * <td align="center">JPEG</td>
1197     * <td align="center">320x240 (240p)</td>
1198     * <td align="center">Any</td>
1199     * <td align="center">if 240p &lt;= activeArraySize</td>
1200     * </tr>
1201     * <tr>
1202     * <td align="center">YUV_420_888</td>
1203     * <td align="center">all output sizes available for JPEG</td>
1204     * <td align="center">FULL</td>
1205     * <td align="center"></td>
1206     * </tr>
1207     * <tr>
1208     * <td align="center">YUV_420_888</td>
1209     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
1210     * <td align="center">LIMITED</td>
1211     * <td align="center"></td>
1212     * </tr>
1213     * <tr>
1214     * <td align="center">IMPLEMENTATION_DEFINED</td>
1215     * <td align="center">same as YUV_420_888</td>
1216     * <td align="center">Any</td>
1217     * <td align="center"></td>
1218     * </tr>
1219     * </tbody>
1220     * </table>
1221     * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
1222     * mandatory stream configurations on a per-capability basis.</p>
1223     *
1224     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1225     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1226     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1227     * @hide
1228     */
1229    public static final Key<android.hardware.camera2.params.StreamConfiguration[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
1230            new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.scaler.availableStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
1231
1232    /**
1233     * <p>This lists the minimum frame duration for each
1234     * format/size combination.</p>
1235     * <p>This should correspond to the frame duration when only that
1236     * stream is active, with all processing (typically in android.*.mode)
1237     * set to either OFF or FAST.</p>
1238     * <p>When multiple streams are used in a request, the minimum frame
1239     * duration will be max(individual stream min durations).</p>
1240     * <p>The minimum frame duration of a stream (of a particular format, size)
1241     * is the same regardless of whether the stream is input or output.</p>
1242     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
1243     * android.scaler.availableStallDurations for more details about
1244     * calculating the max frame rate.</p>
1245     * <p>(Keep in sync with
1246     * StreamConfigurationMap#getOutputMinFrameDuration)</p>
1247     *
1248     * @see CaptureRequest#SENSOR_FRAME_DURATION
1249     * @hide
1250     */
1251    public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
1252            new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
1253
1254    /**
1255     * <p>This lists the maximum stall duration for each
1256     * format/size combination.</p>
1257     * <p>A stall duration is how much extra time would get added
1258     * to the normal minimum frame duration for a repeating request
1259     * that has streams with non-zero stall.</p>
1260     * <p>For example, consider JPEG captures which have the following
1261     * characteristics:</p>
1262     * <ul>
1263     * <li>JPEG streams act like processed YUV streams in requests for which
1264     * they are not included; in requests in which they are directly
1265     * referenced, they act as JPEG streams. This is because supporting a
1266     * JPEG stream requires the underlying YUV data to always be ready for
1267     * use by a JPEG encoder, but the encoder will only be used (and impact
1268     * frame duration) on requests that actually reference a JPEG stream.</li>
1269     * <li>The JPEG processor can run concurrently to the rest of the camera
1270     * pipeline, but cannot process more than 1 capture at a time.</li>
1271     * </ul>
1272     * <p>In other words, using a repeating YUV request would result
1273     * in a steady frame rate (let's say it's 30 FPS). If a single
1274     * JPEG request is submitted periodically, the frame rate will stay
1275     * at 30 FPS (as long as we wait for the previous JPEG to return each
1276     * time). If we try to submit a repeating YUV + JPEG request, then
1277     * the frame rate will drop from 30 FPS.</p>
1278     * <p>In general, submitting a new request with a non-0 stall time
1279     * stream will <em>not</em> cause a frame rate drop unless there are still
1280     * outstanding buffers for that stream from previous requests.</p>
1281     * <p>Submitting a repeating request with a set of streams (call this <code>S</code>)
1282     * is the same as setting the minimum frame duration to
1283     * the normal minimum frame duration corresponding to <code>S</code>, plus
1284     * the maximum stall duration for <code>S</code>.</p>
1285     * <p>When interleaving requests with and without a stall duration,
1286     * a request will stall by the maximum of the remaining stall times
1287     * for each stalling stream that has outstanding buffers.</p>
1288     * <p>This means that a stalling request will not have an exposure start
1289     * until the stall has completed.</p>
1290     * <p>This should correspond to the stall duration when only that stream is
1291     * active, with all processing (typically in android.*.mode) set to FAST
1292     * or OFF. Setting any of the processing modes to HIGH_QUALITY
1293     * effectively results in an indeterminate stall duration for all
1294     * streams in a request (the regular stall calculation rules are
1295     * ignored).</p>
1296     * <p>The following formats may always have a stall duration:</p>
1297     * <ul>
1298     * <li>ImageFormat#JPEG</li>
1299     * <li>ImageFormat#RAW_SENSOR</li>
1300     * </ul>
1301     * <p>The following formats will never have a stall duration:</p>
1302     * <ul>
1303     * <li>ImageFormat#YUV_420_888</li>
1304     * </ul>
1305     * <p>All other formats may or may not have an allowed stall duration on
1306     * a per-capability basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
1307     * for more details.</p>
1308     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
1309     * calculating the max frame rate (absent stalls).</p>
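     * <p>As an illustrative, non-normative sketch, the steady-state frame duration of a
     * repeating YUV + JPEG request could be estimated through the public
     * {@link android.hardware.camera2.params.StreamConfigurationMap StreamConfigurationMap}
     * ({@code characteristics}, {@code yuvSize} and {@code jpegSize} are hypothetical locals):</p>
     * <pre><code>
     * StreamConfigurationMap map =
     *         characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     * long yuvMin  = map.getOutputMinFrameDuration(android.graphics.ImageFormat.YUV_420_888, yuvSize);
     * long jpegMin = map.getOutputMinFrameDuration(android.graphics.ImageFormat.JPEG, jpegSize);
     * long jpegStall = map.getOutputStallDuration(android.graphics.ImageFormat.JPEG, jpegSize);
     * // Worst case while a JPEG is still outstanding: slowest stream plus the JPEG stall.
     * long repeatingFrameDurationNs = Math.max(yuvMin, jpegMin) + jpegStall;
     * </code></pre>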
1310     * <p>(Keep up to date with
1311     * StreamConfigurationMap#getOutputStallDuration(int, Size))</p>
1312     *
1313     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1314     * @see CaptureRequest#SENSOR_FRAME_DURATION
1315     * @hide
1316     */
1317    public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_STALL_DURATIONS =
1318            new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
1319
1320    /**
1321     * <p>The available stream configurations that this
1322     * camera device supports; also includes the minimum frame durations
1323     * and the stall durations for each format/size combination.</p>
1324     * <p>All camera devices will support sensor maximum resolution (defined by
1325     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
1326     * <p>For a given use case, the actual maximum supported resolution
1327     * may be lower than what is listed here, depending on the destination
1328     * Surface for the image data. For example, for recording video,
1329     * the video encoder chosen may have a maximum size limit (e.g. 1080p)
1330     * smaller than what the camera (e.g. maximum resolution is 3264x2448)
1331     * can provide.</p>
1332     * <p>Please reference the documentation for the image data destination to
1333     * check if it limits the maximum size for image data.</p>
1334     * <p>The following table describes the minimum required output stream
1335     * configurations based on the hardware level
1336     * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
1337     * <table>
1338     * <thead>
1339     * <tr>
1340     * <th align="center">Format</th>
1341     * <th align="center">Size</th>
1342     * <th align="center">Hardware Level</th>
1343     * <th align="center">Notes</th>
1344     * </tr>
1345     * </thead>
1346     * <tbody>
1347     * <tr>
1348     * <td align="center">JPEG</td>
1349     * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
1350     * <td align="center">Any</td>
1351     * <td align="center"></td>
1352     * </tr>
1353     * <tr>
1354     * <td align="center">JPEG</td>
1355     * <td align="center">1920x1080 (1080p)</td>
1356     * <td align="center">Any</td>
1357     * <td align="center">if 1080p &lt;= activeArraySize</td>
1358     * </tr>
1359     * <tr>
1360     * <td align="center">JPEG</td>
1361     * <td align="center">1280x720 (720p)</td>
1362     * <td align="center">Any</td>
1363     * <td align="center">if 720p &lt;= activeArraySize</td>
1364     * </tr>
1365     * <tr>
1366     * <td align="center">JPEG</td>
1367     * <td align="center">640x480 (480p)</td>
1368     * <td align="center">Any</td>
1369     * <td align="center">if 480p &lt;= activeArraySize</td>
1370     * </tr>
1371     * <tr>
1372     * <td align="center">JPEG</td>
1373     * <td align="center">320x240 (240p)</td>
1374     * <td align="center">Any</td>
1375     * <td align="center">if 240p &lt;= activeArraySize</td>
1376     * </tr>
1377     * <tr>
1378     * <td align="center">YUV_420_888</td>
1379     * <td align="center">all output sizes available for JPEG</td>
1380     * <td align="center">FULL</td>
1381     * <td align="center"></td>
1382     * </tr>
1383     * <tr>
1384     * <td align="center">YUV_420_888</td>
1385     * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
1386     * <td align="center">LIMITED</td>
1387     * <td align="center"></td>
1388     * </tr>
1389     * <tr>
1390     * <td align="center">IMPLEMENTATION_DEFINED</td>
1391     * <td align="center">same as YUV_420_888</td>
1392     * <td align="center">Any</td>
1393     * <td align="center"></td>
1394     * </tr>
1395     * </tbody>
1396     * </table>
1397     * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
1398     * mandatory stream configurations on a per-capability basis.</p>
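     * <p>As a rough, non-normative sketch, a client could query this map as follows
     * ({@code characteristics} is assumed to have been obtained from
     * {@link CameraManager#getCameraCharacteristics}):</p>
     * <pre><code>
     * StreamConfigurationMap map =
     *         characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
     * // Every JPEG output size supported by this device.
     * android.util.Size[] jpegSizes = map.getOutputSizes(android.graphics.ImageFormat.JPEG);
     * // Minimum frame duration (in nanoseconds) for the first listed JPEG size.
     * long minFrameDurationNs =
     *         map.getOutputMinFrameDuration(android.graphics.ImageFormat.JPEG, jpegSizes[0]);
     * </code></pre>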
1399     *
1400     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1401     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1402     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1403     */
1404    @PublicKey
1405    @SyntheticKey
1406    public static final Key<android.hardware.camera2.params.StreamConfigurationMap> SCALER_STREAM_CONFIGURATION_MAP =
1407            new Key<android.hardware.camera2.params.StreamConfigurationMap>("android.scaler.streamConfigurationMap", android.hardware.camera2.params.StreamConfigurationMap.class);
1408
1409    /**
1410     * <p>The crop type that this camera device supports.</p>
1411     * <p>When passing a non-centered crop region ({@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}) to a camera
1412     * device that only supports CENTER_ONLY cropping, the camera device will move the
1413     * crop region to the center of the sensor active array ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize})
1414     * and keep the crop region width and height unchanged. The camera device will return the
1415     * final used crop region in metadata result {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}.</p>
1416     * <p>Camera devices that support FREEFORM cropping will support any crop region that
1417     * is inside of the active array. The camera device will apply the same crop region and
1418     * return the final used crop region in capture result metadata {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}.</p>
1419     * <p>FULL capability devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL) will support
1420     * FREEFORM cropping.</p>
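     * <p>For instance, an application applying a 2x digital zoom could build a centered crop
     * region as in the following illustrative sketch ({@code characteristics} and {@code builder}
     * are hypothetical locals; {@code builder} is a {@link CaptureRequest.Builder}):</p>
     * <pre><code>
     * android.graphics.Rect active =
     *         characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
     * int cropW = active.width() / 2;
     * int cropH = active.height() / 2;
     * // Center the half-size crop region within the active array.
     * android.graphics.Rect zoomRegion = new android.graphics.Rect(
     *         (active.width() - cropW) / 2, (active.height() - cropH) / 2,
     *         (active.width() + cropW) / 2, (active.height() + cropH) / 2);
     * builder.set(CaptureRequest.SCALER_CROP_REGION, zoomRegion);
     * </code></pre>
     * <p>Because the region above is already centered, it behaves identically on CENTER_ONLY
     * and FREEFORM devices.</p>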
1421     *
1422     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1423     * @see CaptureRequest#SCALER_CROP_REGION
1424     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1425     * @see #SCALER_CROPPING_TYPE_CENTER_ONLY
1426     * @see #SCALER_CROPPING_TYPE_FREEFORM
1427     */
1428    @PublicKey
1429    public static final Key<Integer> SCALER_CROPPING_TYPE =
1430            new Key<Integer>("android.scaler.croppingType", int.class);
1431
1432    /**
1433     * <p>Area of raw data which corresponds to only
1434     * active pixels.</p>
1435     * <p>It is smaller than or equal to
1436     * the full sensor pixel array, which may include the black calibration pixels.</p>
1437     */
1438    @PublicKey
1439    public static final Key<android.graphics.Rect> SENSOR_INFO_ACTIVE_ARRAY_SIZE =
1440            new Key<android.graphics.Rect>("android.sensor.info.activeArraySize", android.graphics.Rect.class);
1441
1442    /**
1443     * <p>Range of valid sensitivities.</p>
1444     * <p>The minimum and maximum valid values for the
1445     * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} control.</p>
1446     * <p>The values are the standard ISO sensitivity values,
1447     * as defined in ISO 12232:2006.</p>
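     * <p>As an illustrative sketch, an application could clamp a desired ISO value to this
     * range before setting {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}
     * ({@code characteristics} and {@code builder} are hypothetical locals):</p>
     * <pre><code>
     * android.util.Range&lt;Integer&gt; range =
     *         characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
     * int iso = range.clamp(800); // 800 is an arbitrary example target
     * builder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);
     * </code></pre>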
1448     *
1449     * @see CaptureRequest#SENSOR_SENSITIVITY
1450     */
1451    @PublicKey
1452    public static final Key<android.util.Range<Integer>> SENSOR_INFO_SENSITIVITY_RANGE =
1453            new Key<android.util.Range<Integer>>("android.sensor.info.sensitivityRange", new TypeReference<android.util.Range<Integer>>() {{ }});
1454
1455    /**
1456     * <p>The arrangement of color filters on sensor;
1457     * represents the colors in the top-left 2x2 section of
1458     * the sensor, in reading order.</p>
1459     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB
1460     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG
1461     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG
1462     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR
1463     * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB
1464     */
1465    @PublicKey
1466    public static final Key<Integer> SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =
1467            new Key<Integer>("android.sensor.info.colorFilterArrangement", int.class);
1468
1469    /**
1470     * <p>Range of valid exposure
1471     * times used by {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}.</p>
1472     * <p>The min value will be &lt;= 100e3 (100 us). For FULL
1473     * capability devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL),
1474     * the max will be &gt;= 100e6 (100 ms).</p>
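     * <p>For example (an illustrative sketch only), an application could check whether a
     * 33 ms exposure is achievable before requesting it:</p>
     * <pre><code>
     * android.util.Range&lt;Long&gt; range =
     *         characteristics.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
     * long exposureNs = 33000000L; // 33 ms, an arbitrary example value
     * boolean supported = range.contains(exposureNs);
     * </code></pre>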
1475     *
1476     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1477     * @see CaptureRequest#SENSOR_EXPOSURE_TIME
1478     */
1479    @PublicKey
1480    public static final Key<android.util.Range<Long>> SENSOR_INFO_EXPOSURE_TIME_RANGE =
1481            new Key<android.util.Range<Long>>("android.sensor.info.exposureTimeRange", new TypeReference<android.util.Range<Long>>() {{ }});
1482
1483    /**
1484     * <p>Maximum possible frame duration (minimum frame
1485     * rate).</p>
1486     * <p>The largest possible {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
1487     * that will be accepted by the camera device. Attempting to use
1488     * frame durations beyond the maximum will result in the frame duration
1489     * being clipped to the maximum. See that control
1490     * for a full definition of frame durations.</p>
1491     * <p>Refer to
1492     * StreamConfigurationMap#getOutputMinFrameDuration(int,Size)
1493     * for the minimum frame duration values.</p>
1494     * <p>For FULL capability devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL),
1495     * max will be &gt;= 100e6 (100ms).</p>
1496     *
1497     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1498     * @see CaptureRequest#SENSOR_FRAME_DURATION
1499     */
1500    @PublicKey
1501    public static final Key<Long> SENSOR_INFO_MAX_FRAME_DURATION =
1502            new Key<Long>("android.sensor.info.maxFrameDuration", long.class);
1503
1504    /**
1505     * <p>The physical dimensions of the full pixel
1506     * array.</p>
1507     * <p>This is the physical size of the sensor pixel
1508     * array defined by {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
1509     *
1510     * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
1511     */
1512    @PublicKey
1513    public static final Key<android.util.SizeF> SENSOR_INFO_PHYSICAL_SIZE =
1514            new Key<android.util.SizeF>("android.sensor.info.physicalSize", android.util.SizeF.class);
1515
1516    /**
1517     * <p>Dimensions of full pixel array, possibly
1518     * including black calibration pixels.</p>
1519     * <p>The pixel count of the full pixel array,
1520     * which covers the {@link CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE android.sensor.info.physicalSize} area.</p>
1521     * <p>If a camera device supports raw sensor formats, either this
1522     * or {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} is the maximum output
1523     * raw size listed in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.
1524     * If a size corresponding to pixelArraySize is listed, the resulting
1525     * raw sensor image will include black pixels.</p>
1526     *
1527     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
1528     * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
1529     * @see CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE
1530     */
1531    @PublicKey
1532    public static final Key<android.util.Size> SENSOR_INFO_PIXEL_ARRAY_SIZE =
1533            new Key<android.util.Size>("android.sensor.info.pixelArraySize", android.util.Size.class);
1534
1535    /**
1536     * <p>Maximum raw value output by sensor.</p>
1537     * <p>This specifies the fully-saturated encoding level for the raw
1538     * sample values from the sensor.  Saturation at this level is typically caused by the
1539     * sensor becoming highly non-linear or clipping. The minimum for
1540     * each channel is specified by the offset in the
1541     * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} tag.</p>
1542     * <p>The white level is typically determined either by sensor bit depth
1543     * (8-14 bits is expected), or by the point where the sensor response
1544     * becomes too non-linear to be useful.  The default value for this is the
1545     * maximum representable value for a 16-bit raw sample (2^16 - 1).</p>
1546     *
1547     * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
1548     */
1549    @PublicKey
1550    public static final Key<Integer> SENSOR_INFO_WHITE_LEVEL =
1551            new Key<Integer>("android.sensor.info.whiteLevel", int.class);
1552
1553    /**
1554     * <p>The time base source for sensor capture start timestamps.</p>
1555     * <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
1556     * may not be based on a time source that can be compared to other system time sources.</p>
1557     * <p>This characteristic defines the source for the timestamps, and therefore whether they
1558     * can be compared against other system time sources/timestamps.</p>
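     * <p>As an illustrative sketch, an application that needs capture timestamps comparable to
     * {@link android.os.SystemClock#elapsedRealtimeNanos} could check for the REALTIME source:</p>
     * <pre><code>
     * int source = characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
     * boolean comparableToElapsedRealtime =
     *         (source == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME);
     * </code></pre>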
1559     * @see #SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN
1560     * @see #SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME
1561     */
1562    @PublicKey
1563    public static final Key<Integer> SENSOR_INFO_TIMESTAMP_SOURCE =
1564            new Key<Integer>("android.sensor.info.timestampSource", int.class);
1565
1566    /**
1567     * <p>The standard reference illuminant used as the scene light source when
1568     * calculating the {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 android.sensor.colorTransform1},
1569     * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 android.sensor.calibrationTransform1}, and
1570     * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX1 android.sensor.forwardMatrix1} matrices.</p>
1571     * <p>The values in this tag correspond to the values defined for the
1572     * EXIF LightSource tag. These illuminants are standard light sources
1573     * that are often used when calibrating camera devices.</p>
1574     * <p>If this tag is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 android.sensor.colorTransform1},
1575     * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 android.sensor.calibrationTransform1}, and
1576     * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX1 android.sensor.forwardMatrix1} will also be present.</p>
1577     * <p>Some devices may choose to provide a second set of calibration
1578     * information for improved quality, including
1579     * {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2} and its corresponding matrices.</p>
1580     *
1581     * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1
1582     * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM1
1583     * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX1
1584     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
1585     * @see #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT
1586     * @see #SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT
1587     * @see #SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN
1588     * @see #SENSOR_REFERENCE_ILLUMINANT1_FLASH
1589     * @see #SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER
1590     * @see #SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER
1591     * @see #SENSOR_REFERENCE_ILLUMINANT1_SHADE
1592     * @see #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT
1593     * @see #SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT
1594     * @see #SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT
1595     * @see #SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT
1596     * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A
1597     * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B
1598     * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C
1599     * @see #SENSOR_REFERENCE_ILLUMINANT1_D55
1600     * @see #SENSOR_REFERENCE_ILLUMINANT1_D65
1601     * @see #SENSOR_REFERENCE_ILLUMINANT1_D75
1602     * @see #SENSOR_REFERENCE_ILLUMINANT1_D50
1603     * @see #SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN
1604     */
1605    @PublicKey
1606    public static final Key<Integer> SENSOR_REFERENCE_ILLUMINANT1 =
1607            new Key<Integer>("android.sensor.referenceIlluminant1", int.class);
1608
1609    /**
1610     * <p>The standard reference illuminant used as the scene light source when
1611     * calculating the {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
1612     * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
1613     * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} matrices.</p>
1614     * <p>See {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1} for more details.
1615     * Valid values for this are the same as those given for the first
1616     * reference illuminant.</p>
1617     * <p>If this tag is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
1618     * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
1619     * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} will also be present.</p>
1620     *
1621     * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2
1622     * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM2
1623     * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX2
1624     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
1625     */
1626    @PublicKey
1627    public static final Key<Byte> SENSOR_REFERENCE_ILLUMINANT2 =
1628            new Key<Byte>("android.sensor.referenceIlluminant2", byte.class);
1629
1630    /**
1631     * <p>A per-device calibration transform matrix that maps from the
1632     * reference sensor colorspace to the actual device sensor colorspace.</p>
1633     * <p>This matrix is used to correct for per-device variations in the
1634     * sensor colorspace, and is used for processing raw buffer data.</p>
1635     * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
1636     * contains a per-device calibration transform that maps colors
1637     * from reference sensor color space (i.e. the "golden module"
1638     * colorspace) into this camera device's native sensor color
1639     * space under the first reference illuminant
1640     * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
1641     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1642     *
1643     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
1644     */
1645    @PublicKey
1646    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM1 =
1647            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
1648
1649    /**
1650     * <p>A per-device calibration transform matrix that maps from the
1651     * reference sensor colorspace to the actual device sensor colorspace
1652     * (this is the colorspace of the raw buffer data).</p>
1653     * <p>This matrix is used to correct for per-device variations in the
1654     * sensor colorspace, and is used for processing raw buffer data.</p>
1655     * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
1656     * contains a per-device calibration transform that maps colors
1657     * from reference sensor color space (i.e. the "golden module"
1658     * colorspace) into this camera device's native sensor color
1659     * space under the second reference illuminant
1660     * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
1661     * <p>This matrix will only be present if the second reference
1662     * illuminant is present.</p>
1663     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1664     *
1665     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
1666     */
1667    @PublicKey
1668    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM2 =
1669            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
1670
1671    /**
1672     * <p>A matrix that transforms color values from CIE XYZ color space to
1673     * reference sensor color space.</p>
1674     * <p>This matrix is used to convert from the standard CIE XYZ color
1675     * space to the reference sensor colorspace, and is used when processing
1676     * raw buffer data.</p>
1677     * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
1678     * contains a color transform matrix that maps colors from the CIE
1679     * XYZ color space to the reference sensor color space (i.e. the
1680     * "golden module" colorspace) under the first reference illuminant
1681     * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
1682     * <p>The white points chosen in both the reference sensor color space
1683     * and the CIE XYZ colorspace when calculating this transform will
1684     * match the standard white point for the first reference illuminant
1685     * (i.e. no chromatic adaptation will be applied by this transform).</p>
1686     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1687     *
1688     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
1689     */
1690    @PublicKey
1691    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM1 =
1692            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
1693
1694    /**
1695     * <p>A matrix that transforms color values from CIE XYZ color space to
1696     * reference sensor color space.</p>
1697     * <p>This matrix is used to convert from the standard CIE XYZ color
1698     * space to the reference sensor colorspace, and is used when processing
1699     * raw buffer data.</p>
1700     * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
1701     * contains a color transform matrix that maps colors from the CIE
1702     * XYZ color space to the reference sensor color space (i.e. the
1703     * "golden module" colorspace) under the second reference illuminant
1704     * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
1705     * <p>The white points chosen in both the reference sensor color space
1706     * and the CIE XYZ colorspace when calculating this transform will
1707     * match the standard white point for the second reference illuminant
1708     * (i.e. no chromatic adaptation will be applied by this transform).</p>
1709     * <p>This matrix will only be present if the second reference
1710     * illuminant is present.</p>
1711     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1712     *
1713     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
1714     */
1715    @PublicKey
1716    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM2 =
1717            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
1718
1719    /**
1720     * <p>A matrix that transforms white balanced camera colors from the reference
1721     * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
1722     * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
1723     * is used when processing raw buffer data.</p>
1724     * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
1725     * a color transform matrix that maps white balanced colors from the
1726     * reference sensor color space to the CIE XYZ color space with a D50 white
1727     * point.</p>
1728     * <p>Under the first reference illuminant ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1})
1729     * this matrix is chosen so that the standard white point for this reference
1730     * illuminant in the reference sensor colorspace is mapped to D50 in the
1731     * CIE XYZ colorspace.</p>
1732     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1733     *
1734     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
1735     */
1736    @PublicKey
1737    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX1 =
1738            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix1", android.hardware.camera2.params.ColorSpaceTransform.class);
1739
1740    /**
1741     * <p>A matrix that transforms white balanced camera colors from the reference
1742     * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
1743     * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
1744     * is used when processing raw buffer data.</p>
1745     * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
1746     * a color transform matrix that maps white balanced colors from the
1747     * reference sensor color space to the CIE XYZ color space with a D50 white
1748     * point.</p>
1749     * <p>Under the second reference illuminant ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2})
1750     * this matrix is chosen so that the standard white point for this reference
1751     * illuminant in the reference sensor colorspace is mapped to D50 in the
1752     * CIE XYZ colorspace.</p>
1753     * <p>This matrix will only be present if the second reference
1754     * illuminant is present.</p>
1755     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1756     *
1757     * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
1758     */
1759    @PublicKey
1760    public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX2 =
1761            new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix2", android.hardware.camera2.params.ColorSpaceTransform.class);
1762
1763    /**
1764     * <p>A fixed black level offset for each of the color filter arrangement
1765     * (CFA) mosaic channels.</p>
1766     * <p>This tag specifies the zero light value for each of the CFA mosaic
1767     * channels in the camera sensor.  The maximal value output by the
1768     * sensor is represented by the value in {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}.</p>
1769     * <p>The values are given in the same order as channels listed for the CFA
1770     * layout tag (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
1771     * nth value given corresponds to the black level offset for the nth
1772     * color channel listed in the CFA.</p>
1773     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
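     * <p>When present, the per-channel offsets can be read back through
     * {@link android.hardware.camera2.params.BlackLevelPattern BlackLevelPattern}, as in this
     * illustrative sketch:</p>
     * <pre><code>
     * BlackLevelPattern pattern =
     *         characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
     * if (pattern != null) {
     *     // Black level offset of the top-left channel in the 2x2 CFA layout.
     *     int topLeftOffset = pattern.getOffsetForIndex(0, 0); // column 0, row 0
     * }
     * </code></pre>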
1774     *
1775     * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
1776     * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL
1777     */
1778    @PublicKey
1779    public static final Key<android.hardware.camera2.params.BlackLevelPattern> SENSOR_BLACK_LEVEL_PATTERN =
1780            new Key<android.hardware.camera2.params.BlackLevelPattern>("android.sensor.blackLevelPattern", android.hardware.camera2.params.BlackLevelPattern.class);
1781
1782    /**
1783     * <p>Maximum sensitivity that is implemented
1784     * purely through analog gain.</p>
1785     * <p>For {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} values less than or
1786     * equal to this, all applied gain must be analog. For
1787     * values above this, the gain applied can be a mix of analog and
1788     * digital.</p>
1789     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1790     * <p><b>Full capability</b> -
1791     * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
1792     * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
1793     *
1794     * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
1795     * @see CaptureRequest#SENSOR_SENSITIVITY
1796     */
1797    @PublicKey
1798    public static final Key<Integer> SENSOR_MAX_ANALOG_SENSITIVITY =
1799            new Key<Integer>("android.sensor.maxAnalogSensitivity", int.class);
1800
1801    /**
1802     * <p>Clockwise angle through which the output
1803     * image needs to be rotated to be upright on the device
1804     * screen in its native orientation. Also defines the
1805     * direction of rolling shutter readout, which is from top
1806     * to bottom in the sensor's coordinate system.</p>
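     * <p>One rough, illustrative way to pick a JPEG rotation for a back-facing camera
     * (a front-facing camera would additionally negate the device angle;
     * {@code deviceOrientation} and {@code builder} are hypothetical inputs):</p>
     * <pre><code>
     * int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
     * int rounded = ((deviceOrientation + 45) / 90) * 90; // round to a multiple of 90 degrees
     * int jpegRotation = (sensorOrientation + rounded + 360) % 360;
     * builder.set(CaptureRequest.JPEG_ORIENTATION, jpegRotation);
     * </code></pre>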
1807     */
1808    @PublicKey
1809    public static final Key<Integer> SENSOR_ORIENTATION =
1810            new Key<Integer>("android.sensor.orientation", int.class);
1811
1812    /**
1813     * <p>Lists the supported sensor test pattern modes for {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode}.</p>
1814     * <p>Optional. Defaults to [OFF].</p>
1815     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
1816     *
1817     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
1818     */
1819    @PublicKey
1820    public static final Key<int[]> SENSOR_AVAILABLE_TEST_PATTERN_MODES =
1821            new Key<int[]>("android.sensor.availableTestPatternModes", int[].class);
1822
1823    /**
1824     * <p>The face detection modes that are available
1825     * for this camera device.</p>
1826     * <p>OFF is always supported.</p>
1827     * <p>SIMPLE means the device supports the
1828     * android.statistics.faceRectangles and
1829     * android.statistics.faceScores outputs.</p>
1830     * <p>FULL means the device additionally supports the
1831     * android.statistics.faceIds and
1832     * android.statistics.faceLandmarks outputs.</p>
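     * <p>As an illustrative sketch, an application could pick the most capable mode listed,
     * relying on the numeric OFF &lt; SIMPLE &lt; FULL ordering of the mode constants:</p>
     * <pre><code>
     * int[] modes = characteristics.get(
     *         CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
     * int best = CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_OFF;
     * for (int mode : modes) {
     *     best = Math.max(best, mode);
     * }
     * builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, best);
     * </code></pre>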
1833     */
1834    @PublicKey
1835    public static final Key<int[]> STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
1836            new Key<int[]>("android.statistics.info.availableFaceDetectModes", int[].class);
1837
1838    /**
1839     * <p>The maximum number of simultaneously detectable
1840     * faces.</p>
1841     */
1842    @PublicKey
1843    public static final Key<Integer> STATISTICS_INFO_MAX_FACE_COUNT =
1844            new Key<Integer>("android.statistics.info.maxFaceCount", int.class);
1845
1846    /**
1847     * <p>The set of hot pixel map output modes supported by this camera device.</p>
1848     * <p>This tag lists valid output modes for {@link CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE android.statistics.hotPixelMapMode}.</p>
1849     * <p>If no hot pixel map is available for this camera device, this will contain
1850     * only OFF.  If a hot pixel map is available, this will include both
1851     * the ON and OFF options.</p>
1852     *
1853     * @see CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE
1854     */
1855    @PublicKey
1856    public static final Key<boolean[]> STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES =
1857            new Key<boolean[]>("android.statistics.info.availableHotPixelMapModes", boolean[].class);
1858
1859    /**
1860     * <p>Maximum number of supported points in the
1861     * tonemap curve that can be used for {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.</p>
1862     * <p>If the actual number of points provided by the application (in
1863     * {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}) is less than this maximum, the camera device will
1864     * resample the curve to its internal representation, using linear
1865     * interpolation.</p>
1866     * <p>The output curves in the result metadata may have a different number
1867     * of points than the input curves, and will represent the actual
1868     * hardware curves used as closely as possible when linearly interpolated.</p>
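     * <p>For instance, an identity curve using as many points as the device accepts could be
     * built as in this illustrative sketch (using
     * {@link android.hardware.camera2.params.TonemapCurve TonemapCurve}):</p>
     * <pre><code>
     * int n = characteristics.get(CameraCharacteristics.TONEMAP_MAX_CURVE_POINTS);
     * float[] curve = new float[2 * n];
     * for (int i = 0; i &lt; n; i++) {
     *     float v = i / (float) (n - 1);
     *     curve[2 * i] = v;     // Pin
     *     curve[2 * i + 1] = v; // Pout
     * }
     * TonemapCurve identity = new TonemapCurve(curve, curve, curve);
     * builder.set(CaptureRequest.TONEMAP_CURVE, identity);
     * </code></pre>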
1869     *
1870     * @see CaptureRequest#TONEMAP_CURVE
1871     */
1872    @PublicKey
1873    public static final Key<Integer> TONEMAP_MAX_CURVE_POINTS =
1874            new Key<Integer>("android.tonemap.maxCurvePoints", int.class);
1875
1876    /**
1877     * <p>The set of tonemapping modes supported by this camera device.</p>
1878     * <p>This tag lists the valid modes for {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}.</p>
1879     * <p>Full-capability camera devices must always support CONTRAST_CURVE and
1880     * FAST.</p>
1881     *
1882     * @see CaptureRequest#TONEMAP_MODE
1883     */
1884    @PublicKey
1885    public static final Key<int[]> TONEMAP_AVAILABLE_TONE_MAP_MODES =
1886            new Key<int[]>("android.tonemap.availableToneMapModes", int[].class);
1887
1888    /**
1889     * <p>A list of camera LEDs that are available on this system.</p>
1890     * @see #LED_AVAILABLE_LEDS_TRANSMIT
1891     * @hide
1892     */
1893    public static final Key<int[]> LED_AVAILABLE_LEDS =
1894            new Key<int[]>("android.led.availableLeds", int[].class);
1895
1896    /**
1897     * <p>Generally classifies the overall set of the camera device functionality.</p>
1898     * <p>Camera devices come in three flavors: LIMITED, FULL, and LEGACY.</p>
1899     * <p>A FULL device has the most support possible and will support the capabilities below:</p>
1900     * <ul>
1901     * <li>30fps operation at maximum resolution (== sensor resolution) is preferred; more than 20fps is required.</li>
1902     * <li>Per frame control ({@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} <code>==</code> PER_FRAME_CONTROL)</li>
1903     * <li>Manual sensor control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR)</li>
1904     * <li>Manual post-processing control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_POST_PROCESSING)</li>
1905     * <li>Arbitrary cropping region ({@link CameraCharacteristics#SCALER_CROPPING_TYPE android.scaler.croppingType} <code>==</code> FREEFORM)</li>
1906     * <li>At least 3 processed (but not stalling) format output streams ({@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_PROC android.request.maxNumOutputProc} <code>&gt;=</code> 3)</li>
1907     * <li>The required stream configuration defined in android.scaler.availableStreamConfigurations</li>
1908     * <li>The required exposure time range defined in {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</li>
1909     * <li>The required maxFrameDuration defined in {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}</li>
1910     * </ul>
1911     * <p>A LIMITED device may have some or none of the above characteristics.
1912     * To find out more, refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
1913     * <p>A LEGACY device does not support per-frame control, manual sensor control, manual
1914     * post-processing, or arbitrary cropping regions, and has relaxed performance constraints.</p>
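     * <p>As an illustrative sketch, an application could gate its manual-control features on
     * the reported level:</p>
     * <pre><code>
     * int level = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
     * boolean isFullDevice =
     *         (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
     * </code></pre>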
1915     *
1916     * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
1917     * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_PROC
1918     * @see CameraCharacteristics#SCALER_CROPPING_TYPE
1919     * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
1920     * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
1921     * @see CameraCharacteristics#SYNC_MAX_LATENCY
1922     * @see #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
1923     * @see #INFO_SUPPORTED_HARDWARE_LEVEL_FULL
1924     * @see #INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
1925     */
1926    @PublicKey
1927    public static final Key<Integer> INFO_SUPPORTED_HARDWARE_LEVEL =
1928            new Key<Integer>("android.info.supportedHardwareLevel", int.class);
1929
1930    /**
1931     * <p>The maximum number of frames that can occur after a request
1932     * (different than the previous) has been submitted, and before the
1933     * result's state becomes synchronized (by setting
1934     * android.sync.frameNumber to a non-negative value).</p>
1935     * <p>This defines the maximum distance (in number of metadata results)
1936     * between android.sync.frameNumber and the equivalent
1937     * frame number for that result.</p>
1938     * <p>In other words this acts as an upper boundary for how many frames
1939     * must occur before the camera device knows for a fact that the new
1940     * submitted camera settings have been applied in outgoing frames.</p>
1941     * <p>For example, if the distance were 2:</p>
1942     * <pre><code>initial request = X (repeating)
1943     * request1 = X
1944     * request2 = Y
1945     * request3 = Y
1946     * request4 = Y
1947     *
1948     * where requestN has frameNumber N, and the first of the repeating
1949     * initial requests has frameNumber F (and F &lt; 1).
1950     *
1951     * initial result = X' + { android.sync.frameNumber == F }
1952     * result1 = X' + { android.sync.frameNumber == F }
1953     * result2 = X' + { android.sync.frameNumber == CONVERGING }
1954     * result3 = X' + { android.sync.frameNumber == CONVERGING }
1955     * result4 = X' + { android.sync.frameNumber == 2 }
1956     *
1957     * where resultN has frameNumber N.
1958     * </code></pre>
1959     * <p>Since <code>result4</code> has a <code>frameNumber == 4</code> and
1960     * <code>android.sync.frameNumber == 2</code>, the distance is clearly
1961     * <code>4 - 2 = 2</code>.</p>
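     * <p>As an illustrative sketch, an application could check whether new settings are
     * guaranteed to apply with per-frame granularity:</p>
     * <pre><code>
     * int maxLatency = characteristics.get(CameraCharacteristics.SYNC_MAX_LATENCY);
     * boolean perFrameControl =
     *         (maxLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL);
     * </code></pre>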
1962     * @see #SYNC_MAX_LATENCY_PER_FRAME_CONTROL
1963     * @see #SYNC_MAX_LATENCY_UNKNOWN
1964     */
1965    @PublicKey
1966    public static final Key<Integer> SYNC_MAX_LATENCY =
1967            new Key<Integer>("android.sync.maxLatency", int.class);
1968
1969    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
1970     * End generated code
1971     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
1972
1973}
1974