metadata_properties.xml revision 366da5bdc4c7f9398c30bc8d2df29c144cf9a704
1<?xml version="1.0" encoding="utf-8"?>
2<!-- Copyright (C) 2012 The Android Open Source Project
3
4     Licensed under the Apache License, Version 2.0 (the "License");
5     you may not use this file except in compliance with the License.
6     You may obtain a copy of the License at
7
8          http://www.apache.org/licenses/LICENSE-2.0
9
10     Unless required by applicable law or agreed to in writing, software
11     distributed under the License is distributed on an "AS IS" BASIS,
12     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13     See the License for the specific language governing permissions and
14     limitations under the License.
15-->
16<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
17xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
18xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
19
20  <tags>
21    <tag id="BC">
22        Needed for backwards compatibility with old Java API
23    </tag>
24    <tag id="V1">
25        New features for first camera 2 release (API1)
26    </tag>
27    <tag id="RAW">
28        Needed for useful RAW image processing and DNG file support
29    </tag>
30    <tag id="HAL2">
31        Entry is only used by camera device HAL 2.x
32    </tag>
33    <tag id="FULL">
34        Entry is required for full hardware level devices, and optional for other hardware levels
35    </tag>
36    <tag id="DEPTH">
37        Entry is required for the depth capability.
38    </tag>
39    <tag id="REPROC">
40        Entry is required for the YUV or PRIVATE reprocessing capability.
41    </tag>
42    <tag id="FUTURE">
43        Entry is under-specified and is not required for now. It exists for book-keeping purposes;
44        do not implement or use it, as it may be revised in the future.
45    </tag>
46  </tags>
47
48  <types>
49    <typedef name="pairFloatFloat">
50      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
51    </typedef>
52    <typedef name="pairDoubleDouble">
53      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
54    </typedef>
55    <typedef name="rectangle">
56      <language name="java">android.graphics.Rect</language>
57    </typedef>
58    <typedef name="size">
59      <language name="java">android.util.Size</language>
60    </typedef>
61    <typedef name="string">
62      <language name="java">String</language>
63    </typedef>
64    <typedef name="boolean">
65      <language name="java">boolean</language>
66    </typedef>
67    <typedef name="imageFormat">
68      <language name="java">int</language>
69    </typedef>
70    <typedef name="streamConfigurationMap">
71      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
72    </typedef>
73    <typedef name="streamConfiguration">
74      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
75    </typedef>
76    <typedef name="streamConfigurationDuration">
77      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
78    </typedef>
79    <typedef name="face">
80      <language name="java">android.hardware.camera2.params.Face</language>
81    </typedef>
82    <typedef name="meteringRectangle">
83      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
84    </typedef>
85    <typedef name="rangeFloat">
86      <language name="java">android.util.Range&lt;Float&gt;</language>
87    </typedef>
88    <typedef name="rangeInt">
89      <language name="java">android.util.Range&lt;Integer&gt;</language>
90    </typedef>
91    <typedef name="rangeLong">
92      <language name="java">android.util.Range&lt;Long&gt;</language>
93    </typedef>
94    <typedef name="colorSpaceTransform">
95      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
96    </typedef>
97    <typedef name="rggbChannelVector">
98      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
99    </typedef>
100    <typedef name="blackLevelPattern">
101      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
102    </typedef>
103    <typedef name="enumList">
104      <language name="java">int</language>
105    </typedef>
106    <typedef name="sizeF">
107      <language name="java">android.util.SizeF</language>
108    </typedef>
109    <typedef name="point">
110      <language name="java">android.graphics.Point</language>
111    </typedef>
112    <typedef name="tonemapCurve">
113      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
114    </typedef>
115    <typedef name="lensShadingMap">
116      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
117    </typedef>
118    <typedef name="location">
119      <language name="java">android.location.Location</language>
120    </typedef>
121    <typedef name="highSpeedVideoConfiguration">
122      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
123    </typedef>
124    <typedef name="reprocessFormatsMap">
125      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
126    </typedef>
127  </types>
128
129  <namespace name="android">
130    <section name="colorCorrection">
131      <controls>
132        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
133          <enum>
134            <value>TRANSFORM_MATRIX
135              <notes>Use the android.colorCorrection.transform matrix
136                and android.colorCorrection.gains to do color conversion.
137
138                All advanced white balance adjustments (not specified
139                by our white balance pipeline) must be disabled.
140
141                If AWB is enabled with `android.control.awbMode != OFF`, then
142                TRANSFORM_MATRIX is ignored. The camera device will override
143                this value to either FAST or HIGH_QUALITY.
144              </notes>
145            </value>
146            <value>FAST
147              <notes>Color correction processing must not slow down
148              capture rate relative to sensor raw output.
149
150              Advanced white balance adjustments above and beyond
151              the specified white balance pipeline may be applied.
152
153              If AWB is enabled with `android.control.awbMode != OFF`, then
154              the camera device uses the last frame's AWB values
155              (or defaults if AWB has never been run).
156            </notes>
157            </value>
158            <value>HIGH_QUALITY
159              <notes>Color correction processing operates at improved
160              quality but the capture rate might be reduced (relative to sensor
161              raw output rate).
162
163              Advanced white balance adjustments above and beyond
164              the specified white balance pipeline may be applied.
165
166              If AWB is enabled with `android.control.awbMode != OFF`, then
167              the camera device uses the last frame's AWB values
168              (or defaults if AWB has never been run).
169            </notes>
170            </value>
171          </enum>
172
173          <description>
174          The mode control selects how the image data is converted from the
175          sensor's native color into linear sRGB color.
176          </description>
177          <details>
178          When auto-white balance (AWB) is enabled with android.control.awbMode, this
179          control is overridden by the AWB routine. When AWB is disabled, the
180          application controls how the color mapping is performed.
181
182          We define the expected processing pipeline below. For consistency
183          across devices, this pipeline always applies when TRANSFORM_MATRIX is used.
184
185          When either FAST or HIGH_QUALITY is used, the camera device may
186          do additional processing but android.colorCorrection.gains and
187          android.colorCorrection.transform will still be provided by the
188          camera device (in the results) and be roughly correct.
189
190          Switching to TRANSFORM_MATRIX and using the data provided from
191          FAST or HIGH_QUALITY will yield a picture with the same white point
192          as what was produced by the camera device in the earlier frame.
193
194          The expected processing pipeline is as follows:
195
196          ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
197
198          The white balance is encoded by two values, a 4-channel white-balance
199          gain vector (applied in the Bayer domain), and a 3x3 color transform
200          matrix (applied after demosaic).
201
202          The 4-channel white-balance gains are defined as:
203
204              android.colorCorrection.gains = [ R G_even G_odd B ]
205
206          where `G_even` is the gain for green pixels on even rows of the
207          output, and `G_odd` is the gain for green pixels on the odd rows.
208          These may be identical for a given camera device implementation; if
209          the camera device does not support a separate gain for even/odd green
210          channels, it will use the `G_even` value, and write `G_odd` equal to
211          `G_even` in the output result metadata.
212
213          The matrices for color transforms are defined as a 9-entry vector:
214
215              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
216
217          which define a transform from input sensor colors, `P_in = [ r g b ]`,
218          to output linear sRGB, `P_out = [ r' g' b' ]`,
219
220          with colors as follows:
221
222              r' = I0r + I1g + I2b
223              g' = I3r + I4g + I5b
224              b' = I6r + I7g + I8b
225
226          Both the input and output value ranges must match. Overflow/underflow
227          values are clipped to fit within the range.
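
          As a rough illustration only (not part of this definition), the matrix step
          above corresponds to the following sketch, where `transform` is a row-major
          float[9] rendering of android.colorCorrection.transform and `rgb` is a
          demosaiced pixel normalized to `[0, 1.0]`; both names are placeholders
          introduced here:

              // Sketch: apply the 3x3 color transform to one normalized RGB pixel,
              // clipping the result to [0, 1.0] as described above.
              static float[] applyTransform(float[] transform, float[] rgb) {
                  float[] out = new float[3];
                  for (int row = 0; row != 3; row++) {
                      float v = transform[3 * row] * rgb[0]
                              + transform[3 * row + 1] * rgb[1]
                              + transform[3 * row + 2] * rgb[2];
                      out[row] = Math.min(1.0f, Math.max(0.0f, v));
                  }
                  return out;
              }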
228          </details>
229          <hal_details>
230          HAL must support both FAST and HIGH_QUALITY if color correction control is available
231          on the camera device, but the underlying implementation can be the same for both modes.
232          That is, if the highest quality implementation on the camera device does not slow down
233          capture rate, then FAST and HIGH_QUALITY should generate the same output.
234          </hal_details>
235        </entry>
236        <entry name="transform" type="rational" visibility="public"
237               type_notes="3x3 rational matrix in row-major order"
238               container="array" typedef="colorSpaceTransform" hwlevel="full">
239          <array>
240            <size>3</size>
241            <size>3</size>
242          </array>
243          <description>A color transform matrix to use to transform
244          from sensor RGB color space to output linear sRGB color space.
245          </description>
246          <units>Unitless scale factors</units>
247          <details>This matrix is either set by the camera device when the request
248          android.colorCorrection.mode is not TRANSFORM_MATRIX, or
249          directly by the application in the request when the
250          android.colorCorrection.mode is TRANSFORM_MATRIX.
251
252          In the latter case, the camera device may round the matrix to account
253          for precision issues; the final rounded matrix should be reported back
254          in this matrix result metadata. The transform should keep the magnitude
255          of the output color values within `[0, 1.0]` (assuming input color
256          values are within the normalized range `[0, 1.0]`), or clipping may occur.
257
258          The valid range of each matrix element varies on different devices, but
259          values within [-1.5, 3.0] are guaranteed not to be clipped.
260          </details>
261        </entry>
262        <entry name="gains" type="float" visibility="public"
263               type_notes="A 1D array of floats for 4 color channel gains"
264               container="array" typedef="rggbChannelVector" hwlevel="full">
265          <array>
266            <size>4</size>
267          </array>
268          <description>Gains applying to Bayer raw color channels for
269          white-balance.</description>
270          <units>Unitless gain factors</units>
271          <details>
272          These per-channel gains are either set by the camera device
273          when the request android.colorCorrection.mode is not
274          TRANSFORM_MATRIX, or directly by the application in the
275          request when the android.colorCorrection.mode is
276          TRANSFORM_MATRIX.
277
278          The gains in the result metadata are the gains actually
279          applied by the camera device to the current frame.
280
281          The valid range of gains varies on different devices, but gains
282          between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
283          device allows gains below 1.0, using such gains is usually not recommended
284          because they can create color artifacts.
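
          As a hedged sketch (assuming `builder` is an existing CaptureRequest.Builder
          for this camera and the android.hardware.camera2 classes are imported; the
          gain and transform values below are illustrative only), manual white balance
          can be requested as follows:

              // Disable AWB so the manual transform and gains take effect.
              builder.set(CaptureRequest.CONTROL_AWB_MODE,
                      CameraMetadata.CONTROL_AWB_MODE_OFF);
              builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                      CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
              // [R, G_even, G_odd, B] gains; 1.0 means no change for that channel.
              builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                      new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.5f));
              // Identity 3x3 transform, given as numerator/denominator pairs.
              builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                      new ColorSpaceTransform(new int[] {
                          1, 1,  0, 1,  0, 1,
                          0, 1,  1, 1,  0, 1,
                          0, 1,  0, 1,  1, 1}));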
285          </details>
286          <hal_details>
287          The 4-channel white-balance gains are defined in
288          the order of `[R G_even G_odd B]`, where `G_even` is the gain
289          for green pixels on even rows of the output, and `G_odd`
290          is the gain for green pixels on the odd rows.
291
292          If a HAL does not support a separate gain for even/odd green
293          channels, it must use the `G_even` value, and write
294          `G_odd` equal to `G_even` in the output result metadata.
295          </hal_details>
296        </entry>
297        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
298          <enum>
299            <value>OFF
300              <notes>
301                No aberration correction is applied.
302              </notes>
303            </value>
304            <value>FAST
305              <notes>
306                Aberration correction will not slow down capture rate
307                relative to sensor raw output.
308            </notes>
309            </value>
310            <value>HIGH_QUALITY
311              <notes>
312                Aberration correction operates at improved quality but the capture rate might be
313                reduced (relative to sensor raw output rate).
314            </notes>
315            </value>
316          </enum>
317          <description>
318            Mode of operation for the chromatic aberration correction algorithm.
319          </description>
320          <range>android.colorCorrection.availableAberrationModes</range>
321          <details>
322            Chromatic (color) aberration is caused by the fact that different wavelengths of light
323            cannot focus on the same point after exiting the lens. This metadata defines
324            the high-level control of the chromatic aberration correction algorithm, which aims to
325            minimize the chromatic artifacts that may occur along the object boundaries in an
326            image.
327
328            FAST/HIGH_QUALITY both mean that camera device-determined aberration
329            correction will be applied. HIGH_QUALITY mode indicates that the camera device will
330            use the highest-quality aberration correction algorithms, even if it slows down
331            capture rate. FAST means the camera device will not slow down capture rate when
332            applying aberration correction.
333
334            LEGACY devices will always be in FAST mode.
335          </details>
336        </entry>
337      </controls>
338      <dynamic>
339        <clone entry="android.colorCorrection.mode" kind="controls">
340        </clone>
341        <clone entry="android.colorCorrection.transform" kind="controls">
342        </clone>
343        <clone entry="android.colorCorrection.gains" kind="controls">
344        </clone>
345        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
346        </clone>
347      </dynamic>
348      <static>
349        <entry name="availableAberrationModes" type="byte" visibility="public"
350        type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
351          <array>
352            <size>n</size>
353          </array>
354          <description>
355            List of aberration correction modes for android.colorCorrection.aberrationMode that are
356            supported by this camera device.
357          </description>
358          <range>Any value listed in android.colorCorrection.aberrationMode</range>
359          <details>
360            This key lists the valid modes for android.colorCorrection.aberrationMode.  If no
361            aberration correction modes are available for a device, this list will solely include
362            OFF mode. All camera devices will support either OFF or FAST mode.
363
364            Camera devices that support the MANUAL_POST_PROCESSING capability will always list
365            OFF mode. This includes all FULL level devices.
366
367            LEGACY devices will always only support FAST mode.
368          </details>
369          <hal_details>
370            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
371            on the camera device, but the underlying implementation can be the same for both modes.
372            That is, if the highest quality implementation on the camera device does not slow down
373            capture rate, then FAST and HIGH_QUALITY will generate the same output.
374          </hal_details>
375          <tag id="V1" />
376        </entry>
377      </static>
378    </section>
379    <section name="control">
380      <controls>
381        <entry name="aeAntibandingMode" type="byte" visibility="public"
382               enum="true" hwlevel="legacy">
383          <enum>
384            <value>OFF
385              <notes>
386                The camera device will not adjust exposure duration to
387                avoid banding problems.
388              </notes>
389            </value>
390            <value>50HZ
391              <notes>
392                The camera device will adjust exposure duration to
393                avoid banding problems with 50Hz illumination sources.
394              </notes>
395            </value>
396            <value>60HZ
397              <notes>
398                The camera device will adjust exposure duration to
399                avoid banding problems with 60Hz illumination
400                sources.
401              </notes>
402            </value>
403            <value>AUTO
404              <notes>
405                The camera device will automatically adapt its
406                antibanding routine to the current illumination
407                condition. This is the default mode if AUTO is
408                available on a given camera device.
409              </notes>
410            </value>
411          </enum>
412          <description>
413            The desired setting for the camera device's auto-exposure
414            algorithm's antibanding compensation.
415          </description>
416          <range>
417            android.control.aeAvailableAntibandingModes
418          </range>
419          <details>
420            Some kinds of lighting fixtures, such as some fluorescent
421            lights, flicker at the rate of the power supply frequency
422            (60Hz or 50Hz, depending on country). While this is
423            typically not noticeable to a person, it can be visible to
424            a camera device. If a camera sets its exposure time to the
425            wrong value, the flicker may become visible in the
426            viewfinder as flicker or in a final captured image, as a
427            set of variable-brightness bands across the image.
428
429            Therefore, the auto-exposure routines of camera devices
430            include antibanding routines that ensure that the chosen
431            exposure value will not cause such banding. The choice of
432            exposure time depends on the rate of flicker, which the
433            camera device can detect automatically, or the expected
434            rate can be selected by the application using this
435            control.
436
437            A given camera device may not support all of the possible
438            options for the antibanding mode. The
439            android.control.aeAvailableAntibandingModes key contains
440            the available modes for a given camera device.
441
442            AUTO mode is the default if it is available on a given
443            camera device. When AUTO mode is not available, the
444            default will be either 50HZ or 60HZ, and both 50HZ
445            and 60HZ will be available.
446
447            If manual exposure control is enabled (by setting
448            android.control.aeMode or android.control.mode to OFF),
449            then this setting has no effect, and the application must
450            ensure it selects exposure times that do not cause banding
451            issues. The android.statistics.sceneFlicker key can assist
452            the application in this.
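
            As an illustrative sketch (with `characteristics` and `builder` standing
            in for this camera's CameraCharacteristics and an existing
            CaptureRequest.Builder), an application could prefer AUTO and otherwise
            fall back to the local mains frequency:

                int[] modes = characteristics.get(
                        CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
                int chosen = CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_50HZ; // or 60HZ
                for (int mode : modes) {
                    if (mode == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
                        chosen = mode; // AUTO is preferred when available
                    }
                }
                builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, chosen);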
453          </details>
454          <hal_details>
455            For all capture request templates, this field must be set
456            to AUTO if AUTO mode is available. If AUTO is not available,
457            the default must be either 50HZ or 60HZ, and both 50HZ and
458            60HZ must be available.
459
460            If manual exposure control is enabled (by setting
461            android.control.aeMode or android.control.mode to OFF),
462            then the exposure values provided by the application must not be
463            adjusted for antibanding.
464          </hal_details>
465          <tag id="BC" />
466        </entry>
467        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
468          <description>Adjustment to auto-exposure (AE) target image
469          brightness.</description>
470          <units>Compensation steps</units>
471          <range>android.control.aeCompensationRange</range>
472          <details>
473          The adjustment is measured as a count of steps, with the
474          step size defined by android.control.aeCompensationStep and the
475          allowed range by android.control.aeCompensationRange.
476
477          For example, if the exposure value (EV) step is 0.333, '6'
478          will mean an exposure compensation of +2 EV; -3 will mean an
479          exposure compensation of -1 EV. One EV represents a doubling
480          of image brightness. Note that this control will only be
481          effective if android.control.aeMode `!=` OFF. This control
482          will take effect even when android.control.aeLock `== true`.
483
484          If the exposure compensation value is changed, the camera device
485          may take several frames to reach the newly requested exposure target.
486          During that time, the android.control.aeState field will be in the SEARCHING
487          state. Once the new exposure target is reached, android.control.aeState will
488          change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
489          FLASH_REQUIRED (if the scene is too dark for still capture).
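
          As a hypothetical sketch of the step arithmetic above (`characteristics`,
          `builder` and `desiredEv` are placeholder names):

              // Convert a desired EV shift into compensation steps.
              android.util.Rational step = characteristics.get(
                      CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
              int steps = Math.round(desiredEv / step.floatValue());
              // The value must also lie within android.control.aeCompensationRange.
              builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);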
490          </details>
491          <tag id="BC" />
492        </entry>
493        <entry name="aeLock" type="byte" visibility="public" enum="true"
494               typedef="boolean" hwlevel="legacy">
495          <enum>
496            <value>OFF
497            <notes>Auto-exposure lock is disabled; the AE algorithm
498            is free to update its parameters.</notes></value>
499            <value>ON
500            <notes>Auto-exposure lock is enabled; the AE algorithm
501            must not update the exposure and sensitivity parameters
502            while the lock is active.
503
504            android.control.aeExposureCompensation setting changes
505            will still take effect while auto-exposure is locked.
506
507            Some rare LEGACY devices may not support
508            this, in which case the value will always be overridden to OFF.
509            </notes></value>
510          </enum>
511          <description>Whether auto-exposure (AE) is currently locked to its latest
512          calculated values.</description>
513          <details>
514          When set to `true` (ON), the AE algorithm is locked to its latest parameters,
515          and will not change exposure settings until the lock is set to `false` (OFF).
516
517          Note that even when AE is locked, the flash may be fired if
518          the android.control.aeMode is ON_AUTO_FLASH /
519          ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
520
521          When android.control.aeExposureCompensation is changed, even if the AE lock
522          is ON, the camera device will still adjust its exposure value.
523
524          If AE precapture is triggered (see android.control.aePrecaptureTrigger)
525          when AE is already locked, the camera device will not change the exposure time
526          (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
527          parameters. The flash may be fired if the android.control.aeMode
528          is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
529          android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
530          Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
531
532          When an AE precapture sequence is triggered, AE unlock will not be able to unlock
533          the AE if AE is locked by the camera device internally during the precapture metering
534          sequence. In other words, submitting requests with AE unlock has no effect on an
535          ongoing precapture metering sequence. Otherwise, the precapture metering sequence
536          will never succeed in a sequence of preview requests where AE lock is always set
537          to `false`.
538
539          Since the camera device has a pipeline of in-flight requests, the settings that
540          get locked do not necessarily correspond to the settings that were present in the
541          latest capture result received from the camera device, since additional captures
542          and AE updates may have occurred even before the result was sent out. If an
543          application is switching between automatic and manual control and wishes to eliminate
544          any flicker during the switch, the following procedure is recommended:
545
546            1. Starting in auto-AE mode:
547            2. Lock AE
548            3. Wait for the first result to be output that has the AE locked
549            4. Copy exposure settings from that result into a request, set the request to manual AE
550            5. Submit the capture request, proceed to run manual AE as desired.
551
552          See android.control.aeState for AE lock related state transition details.
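
          A minimal sketch of steps 3-5 above (hypothetical: the code is assumed to run
          inside a CameraCaptureSession.CaptureCallback.onCaptureCompleted callback,
          with `result` the delivered TotalCaptureResult and `builder`, `session` and
          `handler` placeholders for the preview request builder, capture session and
          handler; error handling is reduced to a stub):

              // Steps 3 and 4: once a result arrives with AE locked, copy its
              // exposure values into a manual-AE request.
              Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
              if (aeState != null) {
                  if (aeState == CameraMetadata.CONTROL_AE_STATE_LOCKED) {
                      builder.set(CaptureRequest.CONTROL_AE_MODE,
                              CameraMetadata.CONTROL_AE_MODE_OFF);
                      builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
                              result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
                      builder.set(CaptureRequest.SENSOR_SENSITIVITY,
                              result.get(CaptureResult.SENSOR_SENSITIVITY));
                      builder.set(CaptureRequest.SENSOR_FRAME_DURATION,
                              result.get(CaptureResult.SENSOR_FRAME_DURATION));
                      try {
                          // Step 5: switch the repeating request over to manual AE.
                          session.setRepeatingRequest(builder.build(), null, handler);
                      } catch (CameraAccessException e) {
                          // Error handling omitted in this sketch.
                      }
                  }
              }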
553          </details>
554          <tag id="BC" />
555        </entry>
556        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
557          <enum>
558            <value>OFF
559              <notes>
560                The camera device's autoexposure routine is disabled.
561
562                The application-selected android.sensor.exposureTime,
563                android.sensor.sensitivity and
564                android.sensor.frameDuration are used by the camera
565                device, along with android.flash.* fields, if there's
566                a flash unit for this camera device.
567
568                Note that auto-white balance (AWB) and auto-focus (AF)
569                behavior is device dependent when AE is in OFF mode.
570                To have consistent behavior across different devices,
571                it is recommended to either set AWB and AF to OFF mode
572                or lock AWB and AF before setting AE to OFF.
573                See android.control.awbMode, android.control.afMode,
574                android.control.awbLock, and android.control.afTrigger
575                for more details.
576
577                LEGACY devices do not support the OFF mode and will
578                override attempts to use this value to ON.
579              </notes>
580            </value>
581            <value>ON
582              <notes>
583                The camera device's autoexposure routine is active,
584                with no flash control.
585
586                The application's values for
587                android.sensor.exposureTime,
588                android.sensor.sensitivity, and
589                android.sensor.frameDuration are ignored. The
590                application has control over the various
591                android.flash.* fields.
592              </notes>
593            </value>
594            <value>ON_AUTO_FLASH
595              <notes>
596                Like ON, except that the camera device also controls
597                the camera's flash unit, firing it in low-light
598                conditions.
599
600                The flash may be fired during a precapture sequence
601                (triggered by android.control.aePrecaptureTrigger) and
602                may be fired for captures for which the
603                android.control.captureIntent field is set to
604                STILL_CAPTURE.
605              </notes>
606            </value>
607            <value>ON_ALWAYS_FLASH
608              <notes>
609                Like ON, except that the camera device also controls
610                the camera's flash unit, always firing it for still
611                captures.
612
613                The flash may be fired during a precapture sequence
614                (triggered by android.control.aePrecaptureTrigger) and
615                will always be fired for captures for which the
616                android.control.captureIntent field is set to
617                STILL_CAPTURE.
618              </notes>
619            </value>
620            <value>ON_AUTO_FLASH_REDEYE
621              <notes>
622                Like ON_AUTO_FLASH, but with automatic red eye
623                reduction.
624
625                If deemed necessary by the camera device, a red eye
626                reduction flash will fire during the precapture
627                sequence.
628              </notes>
629            </value>
630          </enum>
631          <description>The desired mode for the camera device's
632          auto-exposure routine.</description>
633          <range>android.control.aeAvailableModes</range>
634          <details>
635            This control is only effective if android.control.mode is
636            AUTO.
637
638            When set to any of the ON modes, the camera device's
639            auto-exposure routine is enabled, overriding the
640            application's selected exposure time, sensor sensitivity,
641            and frame duration (android.sensor.exposureTime,
642            android.sensor.sensitivity, and
643            android.sensor.frameDuration). If one of the FLASH modes
644            is selected, the camera device's flash unit controls are
645            also overridden.
646
647            The FLASH modes are only available if the camera device
648            has a flash unit (android.flash.info.available is `true`).
649
650            If flash TORCH mode is desired, this field must be set to
651            ON or OFF, and android.flash.mode set to TORCH.
652
653            When set to any of the ON modes, the values chosen by the
654            camera device auto-exposure routine for the overridden
655            fields for a given capture will be available in its
656            CaptureResult.
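
            As an illustrative sketch (with `builder` standing in for an existing
            CaptureRequest.Builder), flash TORCH mode can be requested as described
            above:

                // Keep auto-exposure running but take manual control of the flash.
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CameraMetadata.CONTROL_AE_MODE_ON);
                builder.set(CaptureRequest.FLASH_MODE,
                        CameraMetadata.FLASH_MODE_TORCH);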
657          </details>
658          <tag id="BC" />
659        </entry>
660        <entry name="aeRegions" type="int32" visibility="public"
661            optional="true" container="array" typedef="meteringRectangle">
662          <array>
663            <size>5</size>
664            <size>area_count</size>
665          </array>
666          <description>List of metering areas to use for auto-exposure adjustment.</description>
667          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
668          <range>Coordinates must be between `[(0,0), (width, height))` of
669          android.sensor.info.activeArraySize</range>
670          <details>
671              Not available if android.control.maxRegionsAe is 0.
672              Otherwise will always be present.
673
674              The maximum number of regions supported by the device is determined by the value
675              of android.control.maxRegionsAe.
676
677              The coordinate system is based on the active pixel array,
678              with (0,0) being the top-left pixel in the active pixel array, and
679              (android.sensor.info.activeArraySize.width - 1,
680              android.sensor.info.activeArraySize.height - 1) being the
681              bottom-right pixel in the active pixel array.
682
683              The weight must be within `[0, 1000]`, and represents a weight
684              for every pixel in the area. This means that a large metering area
685              with the same weight as a smaller area will have more effect in
686              the metering result. Metering areas can partially overlap and the
687              camera device will add the weights in the overlap region.
688
689              The weights are relative to weights of other exposure metering regions, so if only one
690              region is used, all non-zero weights will have the same effect. A region with 0
691              weight is ignored.
692
693              If all regions have 0 weight, then no specific metering area needs to be used by the
694              camera device.
695
696              If the metering region is outside the used android.scaler.cropRegion returned in
697              capture result metadata, the camera device will ignore the sections outside the crop
698              region and output only the intersection rectangle as the metering region in the result
699              metadata.  If the region is entirely outside the crop region, it will be ignored and
700              not reported in the result metadata.
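
              As a hedged example (with `characteristics` and `builder` as placeholder
              names for this camera's CameraCharacteristics and an existing
              CaptureRequest.Builder), a single metering region covering the center
              quarter of the active array could be set up as:

                  android.graphics.Rect active = characteristics.get(
                          CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                  int regionWidth = active.width() / 2;
                  int regionHeight = active.height() / 2;
                  MeteringRectangle region = new MeteringRectangle(
                          regionWidth / 2, regionHeight / 2,      // top-left (x, y)
                          regionWidth, regionHeight,              // width, height
                          MeteringRectangle.METERING_WEIGHT_MAX); // weight in [0, 1000]
                  builder.set(CaptureRequest.CONTROL_AE_REGIONS,
                          new MeteringRectangle[] {region});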
701          </details>
702          <hal_details>
703              The HAL level representation of MeteringRectangle[] is a
704              int[5 * area_count].
705              Every five elements represent a metering region of
706              (xmin, ymin, xmax, ymax, weight).
707              The rectangle is defined to be inclusive on xmin and ymin, but
708              exclusive on xmax and ymax.
709          </hal_details>
710          <tag id="BC" />
711        </entry>
712        <entry name="aeTargetFpsRange" type="int32" visibility="public"
713               container="array" typedef="rangeInt" hwlevel="legacy">
714          <array>
715            <size>2</size>
716          </array>
717          <description>Range over which the auto-exposure routine can
718          adjust the capture frame rate to maintain good
719          exposure.</description>
720          <units>Frames per second (FPS)</units>
721          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
722          <details>Only constrains auto-exposure (AE) algorithm, not
723          manual control of android.sensor.exposureTime and
724          android.sensor.frameDuration.</details>
725          <tag id="BC" />
726        </entry>
727        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
728               enum="true" hwlevel="limited">
729          <enum>
730            <value>IDLE
731              <notes>The trigger is idle.</notes>
732            </value>
733            <value>START
734              <notes>The precapture metering sequence will be started
735              by the camera device.
736
737              The exact effect of the precapture trigger depends on
738              the current AE mode and state.</notes>
739            </value>
740            <value>CANCEL
741              <notes>The camera device will cancel any currently active or completed
742              precapture metering sequence, the auto-exposure routine will return to its
743              initial state.</notes>
744            </value>
745          </enum>
746          <description>Whether the camera device will trigger a precapture
747          metering sequence when it processes this request.</description>
748          <details>This entry is normally set to IDLE, or is not
749          included at all in the request settings. When included and
750          set to START, the camera device will trigger the auto-exposure (AE)
751          precapture metering sequence.
752
753          When set to CANCEL, the camera device will cancel any active
754          precapture metering trigger, and return to its initial AE state.
755          If a precapture metering sequence is already completed, and the camera
756          device has implicitly locked the AE for subsequent still capture, the
757          CANCEL trigger will unlock the AE and return to its initial AE state.
758
759          The precapture sequence should be triggered before starting a
760          high-quality still capture for final metering decisions to
761          be made, and for firing pre-capture flash pulses to estimate
762          scene brightness and required final capture flash power, when
763          the flash is enabled.
764
765          Normally, this entry should be set to START for only a
766          single request, and the application should wait until the
767          sequence completes before starting a new one.
768
769          When a precapture metering sequence is finished, the camera device
770          may lock the auto-exposure routine internally to be able to accurately expose the
771          subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
772          For this case, the AE may not resume normal scan if no subsequent still capture is
773          submitted. To ensure that the AE routine restarts normal scan, the application should
774          submit a request with `android.control.aeLock == true`, followed by a request
775          with `android.control.aeLock == false`, if the application decides not to submit a
776          still capture request after the precapture sequence completes. Alternatively, for
777          API level 23 or newer devices, the CANCEL trigger can be used to unlock the camera
778          device's internally locked AE if the application doesn't submit a still capture request
779          after the AE precapture trigger. Note that CANCEL was added in API level 23, and must not
780          be used in devices that have earlier API levels.
781
782          The exact effect of auto-exposure (AE) precapture trigger
783          depends on the current AE mode and state; see
784          android.control.aeState for AE precapture state transition
785          details.
786
787          On LEGACY-level devices, the precapture trigger is not supported;
788          capturing a high-resolution JPEG image will automatically trigger a
789          precapture sequence before the high-resolution capture, including
790          potentially firing a pre-capture flash.
791
792          Using the precapture trigger and the auto-focus trigger android.control.afTrigger
793          simultaneously is allowed. However, since these triggers often require cooperation between
794          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
795          focus sweep), the camera device may delay acting on a later trigger until the previous
796          trigger has been fully handled. This may lead to longer intervals between the trigger and
797          changes to android.control.aeState indicating the start of the precapture sequence, for
798          example.
799
800          If both the precapture and the auto-focus trigger are activated on the same request, then
801          the camera device will complete them in the optimal order for that device.
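
          As a rough sketch (placeholder names `builder`, `session` and `handler`;
          the enclosing method is assumed to declare `throws CameraAccessException`),
          the trigger is typically submitted as a single capture on top of the
          repeating preview request:

              // Start the AE precapture metering sequence exactly once.
              builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                      CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
              session.capture(builder.build(), null, handler);
              // Reset the trigger so later requests do not restart the sequence, then
              // wait for android.control.aeState to leave PRECAPTURE before issuing
              // the still capture.
              builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                      CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);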
802          </details>
803          <hal_details>
804          The HAL must support triggering the AE precapture trigger while an AF trigger is active
805          (and vice versa), or at the same time as the AF trigger.  It is acceptable for the HAL to
806          treat these as two consecutive triggers, for example handling the AF trigger and then the
807          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
808          to minimize the latency for converging both focus and exposure/flash usage.
809          </hal_details>
810          <tag id="BC" />
811        </entry>
812        <entry name="afMode" type="byte" visibility="public" enum="true"
813               hwlevel="legacy">
814          <enum>
815            <value>OFF
816            <notes>The auto-focus routine does not control the lens;
817            android.lens.focusDistance is controlled by the
818            application.</notes></value>
819            <value>AUTO
820            <notes>Basic automatic focus mode.
821
822            In this mode, the lens does not move unless
823            the autofocus trigger action is called. When that trigger
824            is activated, AF will transition to ACTIVE_SCAN, then to
825            the outcome of the scan (FOCUSED or NOT_FOCUSED).
826
827            Always supported if the lens is not fixed focus.
828
829            Use android.lens.info.minimumFocusDistance to determine if the lens
830            is fixed-focus.
831
832            Triggering AF_CANCEL resets the lens position to default,
833            and sets the AF state to INACTIVE.</notes></value>
834            <value>MACRO
835            <notes>Close-up focusing mode.
836
837            In this mode, the lens does not move unless the
838            autofocus trigger action is called. When that trigger is
839            activated, AF will transition to ACTIVE_SCAN, then to
840            the outcome of the scan (FOCUSED or NOT_FOCUSED). This
841            mode is optimized for focusing on objects very close to
842            the camera.
843
844            Triggering cancel AF resets the lens position to default, and
845            sets the AF state to INACTIVE.</notes></value>
849            <value>CONTINUOUS_VIDEO
850            <notes>In this mode, the AF algorithm modifies the lens
851            position continually to attempt to provide a
852            constantly-in-focus image stream.
853
854            The focusing behavior should be suitable for good quality
855            video recording; typically this means slower focus
856            movement and no overshoots. When the AF trigger is not
857            involved, the AF algorithm should start in INACTIVE state,
858            and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
859            states as appropriate. When the AF trigger is activated,
860            the algorithm should immediately transition into
861            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
862            lens position until a cancel AF trigger is received.
863
864            Once cancel is received, the algorithm should transition
865            back to INACTIVE and resume passive scan. Note that this
866            behavior is not identical to CONTINUOUS_PICTURE, since an
867            ongoing PASSIVE_SCAN must immediately be
868            canceled.</notes></value>
869            <value>CONTINUOUS_PICTURE
870            <notes>In this mode, the AF algorithm modifies the lens
871            position continually to attempt to provide a
872            constantly-in-focus image stream.
873
874            The focusing behavior should be suitable for still image
875            capture; typically this means focusing as fast as
876            possible. When the AF trigger is not involved, the AF
877            algorithm should start in INACTIVE state, and then
878            transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
879            appropriate as it attempts to maintain focus. When the AF
880            trigger is activated, the algorithm should finish its
881            PASSIVE_SCAN if active, and then transition into
882            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
883            lens position until a cancel AF trigger is received.
884
885            When the AF cancel trigger is activated, the algorithm
886            should transition back to INACTIVE and then act as if it
887            has just been started.</notes></value>
888            <value>EDOF
889            <notes>Extended depth of field (digital focus) mode.
890
891            The camera device will produce images with an extended
892            depth of field automatically; no special focusing
893            operations need to be done before taking a picture.
894
895            AF triggers are ignored, and the AF state will always be
896            INACTIVE.</notes></value>
897          </enum>
898          <description>Whether auto-focus (AF) is currently enabled, and what
899          mode it is set to.</description>
900          <range>android.control.afAvailableModes</range>
901          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
902          (i.e. `android.lens.info.minimumFocusDistance &gt; 0`). Also note that
903          when android.control.aeMode is OFF, the behavior of AF is device
904          dependent. It is recommended to lock AF by using android.control.afTrigger before
905          setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
906
907          If the lens is controlled by the camera device auto-focus algorithm,
908          the camera device will report the current AF status in android.control.afState
909          in result metadata.</details>
910          <hal_details>
911          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
912          request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
913          up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
914          android.control.afState for detailed state transitions), which indicates that the lens is
915          locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
916          after the lens is locked, the HAL must compensate for this movement appropriately such that
917          the same focal plane remains in focus.
918
919          When afMode is one of the continuous auto focus modes, the HAL is free to start an AF
920          scan whenever it's not locked. When the lens is locked after an AF trigger
921          (see android.control.afState for detailed state transitions), the HAL should maintain the
922          same lock behavior as above.
923
924          When afMode is OFF, the application controls focus manually. The accuracy of the
925          focus distance control depends on the android.lens.info.focusDistanceCalibration.
926          However, for any manual focus distance control, the lens must not move, regardless of
927          the camera movement.
928
929          To put this in concrete terms, if the camera has lens elements which may move based on
930          camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
931          remain in a fixed position invariant to the camera's orientation or motion, for example,
932          by using accelerometer measurements in the lens control logic. This is a typical issue
933          that will arise on camera modules with open-loop VCMs.
934          </hal_details>
935          <tag id="BC" />
936        </entry>
937        <entry name="afRegions" type="int32" visibility="public"
938               optional="true" container="array" typedef="meteringRectangle">
939          <array>
940            <size>5</size>
941            <size>area_count</size>
942          </array>
943          <description>List of metering areas to use for auto-focus.</description>
944          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
945          <range>Coordinates must be between `[(0,0), (width, height))` of
946          android.sensor.info.activeArraySize</range>
947          <details>
948              Not available if android.control.maxRegionsAf is 0.
949              Otherwise will always be present.
950
951              The maximum number of focus areas supported by the device is determined by the value
952              of android.control.maxRegionsAf.
953
954              The coordinate system is based on the active pixel array,
955              with (0,0) being the top-left pixel in the active pixel array, and
956              (android.sensor.info.activeArraySize.width - 1,
957              android.sensor.info.activeArraySize.height - 1) being the
958              bottom-right pixel in the active pixel array.
959
960              The weight must be within `[0, 1000]`, and represents a weight
961              for every pixel in the area. This means that a large metering area
962              with the same weight as a smaller area will have more effect in
963              the metering result. Metering areas can partially overlap and the
964              camera device will add the weights in the overlap region.
965
966              The weights are relative to weights of other metering regions, so if only one region
967              is used, all non-zero weights will have the same effect. A region with 0 weight is
968              ignored.
969
970              If all regions have 0 weight, then no specific metering area needs to be used by the
971              camera device.
972
973              If the metering region is outside the used android.scaler.cropRegion returned in
974              capture result metadata, the camera device will ignore the sections outside the crop
975              region and output only the intersection rectangle as the metering region in the result
976              metadata. If the region is entirely outside the crop region, it will be ignored and
977              not reported in the result metadata.
978          </details>
979          <hal_details>
980              The HAL level representation of MeteringRectangle[] is a
981              int[5 * area_count].
982              Every five elements represent a metering region of
983              (xmin, ymin, xmax, ymax, weight).
984              The rectangle is defined to be inclusive on xmin and ymin, but
985              exclusive on xmax and ymax.
986          </hal_details>
987          <tag id="BC" />
988        </entry>
989        <entry name="afTrigger" type="byte" visibility="public" enum="true"
990               hwlevel="legacy">
991          <enum>
992            <value>IDLE
993              <notes>The trigger is idle.</notes>
994            </value>
995            <value>START
996              <notes>Autofocus will trigger now.</notes>
997            </value>
998            <value>CANCEL
999              <notes>Autofocus will return to its initial
1000              state, and cancel any currently active trigger.</notes>
1001            </value>
1002          </enum>
1003          <description>
1004          Whether the camera device will trigger autofocus for this request.
1005          </description>
1006          <details>This entry is normally set to IDLE, or is not
1007          included at all in the request settings.
1008
1009          When included and set to START, the camera device will trigger the
1010          autofocus algorithm. If autofocus is disabled, this trigger has no effect.
1011
1012          When set to CANCEL, the camera device will cancel any active trigger,
1013          and return to its initial AF state.
1014
1015          Generally, applications should set this entry to START or CANCEL for only a
1016          single capture, and then return it to IDLE (or not set at all). Specifying
1017          START for multiple captures in a row means restarting the AF operation over
1018          and over again.
1019
1020          See android.control.afState for what the trigger means for each AF mode.
1021
1022          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
1023          simultaneously is allowed. However, since these triggers often require cooperation between
1024          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
1025          focus sweep), the camera device may delay acting on a later trigger until the previous
1026          trigger has been fully handled. This may lead to longer intervals between the trigger and
1027          changes to android.control.afState, for example.
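
          A hedged sketch of a one-shot autofocus scan (placeholder names `builder`,
          `session` and `handler`; the enclosing method is assumed to declare
          `throws CameraAccessException`):

              // The AF trigger only has an effect in an active AF mode such as AUTO.
              builder.set(CaptureRequest.CONTROL_AF_MODE,
                      CameraMetadata.CONTROL_AF_MODE_AUTO);
              builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                      CameraMetadata.CONTROL_AF_TRIGGER_START);
              session.capture(builder.build(), null, handler);
              // Return the trigger to IDLE so subsequent requests do not restart the
              // scan; monitor android.control.afState for the outcome.
              builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                      CameraMetadata.CONTROL_AF_TRIGGER_IDLE);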
1028          </details>
1029          <hal_details>
1030          The HAL must support triggering the AF trigger while an AE precapture trigger is active
1031          (and vice versa), or at the same time as the AE trigger.  It is acceptable for the HAL to
1032          treat these as two consecutive triggers, for example handling the AF trigger and then the
1033          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
1034          to minimize the latency for converging both focus and exposure/flash usage.
1035          </hal_details>
1036          <tag id="BC" />
1037        </entry>
1038        <entry name="awbLock" type="byte" visibility="public" enum="true"
1039               typedef="boolean" hwlevel="legacy">
1040          <enum>
1041            <value>OFF
1042            <notes>Auto-white balance lock is disabled; the AWB
1043            algorithm is free to update its parameters if in AUTO
1044            mode.</notes></value>
1045            <value>ON
1046            <notes>Auto-white balance lock is enabled; the AWB
1047            algorithm will not update its parameters while the lock
1048            is active.</notes></value>
1049          </enum>
1050          <description>Whether auto-white balance (AWB) is currently locked to its
1051          latest calculated values.</description>
1052          <details>
1053          When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
1054          and will not change color balance settings until the lock is set to `false` (OFF).
1055
1056          Since the camera device has a pipeline of in-flight requests, the settings that
1057          get locked do not necessarily correspond to the settings that were present in the
1058          latest capture result received from the camera device, since additional captures
1059          and AWB updates may have occurred even before the result was sent out. If an
1060          application is switching between automatic and manual control and wishes to eliminate
1061          any flicker during the switch, the following procedure is recommended:
1062
1063            1. Starting in auto-AWB mode:
1064            2. Lock AWB
1065            3. Wait for the first result to be output that has the AWB locked
1066            4. Copy AWB settings from that result into a request, set the request to manual AWB
1067            5. Submit the capture request, proceed to run manual AWB as desired.
1068
1069          Note that AWB lock is only meaningful when
1070          android.control.awbMode is in the AUTO mode; in other modes,
1071          AWB is already fixed to a specific setting.
1072
1073          Some LEGACY devices may not support ON; the value is then overridden to OFF.
1074          </details>
1075          <tag id="BC" />
1076        </entry>
1077        <entry name="awbMode" type="byte" visibility="public" enum="true"
1078               hwlevel="legacy">
1079          <enum>
1080            <value>OFF
1081            <notes>
1082            The camera device's auto-white balance routine is disabled.
1083
1084            The application-selected color transform matrix
1085            (android.colorCorrection.transform) and gains
1086            (android.colorCorrection.gains) are used by the camera
1087            device for manual white balance control.
1088            </notes>
1089            </value>
1090            <value>AUTO
1091            <notes>
1092            The camera device's auto-white balance routine is active.
1093
1094            The application's values for android.colorCorrection.transform
1095            and android.colorCorrection.gains are ignored.
1096            For devices that support the MANUAL_POST_PROCESSING capability, the
1097            values used by the camera device for the transform and gains
1098            will be available in the capture result for this request.
1099            </notes>
1100            </value>
1101            <value>INCANDESCENT
1102            <notes>
1103            The camera device's auto-white balance routine is disabled;
1104            the camera device uses incandescent light as the assumed scene
1105            illumination for white balance.
1106
1107            While the exact white balance transforms are up to the
1108            camera device, they will approximately match the CIE
1109            standard illuminant A.
1110
1111            The application's values for android.colorCorrection.transform
1112            and android.colorCorrection.gains are ignored.
1113            For devices that support the MANUAL_POST_PROCESSING capability, the
1114            values used by the camera device for the transform and gains
1115            will be available in the capture result for this request.
1116            </notes>
1117            </value>
1118            <value>FLUORESCENT
1119            <notes>
1120            The camera device's auto-white balance routine is disabled;
1121            the camera device uses fluorescent light as the assumed scene
1122            illumination for white balance.
1123
1124            While the exact white balance transforms are up to the
1125            camera device, they will approximately match the CIE
1126            standard illuminant F2.
1127
1128            The application's values for android.colorCorrection.transform
1129            and android.colorCorrection.gains are ignored.
1130            For devices that support the MANUAL_POST_PROCESSING capability, the
1131            values used by the camera device for the transform and gains
1132            will be available in the capture result for this request.
1133            </notes>
1134            </value>
1135            <value>WARM_FLUORESCENT
1136            <notes>
1137            The camera device's auto-white balance routine is disabled;
1138            the camera device uses warm fluorescent light as the assumed scene
1139            illumination for white balance.
1140
1141            While the exact white balance transforms are up to the
1142            camera device, they will approximately match the CIE
1143            standard illuminant F4.
1144
1145            The application's values for android.colorCorrection.transform
1146            and android.colorCorrection.gains are ignored.
1147            For devices that support the MANUAL_POST_PROCESSING capability, the
1148            values used by the camera device for the transform and gains
1149            will be available in the capture result for this request.
1150            </notes>
1151            </value>
1152            <value>DAYLIGHT
1153            <notes>
1154            The camera device's auto-white balance routine is disabled;
1155            the camera device uses daylight light as the assumed scene
1156            illumination for white balance.
1157
1158            While the exact white balance transforms are up to the
1159            camera device, they will approximately match the CIE
1160            standard illuminant D65.
1161
1162            The application's values for android.colorCorrection.transform
1163            and android.colorCorrection.gains are ignored.
1164            For devices that support the MANUAL_POST_PROCESSING capability, the
1165            values used by the camera device for the transform and gains
1166            will be available in the capture result for this request.
1167            </notes>
1168            </value>
1169            <value>CLOUDY_DAYLIGHT
1170            <notes>
1171            The camera device's auto-white balance routine is disabled;
1172            the camera device uses cloudy daylight light as the assumed scene
1173            illumination for white balance.
1174
1175            The application's values for android.colorCorrection.transform
1176            and android.colorCorrection.gains are ignored.
1177            For devices that support the MANUAL_POST_PROCESSING capability, the
1178            values used by the camera device for the transform and gains
1179            will be available in the capture result for this request.
1180            </notes>
1181            </value>
1182            <value>TWILIGHT
1183            <notes>
1184            The camera device's auto-white balance routine is disabled;
1185            the camera device uses twilight light as the assumed scene
1186            illumination for white balance.
1187
1188            The application's values for android.colorCorrection.transform
1189            and android.colorCorrection.gains are ignored.
1190            For devices that support the MANUAL_POST_PROCESSING capability, the
1191            values used by the camera device for the transform and gains
1192            will be available in the capture result for this request.
1193            </notes>
1194            </value>
1195            <value>SHADE
1196            <notes>
1197            The camera device's auto-white balance routine is disabled;
1198            the camera device uses shade light as the assumed scene
1199            illumination for white balance.
1200
1201            The application's values for android.colorCorrection.transform
1202            and android.colorCorrection.gains are ignored.
1203            For devices that support the MANUAL_POST_PROCESSING capability, the
1204            values used by the camera device for the transform and gains
1205            will be available in the capture result for this request.
1206            </notes>
1207            </value>
1208          </enum>
1209          <description>Whether auto-white balance (AWB) is currently setting the color
1210          transform fields, and what its illumination target
1211          is.</description>
1212          <range>android.control.awbAvailableModes</range>
1213          <details>
1214          This control is only effective if android.control.mode is AUTO.
1215
1216          When set to the ON mode, the camera device's auto-white balance
1217          routine is enabled, overriding the application's selected
1218          android.colorCorrection.transform, android.colorCorrection.gains and
1219          android.colorCorrection.mode. Note that when android.control.aeMode
          is OFF, the behavior of AWB is device dependent. It is recommended to
1221          also set AWB mode to OFF or lock AWB by using android.control.awbLock before
1222          setting AE mode to OFF.
1223
1224          When set to the OFF mode, the camera device's auto-white balance
1225          routine is disabled. The application manually controls the white
1226          balance by android.colorCorrection.transform, android.colorCorrection.gains
1227          and android.colorCorrection.mode.
1228
1229          When set to any other modes, the camera device's auto-white
1230          balance routine is disabled. The camera device uses each
1231          particular illumination target for white balance
1232          adjustment. The application's values for
1233          android.colorCorrection.transform,
1234          android.colorCorrection.gains and
1235          android.colorCorrection.mode are ignored.
1236          </details>
1237          <tag id="BC" />
1238        </entry>
1239        <entry name="awbRegions" type="int32" visibility="public"
1240               optional="true" container="array" typedef="meteringRectangle">
1241          <array>
1242            <size>5</size>
1243            <size>area_count</size>
1244          </array>
1245          <description>List of metering areas to use for auto-white-balance illuminant
1246          estimation.</description>
1247          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
1248          <range>Coordinates must be between `[(0,0), (width, height))` of
1249          android.sensor.info.activeArraySize</range>
1250          <details>
1251              Not available if android.control.maxRegionsAwb is 0.
1252              Otherwise will always be present.
1253
1254              The maximum number of regions supported by the device is determined by the value
1255              of android.control.maxRegionsAwb.
1256
1257              The coordinate system is based on the active pixel array,
1258              with (0,0) being the top-left pixel in the active pixel array, and
1259              (android.sensor.info.activeArraySize.width - 1,
1260              android.sensor.info.activeArraySize.height - 1) being the
1261              bottom-right pixel in the active pixel array.
1262
1263              The weight must range from 0 to 1000, and represents a weight
1264              for every pixel in the area. This means that a large metering area
1265              with the same weight as a smaller area will have more effect in
1266              the metering result. Metering areas can partially overlap and the
1267              camera device will add the weights in the overlap region.
1268
1269              The weights are relative to weights of other white balance metering regions, so if
1270              only one region is used, all non-zero weights will have the same effect. A region with
1271              0 weight is ignored.
1272
1273              If all regions have 0 weight, then no specific metering area needs to be used by the
1274              camera device.
1275
1276              If the metering region is outside the used android.scaler.cropRegion returned in
1277              capture result metadata, the camera device will ignore the sections outside the crop
1278              region and output only the intersection rectangle as the metering region in the result
1279              metadata.  If the region is entirely outside the crop region, it will be ignored and
1280              not reported in the result metadata.
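              As a hypothetical illustration (the `characteristics` and `requestBuilder` objects
              below are assumed to already exist, and the device is assumed to report a non-zero
              android.control.maxRegionsAwb), an application could meter AWB on the center quarter
              of the active array like this:

                  // Requires android.graphics.Rect, android.hardware.camera2.*, and
                  // android.hardware.camera2.params.MeteringRectangle imports.
                  Rect active = characteristics.get(
                          CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                  MeteringRectangle center = new MeteringRectangle(
                          active.width() / 4, active.height() / 4,   // top-left (x, y)
                          active.width() / 2, active.height() / 2,   // width, height
                          MeteringRectangle.METERING_WEIGHT_MAX);    // weight 1000
                  requestBuilder.set(CaptureRequest.CONTROL_AWB_REGIONS,
                          new MeteringRectangle[] { center });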
1281          </details>
1282          <hal_details>
1283              The HAL level representation of MeteringRectangle[] is a
1284              int[5 * area_count].
1285              Every five elements represent a metering region of
1286              (xmin, ymin, xmax, ymax, weight).
1287              The rectangle is defined to be inclusive on xmin and ymin, but
1288              exclusive on xmax and ymax.
1289          </hal_details>
1290          <tag id="BC" />
1291        </entry>
1292        <entry name="captureIntent" type="byte" visibility="public" enum="true"
1293               hwlevel="legacy">
1294          <enum>
1295            <value>CUSTOM
1296            <notes>The goal of this request doesn't fall into the other
1297            categories. The camera device will default to preview-like
1298            behavior.</notes></value>
1299            <value>PREVIEW
1300            <notes>This request is for a preview-like use case.
1301
1302            The precapture trigger may be used to start off a metering
1303            w/flash sequence.
1304            </notes></value>
1305            <value>STILL_CAPTURE
1306            <notes>This request is for a still capture-type
1307            use case.
1308
1309            If the flash unit is under automatic control, it may fire as needed.
1310            </notes></value>
1311            <value>VIDEO_RECORD
1312            <notes>This request is for a video recording
1313            use case.</notes></value>
1314            <value>VIDEO_SNAPSHOT
1315            <notes>This request is for a video snapshot (still
1316            image while recording video) use case.
1317
1318            The camera device should take the highest-quality image
1319            possible (given the other settings) without disrupting the
1320            frame rate of video recording.  </notes></value>
1321            <value>ZERO_SHUTTER_LAG
            <notes>This request is for a ZSL use case; the
1323            application will stream full-resolution images and
1324            reprocess one or several later for a final
1325            capture.
1326            </notes></value>
1327            <value>MANUAL
            <notes>This request is for a manual capture use case where
            the application wants to directly control the capture parameters.
1330
1331            For example, the application may wish to manually control
1332            android.sensor.exposureTime, android.sensor.sensitivity, etc.
1333            </notes></value>
1334          </enum>
1335          <description>Information to the camera device 3A (auto-exposure,
1336          auto-focus, auto-white balance) routines about the purpose
1337          of this capture, to help the camera device to decide optimal 3A
1338          strategy.</description>
1339          <details>This control (except for MANUAL) is only effective if
1340          `android.control.mode != OFF` and any 3A routine is active.
1341
1342          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
1343          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
1344          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
1345          always supported.
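
          As an illustrative sketch, the intent is normally set implicitly by the template used to
          create the request, but it can also be set explicitly. The snippet below assumes an open
          CameraDevice `device` and a surrounding method that handles CameraAccessException:

              CaptureRequest.Builder builder =
                      device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
              // TEMPLATE_STILL_CAPTURE already sets the intent; an explicit set is equivalent:
              builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                      CaptureRequest.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);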
1346          </details>
1347          <tag id="BC" />
1348        </entry>
1349        <entry name="effectMode" type="byte" visibility="public" enum="true"
1350               hwlevel="legacy">
1351          <enum>
1352            <value>OFF
1353              <notes>
1354              No color effect will be applied.
1355              </notes>
1356            </value>
1357            <value optional="true">MONO
1358              <notes>
1359              A "monocolor" effect where the image is mapped into
1360              a single color.
1361
1362              This will typically be grayscale.
1363              </notes>
1364            </value>
1365            <value optional="true">NEGATIVE
1366              <notes>
1367              A "photo-negative" effect where the image's colors
1368              are inverted.
1369              </notes>
1370            </value>
1371            <value optional="true">SOLARIZE
1372              <notes>
1373              A "solarisation" effect (Sabattier effect) where the
1374              image is wholly or partially reversed in
1375              tone.
1376              </notes>
1377            </value>
1378            <value optional="true">SEPIA
1379              <notes>
1380              A "sepia" effect where the image is mapped into warm
1381              gray, red, and brown tones.
1382              </notes>
1383            </value>
1384            <value optional="true">POSTERIZE
1385              <notes>
1386              A "posterization" effect where the image uses
1387              discrete regions of tone rather than a continuous
1388              gradient of tones.
1389              </notes>
1390            </value>
1391            <value optional="true">WHITEBOARD
1392              <notes>
1393              A "whiteboard" effect where the image is typically displayed
1394              as regions of white, with black or grey details.
1395              </notes>
1396            </value>
1397            <value optional="true">BLACKBOARD
1398              <notes>
1399              A "blackboard" effect where the image is typically displayed
1400              as regions of black, with white or grey details.
1401              </notes>
1402            </value>
1403            <value optional="true">AQUA
1404              <notes>
1405              An "aqua" effect where a blue hue is added to the image.
1406              </notes>
1407            </value>
1408          </enum>
1409          <description>A special color effect to apply.</description>
1410          <range>android.control.availableEffects</range>
1411          <details>
1412          When this mode is set, a color effect will be applied
1413          to images produced by the camera device. The interpretation
1414          and implementation of these color effects is left to the
1415          implementor of the camera device, and should not be
1416          depended on to be consistent (or present) across all
1417          devices.
1418          </details>
1419          <tag id="BC" />
1420        </entry>
1421        <entry name="mode" type="byte" visibility="public" enum="true"
1422               hwlevel="legacy">
1423          <enum>
1424            <value>OFF
1425            <notes>Full application control of pipeline.
1426
1427            All control by the device's metering and focusing (3A)
1428            routines is disabled, and no other settings in
1429            android.control.* have any effect, except that
1430            android.control.captureIntent may be used by the camera
1431            device to select post-processing values for processing
1432            blocks that do not allow for manual control, or are not
1433            exposed by the camera API.
1434
1435            However, the camera device's 3A routines may continue to
1436            collect statistics and update their internal state so that
1437            when control is switched to AUTO mode, good control values
1438            can be immediately applied.
1439            </notes></value>
1440            <value>AUTO
1441            <notes>Use settings for each individual 3A routine.
1442
1443            Manual control of capture parameters is disabled. All
1444            controls in android.control.* besides sceneMode take
1445            effect.</notes></value>
1446            <value optional="true">USE_SCENE_MODE
1447            <notes>Use a specific scene mode.
1448
1449            Enabling this disables control.aeMode, control.awbMode and
1450            control.afMode controls; the camera device will ignore
1451            those settings while USE_SCENE_MODE is active (except for
1452            FACE_PRIORITY scene mode). Other control entries are still active.
1453            This setting can only be used if scene mode is supported (i.e.
1454            android.control.availableSceneModes
            contains some modes other than DISABLED).</notes></value>
1456            <value optional="true">OFF_KEEP_STATE
1457            <notes>Same as OFF mode, except that this capture will not be
1458            used by camera device background auto-exposure, auto-white balance and
1459            auto-focus algorithms (3A) to update their statistics.
1460
1461            Specifically, the 3A routines are locked to the last
1462            values set from a request with AUTO, OFF, or
1463            USE_SCENE_MODE, and any statistics or state updates
1464            collected from manual captures with OFF_KEEP_STATE will be
1465            discarded by the camera device.
1466            </notes></value>
1467          </enum>
1468          <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
1469          routines.</description>
1470          <range>android.control.availableModes</range>
1471          <details>
1472          This is a top-level 3A control switch. When set to OFF, all 3A control
1473          by the camera device is disabled. The application must set the fields for
1474          capture parameters itself.
1475
1476          When set to AUTO, the individual algorithm controls in
1477          android.control.* are in effect, such as android.control.afMode.
1478
1479          When set to USE_SCENE_MODE, the individual controls in
1480          android.control.* are mostly disabled, and the camera device implements
1481          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
1482          as it wishes. The camera device scene mode 3A settings are provided by
1483          {@link android.hardware.camera2.CaptureResult capture results}.
1484
          When set to OFF_KEEP_STATE, the behavior is similar to OFF mode; the only difference
          is that this frame will not be used by the camera device's background 3A statistics
          update, as if this frame were never captured. This mode can be used in the scenario
          where the application doesn't want a 3A manual control capture to affect
          the subsequent auto 3A capture results.
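
          As a sketch of full manual operation (assuming the MANUAL_SENSOR capability and an
          existing request `builder`; the exposure values below are arbitrary examples), an
          application might do:

              builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
              builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L);  // 10 ms, in ns
              builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);          // ISO 400
              builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33333333L); // ~30 fps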
1490          </details>
1491          <tag id="BC" />
1492        </entry>
1493        <entry name="sceneMode" type="byte" visibility="public" enum="true"
1494               hwlevel="legacy">
1495          <enum>
1496            <value id="0">DISABLED
1497              <notes>
1498              Indicates that no scene modes are set for a given capture request.
1499              </notes>
1500            </value>
1501            <value>FACE_PRIORITY
1502              <notes>If face detection support exists, use face
1503              detection data for auto-focus, auto-white balance, and
1504              auto-exposure routines.
1505
1506              If face detection statistics are disabled
1507              (i.e. android.statistics.faceDetectMode is set to OFF),
1508              this should still operate correctly (but will not return
1509              face detection statistics to the framework).
1510
1511              Unlike the other scene modes, android.control.aeMode,
1512              android.control.awbMode, and android.control.afMode
1513              remain active when FACE_PRIORITY is set.
1514              </notes>
1515            </value>
1516            <value optional="true">ACTION
1517              <notes>
1518              Optimized for photos of quickly moving objects.
1519
1520              Similar to SPORTS.
1521              </notes>
1522            </value>
1523            <value optional="true">PORTRAIT
1524              <notes>
1525              Optimized for still photos of people.
1526              </notes>
1527            </value>
1528            <value optional="true">LANDSCAPE
1529              <notes>
1530              Optimized for photos of distant macroscopic objects.
1531              </notes>
1532            </value>
1533            <value optional="true">NIGHT
1534              <notes>
1535              Optimized for low-light settings.
1536              </notes>
1537            </value>
1538            <value optional="true">NIGHT_PORTRAIT
1539              <notes>
1540              Optimized for still photos of people in low-light
1541              settings.
1542              </notes>
1543            </value>
1544            <value optional="true">THEATRE
1545              <notes>
1546              Optimized for dim, indoor settings where flash must
1547              remain off.
1548              </notes>
1549            </value>
1550            <value optional="true">BEACH
1551              <notes>
1552              Optimized for bright, outdoor beach settings.
1553              </notes>
1554            </value>
1555            <value optional="true">SNOW
1556              <notes>
1557              Optimized for bright, outdoor settings containing snow.
1558              </notes>
1559            </value>
1560            <value optional="true">SUNSET
1561              <notes>
1562              Optimized for scenes of the setting sun.
1563              </notes>
1564            </value>
1565            <value optional="true">STEADYPHOTO
1566              <notes>
1567              Optimized to avoid blurry photos due to small amounts of
1568              device motion (for example: due to hand shake).
1569              </notes>
1570            </value>
1571            <value optional="true">FIREWORKS
1572              <notes>
1573              Optimized for nighttime photos of fireworks.
1574              </notes>
1575            </value>
1576            <value optional="true">SPORTS
1577              <notes>
1578              Optimized for photos of quickly moving people.
1579
1580              Similar to ACTION.
1581              </notes>
1582            </value>
1583            <value optional="true">PARTY
1584              <notes>
1585              Optimized for dim, indoor settings with multiple moving
1586              people.
1587              </notes>
1588            </value>
1589            <value optional="true">CANDLELIGHT
1590              <notes>
1591              Optimized for dim settings where the main light source
1592              is a flame.
1593              </notes>
1594            </value>
1595            <value optional="true">BARCODE
1596              <notes>
              Optimized for accurately capturing a photo of a barcode
1598              for use by camera applications that wish to read the
1599              barcode value.
1600              </notes>
1601            </value>
1602            <value deprecated="true" optional="true">HIGH_SPEED_VIDEO
1603              <notes>
1604              This is deprecated, please use {@link
1605              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
1606              and {@link
1607              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
1608              for high speed video recording.
1609
1610              Optimized for high speed video recording (frame rate >=60fps) use case.
1611
1612              The supported high speed video sizes and fps ranges are specified in
              android.control.availableHighSpeedVideoConfigurations. To get the desired
              output frame rates, the application is only allowed to select video size
              and fps range combinations listed in this static metadata. The fps range
              can be controlled via android.control.aeTargetFpsRange.
1617
1618              In this mode, the camera device will override aeMode, awbMode, and afMode to
1619              ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
1620              controls will be overridden to be FAST. Therefore, no manual control of capture
1621              and post-processing parameters is possible. All other controls operate the
1622              same as when android.control.mode == AUTO. This means that all other
1623              android.control.* fields continue to work, such as
1624
1625              * android.control.aeTargetFpsRange
1626              * android.control.aeExposureCompensation
1627              * android.control.aeLock
1628              * android.control.awbLock
1629              * android.control.effectMode
1630              * android.control.aeRegions
1631              * android.control.afRegions
1632              * android.control.awbRegions
1633              * android.control.afTrigger
1634              * android.control.aePrecaptureTrigger
1635
1636              Outside of android.control.*, the following controls will work:
1637
1638              * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
1639              * android.lens.opticalStabilizationMode (if it is supported)
1640              * android.scaler.cropRegion
1641              * android.statistics.faceDetectMode
1642
              For the high speed recording use case, the actual maximum supported frame rate may
              be lower than what the camera can output, depending on the destination Surfaces for
              the image data. For example, if the destination surface is from a video encoder,
              the application needs to check if the video encoder is capable of supporting the
              high frame rate for a given video size, or it will end up with a lower recording
              frame rate. If the destination surface is from a preview window, the preview frame
              rate will be bounded by the screen refresh rate.
1650
1651              The camera device will only support up to 2 output high speed streams
1652              (processed non-stalling format defined in android.request.maxNumOutputStreams)
              in this mode. This control will be effective only if all of the conditions below are true:
1654
1655              * The application created no more than maxNumHighSpeedStreams processed non-stalling
1656              format output streams, where maxNumHighSpeedStreams is calculated as
1657              min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
1658              * The stream sizes are selected from the sizes reported by
1659              android.control.availableHighSpeedVideoConfigurations.
1660              * No processed non-stalling or raw streams are configured.
1661
              When the above conditions are NOT satisfied, the controls of this mode and
              android.control.aeTargetFpsRange will be ignored by the camera device,
              the camera device will fall back to android.control.mode `==` AUTO,
              and the returned capture result metadata will give the fps range chosen
              by the camera device.
1667
1668              Switching into or out of this mode may trigger some camera ISP/sensor
              reconfigurations, which may introduce extra latency. It is recommended that
              the application avoid unnecessary scene mode switches as much as possible.
1671              </notes>
1672            </value>
1673            <value optional="true">HDR
1674              <notes>
1675              Turn on a device-specific high dynamic range (HDR) mode.
1676
1677              In this scene mode, the camera device captures images
1678              that keep a larger range of scene illumination levels
1679              visible in the final image. For example, when taking a
              picture of an object in front of a bright window, both
1681              the object and the scene through the window may be
1682              visible when using HDR mode, while in normal AUTO mode,
1683              one or the other may be poorly exposed. As a tradeoff,
1684              HDR mode generally takes much longer to capture a single
1685              image, has no user control, and may have other artifacts
1686              depending on the HDR method used.
1687
1688              Therefore, HDR captures operate at a much slower rate
1689              than regular captures.
1690
1691              In this mode, on LIMITED or FULL devices, when a request
1692              is made with a android.control.captureIntent of
1693              STILL_CAPTURE, the camera device will capture an image
1694              using a high dynamic range capture technique.  On LEGACY
1695              devices, captures that target a JPEG-format output will
1696              be captured with HDR, and the capture intent is not
1697              relevant.
1698
1699              The HDR capture may involve the device capturing a burst
1700              of images internally and combining them into one, or it
1701              may involve the device using specialized high dynamic
1702              range capture hardware. In all cases, a single image is
1703              produced in response to a capture request submitted
1704              while in HDR mode.
1705
1706              Since substantial post-processing is generally needed to
1707              produce an HDR image, only YUV and JPEG outputs are
1708              supported for LIMITED/FULL device HDR captures, and only
1709              JPEG outputs are supported for LEGACY HDR
1710              captures. Using a RAW output for HDR capture is not
1711              supported.
1712              </notes>
1713            </value>
1714            <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
1715              <notes>Same as FACE_PRIORITY scene mode, except that the camera
1716              device will choose higher sensitivity values (android.sensor.sensitivity)
1717              under low light conditions.
1718
1719              The camera device may be tuned to expose the images in a reduced
1720              sensitivity range to produce the best quality images. For example,
1721              if the android.sensor.info.sensitivityRange gives range of [100, 1600],
1722              the camera device auto-exposure routine tuning process may limit the actual
              exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
              excessive in order to preserve the image quality. Under this situation, the image under
1725              low light may be under-exposed when the sensor max exposure time (bounded by the
1726              android.control.aeTargetFpsRange when android.control.aeMode is one of the
1727              ON_* modes) and effective max sensitivity are reached. This scene mode allows the
1728              camera device auto-exposure routine to increase the sensitivity up to the max
1729              sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
1730              dark and the max exposure time is reached. The captured images may be noisier
1731              compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
1732              recommended that the application only use this scene mode when it is capable of
1733              reducing the noise level of the captured images.
1734
1735              Unlike the other scene modes, android.control.aeMode,
1736              android.control.awbMode, and android.control.afMode
1737              remain active when FACE_PRIORITY_LOW_LIGHT is set.
1738              </notes>
1739            </value>
1740          </enum>
1741          <description>
1742          Control for which scene mode is currently active.
1743          </description>
1744          <range>android.control.availableSceneModes</range>
1745          <details>
1746          Scene modes are custom camera modes optimized for a certain set of conditions and
1747          capture settings.
1748
          This is the mode that is active when
1750          `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
1751          disable android.control.aeMode, android.control.awbMode, and android.control.afMode
1752          while in use.
1753
1754          The interpretation and implementation of these scene modes is left
1755          to the implementor of the camera device. Their behavior will not be
1756          consistent across all devices, and any given device may only implement
1757          a subset of these modes.
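
          For illustration only (assuming an existing request `builder` and that NIGHT appears in
          android.control.availableSceneModes for the device), enabling a scene mode looks like:

              builder.set(CaptureRequest.CONTROL_MODE,
                      CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
              builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                      CaptureRequest.CONTROL_SCENE_MODE_NIGHT);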
1758          </details>
1759          <hal_details>
1760          HAL implementations that include scene modes are expected to provide
1761          the per-scene settings to use for android.control.aeMode,
1762          android.control.awbMode, and android.control.afMode in
1763          android.control.sceneModeOverrides.
1764
1765          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
1766          the HAL must list supported video size and fps range in
1767          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
1768          1280x720, if the HAL has two different sensor configurations for normal streaming
1769          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
1770          requests, the HAL may have to switch between different sensor modes.
          This mode is deprecated in HAL3.3; to support high speed video recording, please implement
          android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO
          capability defined in android.request.availableCapabilities.
1774          </hal_details>
1775          <tag id="BC" />
1776        </entry>
1777        <entry name="videoStabilizationMode" type="byte" visibility="public"
1778               enum="true" hwlevel="legacy">
1779          <enum>
1780            <value>OFF
1781            <notes>
1782              Video stabilization is disabled.
1783            </notes></value>
1784            <value>ON
1785            <notes>
1786              Video stabilization is enabled.
1787            </notes></value>
1788          </enum>
1789          <description>Whether video stabilization is
1790          active.</description>
1791          <details>
1792          Video stabilization automatically warps images from
1793          the camera in order to stabilize motion between consecutive frames.
1794
1795          If enabled, video stabilization can modify the
1796          android.scaler.cropRegion to keep the video stream stabilized.
1797
          Switching between different video stabilization modes may take several
          frames to initialize; the camera device will report the current mode
          in capture result metadata. For example, when "ON" mode is requested,
          the video stabilization modes in the first several capture results may
          still be "OFF", and it will become "ON" when the initialization is
          done.
1804
1805          In addition, not all recording sizes or frame rates may be supported for
1806          stabilization by a device that reports stabilization support. It is guaranteed
1807          that an output targeting a MediaRecorder or MediaCodec will be stabilized if
1808          the recording resolution is less than or equal to 1920 x 1080 (width less than
1809          or equal to 1920, height less than or equal to 1080), and the recording
1810          frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
1811          android.control.videoStabilizationMode field will return
1812          OFF if the recording output is not stabilized, or if there are no output
1813          Surface types that can be stabilized.
1814
1815          If a camera device supports both this mode and OIS
1816          (android.lens.opticalStabilizationMode), turning both modes on may
1817          produce undesirable interaction, so it is recommended not to enable
1818          both at the same time.
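
          A minimal sketch (assuming an existing repeating request `builder` and a `result`
          received in a capture callback) of requesting stabilization and checking what was
          actually applied:

              builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                      CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
              // Later, in the capture callback, check the mode the device actually used;
              // it may remain OFF for the first few frames or for unsupported stream sizes.
              Integer applied = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);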
1819          </details>
1820          <tag id="BC" />
1821        </entry>
1822      </controls>
1823      <static>
1824        <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
1825               type_notes="list of enums" container="array" typedef="enumList"
1826               hwlevel="legacy">
1827          <array>
1828            <size>n</size>
1829          </array>
1830          <description>
1831            List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
1832            supported by this camera device.
1833          </description>
1834          <range>Any value listed in android.control.aeAntibandingMode</range>
1835          <details>
1836            Not all of the auto-exposure anti-banding modes may be
1837            supported by a given camera device. This field lists the
1838            valid anti-banding modes that the application may request
1839            for this camera device with the
1840            android.control.aeAntibandingMode control.
1841          </details>
1842          <tag id="BC" />
1843        </entry>
1844        <entry name="aeAvailableModes" type="byte" visibility="public"
1845               type_notes="list of enums" container="array" typedef="enumList"
1846               hwlevel="legacy">
1847          <array>
1848            <size>n</size>
1849          </array>
1850          <description>
1851            List of auto-exposure modes for android.control.aeMode that are supported by this camera
1852            device.
1853          </description>
1854          <range>Any value listed in android.control.aeMode</range>
1855          <details>
1856            Not all the auto-exposure modes may be supported by a
1857            given camera device, especially if no flash unit is
1858            available. This entry lists the valid modes for
1859            android.control.aeMode for this camera device.
1860
1861            All camera devices support ON, and all camera devices with flash
1862            units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
1863
1864            FULL mode camera devices always support OFF mode,
1865            which enables application control of camera exposure time,
1866            sensitivity, and frame duration.
1867
1868            LEGACY mode camera devices never support OFF mode.
1869            LIMITED mode devices support OFF if they support the MANUAL_SENSOR
1870            capability.
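
            For example (a sketch only; `characteristics` is an assumed CameraCharacteristics
            instance), an application can check for OFF support before attempting manual exposure:

                int[] aeModes = characteristics.get(
                        CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
                boolean manualAeSupported = false;
                for (int mode : aeModes) {
                    if (mode == CameraMetadata.CONTROL_AE_MODE_OFF) {
                        manualAeSupported = true;
                    }
                }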
1871          </details>
1872          <tag id="BC" />
1873        </entry>
1874        <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
1875               type_notes="list of pairs of frame rates"
1876               container="array" typedef="rangeInt"
1877               hwlevel="legacy">
1878          <array>
1879            <size>2</size>
1880            <size>n</size>
1881          </array>
1882          <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
1883          this camera device.</description>
1884          <units>Frames per second (FPS)</units>
1885          <details>
1886          For devices at the LEGACY level or above:
1887
1888          * This list will always include (30, 30).
1889          * Also, for constant-framerate recording, for each normal
1890          {@link android.media.CamcorderProfile CamcorderProfile} that has
1891          {@link android.media.CamcorderProfile#quality quality} in
1892          the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
1893          {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
1894          supported by the device and has
1895          {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will
1896          always include (`x`,`x`).
1897          * For preview streaming use case, this list will always include (`min`, `max`) where
1898          `min` &lt;= 15 and `max` &gt;= 30.
1899
1900          For devices at the LIMITED level or above:
1901
1902          * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
1903          and (`max`, `max`) where `min` &lt;= 15 and `max` = the maximum output frame rate of the
1904          maximum YUV_420_888 output size.
1905          </details>
1906          <tag id="BC" />
1907        </entry>
1908        <entry name="aeCompensationRange" type="int32" visibility="public"
1909               container="array" typedef="rangeInt"
1910               hwlevel="legacy">
1911          <array>
1912            <size>2</size>
1913          </array>
1914          <description>Maximum and minimum exposure compensation values for
1915          android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
1916          that are supported by this camera device.</description>
1917          <range>
1918            Range [0,0] indicates that exposure compensation is not supported.
1919
            For LIMITED and FULL devices, the range must follow the requirements below if exposure
            compensation is supported (`range != [0, 0]`):
1922
1923            `Min.exposure compensation * android.control.aeCompensationStep &lt;= -2 EV`
1924
1925            `Max.exposure compensation * android.control.aeCompensationStep &gt;= 2 EV`
1926
1927            LEGACY devices may support a smaller range than this.
1928          </range>
1929          <tag id="BC" />
1930        </entry>
1931        <entry name="aeCompensationStep" type="rational" visibility="public"
1932               hwlevel="legacy">
1933          <description>Smallest step by which the exposure compensation
1934          can be changed.</description>
1935          <units>Exposure Value (EV)</units>
1936          <details>
1937          This is the unit for android.control.aeExposureCompensation. For example, if this key has
1938          a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
1939          that the target EV offset for the auto-exposure routine is -1 EV.
1940
1941          One unit of EV compensation changes the brightness of the captured image by a factor
1942          of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
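
          As a worked sketch (assuming existing `characteristics` and `builder` objects),
          converting a desired -1 EV offset into compensation units:

              // Requires android.util.Rational. With a step of 1/2, a -1 EV target
              // becomes -1 divided by 1/2, i.e. -2 compensation units.
              Rational step = characteristics.get(
                      CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
              int units = (int) Math.round(-1.0 * step.getDenominator() / step.getNumerator());
              builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, units);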
1943          </details>
1944          <hal_details>
1945            This must be less than or equal to 1/2.
1946          </hal_details>
1947          <tag id="BC" />
1948        </entry>
1949        <entry name="afAvailableModes" type="byte" visibility="public"
1950               type_notes="List of enums" container="array" typedef="enumList"
1951               hwlevel="legacy">
1952          <array>
1953            <size>n</size>
1954          </array>
1955          <description>
1956          List of auto-focus (AF) modes for android.control.afMode that are
1957          supported by this camera device.
1958          </description>
1959          <range>Any value listed in android.control.afMode</range>
1960          <details>
1961          Not all the auto-focus modes may be supported by a
1962          given camera device. This entry lists the valid modes for
1963          android.control.afMode for this camera device.
1964
1965          All LIMITED and FULL mode camera devices will support OFF mode, and all
1966          camera devices with adjustable focuser units
1967          (`android.lens.info.minimumFocusDistance &gt; 0`) will support AUTO mode.
1968
1969          LEGACY devices will support OFF mode only if they support
1970          focusing to infinity (by also setting android.lens.focusDistance to
1971          `0.0f`).
1972          </details>
1973          <tag id="BC" />
1974        </entry>
1975        <entry name="availableEffects" type="byte" visibility="public"
1976               type_notes="List of enums (android.control.effectMode)." container="array"
1977               typedef="enumList" hwlevel="legacy">
1978          <array>
1979            <size>n</size>
1980          </array>
1981          <description>
1982          List of color effects for android.control.effectMode that are supported by this camera
1983          device.
1984          </description>
1985          <range>Any value listed in android.control.effectMode</range>
1986          <details>
1987          This list contains the color effect modes that can be applied to
1988          images produced by the camera device.
1989          Implementations are not expected to be consistent across all devices.
1990          If no color effect modes are available for a device, this will only list
1991          OFF.
1992
1993          A color effect will only be applied if
1994          android.control.mode != OFF.  OFF is always included in this list.
1995
1996          This control has no effect on the operation of other control routines such
1997          as auto-exposure, white balance, or focus.
1998          </details>
1999          <tag id="BC" />
2000        </entry>
2001        <entry name="availableSceneModes" type="byte" visibility="public"
2002               type_notes="List of enums (android.control.sceneMode)."
2003               container="array" typedef="enumList" hwlevel="legacy">
2004          <array>
2005            <size>n</size>
2006          </array>
2007          <description>
2008          List of scene modes for android.control.sceneMode that are supported by this camera
2009          device.
2010          </description>
2011          <range>Any value listed in android.control.sceneMode</range>
2012          <details>
2013          This list contains scene modes that can be set for the camera device.
2014          Only scene modes that have been fully implemented for the
2015          camera device may be included here. Implementations are not expected
2016          to be consistent across all devices.
2017
2018          If no scene modes are supported by the camera device, this
2019          will be set to DISABLED. Otherwise DISABLED will not be listed.
2020
2021          FACE_PRIORITY is always listed if face detection is
          supported (i.e. `android.statistics.info.maxFaceCount &gt;
2023          0`).
2024          </details>
2025          <tag id="BC" />
2026        </entry>
2027        <entry name="availableVideoStabilizationModes" type="byte"
2028               visibility="public" type_notes="List of enums." container="array"
2029               typedef="enumList" hwlevel="legacy">
2030          <array>
2031            <size>n</size>
2032          </array>
2033          <description>
2034          List of video stabilization modes for android.control.videoStabilizationMode
2035          that are supported by this camera device.
2036          </description>
2037          <range>Any value listed in android.control.videoStabilizationMode</range>
2038          <details>
2039          OFF will always be listed.
2040          </details>
2041          <tag id="BC" />
2042        </entry>
2043        <entry name="awbAvailableModes" type="byte" visibility="public"
2044               type_notes="List of enums"
2045               container="array" typedef="enumList" hwlevel="legacy">
2046          <array>
2047            <size>n</size>
2048          </array>
2049          <description>
2050          List of auto-white-balance modes for android.control.awbMode that are supported by this
2051          camera device.
2052          </description>
2053          <range>Any value listed in android.control.awbMode</range>
2054          <details>
2055          Not all the auto-white-balance modes may be supported by a
2056          given camera device. This entry lists the valid modes for
2057          android.control.awbMode for this camera device.
2058
2059          All camera devices will support ON mode.
2060
2061          Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
2062          mode, which enables application control of white balance, by using
2063          android.colorCorrection.transform and android.colorCorrection.gains
2064          (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
2065          mode camera devices.
2066          </details>
2067          <tag id="BC" />
2068        </entry>
2069        <entry name="maxRegions" type="int32" visibility="hidden"
2070               container="array" hwlevel="legacy">
2071          <array>
2072            <size>3</size>
2073          </array>
2074          <description>
2075          List of the maximum number of regions that can be used for metering in
2076          auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
          this corresponds to the maximum number of elements in
2078          android.control.aeRegions, android.control.awbRegions,
2079          and android.control.afRegions.
2080          </description>
2081          <range>
          Value must be &gt;= 0 for each element. For full-capability devices
          this value must be &gt;= 1 for AE and AF. The order of the elements is:
2084          `(AE, AWB, AF)`.</range>
2085          <tag id="BC" />
2086        </entry>
2087        <entry name="maxRegionsAe" type="int32" visibility="public"
2088               synthetic="true" hwlevel="legacy">
2089          <description>
2090          The maximum number of metering regions that can be used by the auto-exposure (AE)
2091          routine.
2092          </description>
          <range>Value will be &gt;= 0. For FULL-capability devices, this
          value will be &gt;= 1.
2095          </range>
2096          <details>
          This corresponds to the maximum allowed number of elements in
2098          android.control.aeRegions.
2099          </details>
2100          <hal_details>This entry is private to the framework. Fill in
2101          maxRegions to have this entry be automatically populated.
2102          </hal_details>
2103        </entry>
2104        <entry name="maxRegionsAwb" type="int32" visibility="public"
2105               synthetic="true" hwlevel="legacy">
2106          <description>
2107          The maximum number of metering regions that can be used by the auto-white balance (AWB)
2108          routine.
2109          </description>
          <range>Value will be &gt;= 0.
2111          </range>
2112          <details>
          This corresponds to the maximum allowed number of elements in
2114          android.control.awbRegions.
2115          </details>
2116          <hal_details>This entry is private to the framework. Fill in
2117          maxRegions to have this entry be automatically populated.
2118          </hal_details>
2119        </entry>
2120        <entry name="maxRegionsAf" type="int32" visibility="public"
2121               synthetic="true" hwlevel="legacy">
2122          <description>
2123          The maximum number of metering regions that can be used by the auto-focus (AF) routine.
2124          </description>
          <range>Value will be &gt;= 0. For FULL-capability devices, this
          value will be &gt;= 1.
2127          </range>
2128          <details>
          This corresponds to the maximum allowed number of elements in
2130          android.control.afRegions.
2131          </details>
2132          <hal_details>This entry is private to the framework. Fill in
2133          maxRegions to have this entry be automatically populated.
2134          </hal_details>
2135        </entry>
2136        <entry name="sceneModeOverrides" type="byte" visibility="system"
2137               container="array" hwlevel="limited">
2138          <array>
2139            <size>3</size>
2140            <size>length(availableSceneModes)</size>
2141          </array>
2142          <description>
2143          Ordered list of auto-exposure, auto-white balance, and auto-focus
2144          settings to use with each available scene mode.
2145          </description>
2146          <range>
2147          For each available scene mode, the list must contain three
2148          entries containing the android.control.aeMode,
2149          android.control.awbMode, and android.control.afMode values used
2150          by the camera device. The entry order is `(aeMode, awbMode, afMode)`
2151          where aeMode has the lowest index position.
2152          </range>
2153          <details>
2154          When a scene mode is enabled, the camera device is expected
2155          to override android.control.aeMode, android.control.awbMode,
2156          and android.control.afMode with its preferred settings for
2157          that scene mode.
2158
2159          The order of this list matches that of availableSceneModes,
2160          with 3 entries for each mode.  The overrides listed
          for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
          since for those modes the application-set android.control.aeMode,
          android.control.awbMode, and android.control.afMode values are
          used instead, matching the behavior when android.control.mode
          is set to AUTO. It is recommended that the FACE_PRIORITY and
          FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.
2167
2168          For example, if availableSceneModes contains
2169          `(FACE_PRIORITY, ACTION, NIGHT)`,  then the camera framework
2170          expects sceneModeOverrides to have 9 entries formatted like:
2171          `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
2172          ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
2173          </details>
2174          <hal_details>
2175          To maintain backward compatibility, this list will be made available
2176          in the static metadata of the camera service.  The camera service will
2177          use these values to set android.control.aeMode,
2178          android.control.awbMode, and android.control.afMode when using a scene
2179          mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
2180          </hal_details>
2181          <tag id="BC" />
2182        </entry>
2183      </static>
2184      <dynamic>
2185        <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
2186          <description>The ID sent with the latest
2187          CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
2188          <details>Must be 0 if no
2189          CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
2190          by HAL. Always updated even if AE algorithm ignores the
2191          trigger</details>
2192        </entry>
2193        <clone entry="android.control.aeAntibandingMode" kind="controls">
2194        </clone>
2195        <clone entry="android.control.aeExposureCompensation" kind="controls">
2196        </clone>
2197        <clone entry="android.control.aeLock" kind="controls">
2198        </clone>
2199        <clone entry="android.control.aeMode" kind="controls">
2200        </clone>
2201        <clone entry="android.control.aeRegions" kind="controls">
2202        </clone>
2203        <clone entry="android.control.aeTargetFpsRange" kind="controls">
2204        </clone>
2205        <clone entry="android.control.aePrecaptureTrigger" kind="controls">
2206        </clone>
2207        <entry name="aeState" type="byte" visibility="public" enum="true"
2208               hwlevel="limited">
2209          <enum>
2210            <value>INACTIVE
2211            <notes>AE is off or recently reset.
2212
2213            When a camera device is opened, it starts in
2214            this state. This is a transient state, the camera device may skip reporting
2215            this state in capture result.</notes></value>
2216            <value>SEARCHING
2217            <notes>AE doesn't yet have a good set of control values
2218            for the current scene.
2219
2220            This is a transient state, the camera device may skip
2221            reporting this state in capture result.</notes></value>
2222            <value>CONVERGED
2223            <notes>AE has a good set of control values for the
2224            current scene.</notes></value>
2225            <value>LOCKED
2226            <notes>AE has been locked.</notes></value>
2227            <value>FLASH_REQUIRED
2228            <notes>AE has a good set of control values, but flash
2229            needs to be fired for good quality still
2230            capture.</notes></value>
2231            <value>PRECAPTURE
2232            <notes>AE has been asked to do a precapture sequence
2233            and is currently executing it.
2234
2235            Precapture can be triggered by setting
2236            android.control.aePrecaptureTrigger to START. The currently
2237            active precapture metering sequence, or a completed one (if it caused a
2238            camera device internal AE lock), can be canceled by setting
2239            android.control.aePrecaptureTrigger to CANCEL.
2240
2241            Once PRECAPTURE completes, AE will transition to CONVERGED
2242            or FLASH_REQUIRED as appropriate. This is a transient
2243            state, the camera device may skip reporting this state in
2244            capture result.</notes></value>
2245          </enum>
2246          <description>Current state of the auto-exposure (AE) algorithm.</description>
2247          <details>Switching between or enabling AE modes (android.control.aeMode) always
2248          resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
2249          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2250          the algorithm states to INACTIVE.
2251
2252          The camera device can do several state transitions between two results, if it is
2253          allowed by the state transition table. For example: INACTIVE may never actually be
2254          seen in a result.
2255
2256          The state in the result is the state for this image (in sync with this image): if
2257          AE state becomes CONVERGED, then the image data associated with this result should
2258          be good to use.
2259
2260          Below are state transition tables for different AE modes.
2261
2262            State       | Transition Cause | New State | Notes
2263          :------------:|:----------------:|:---------:|:-----------------------:
2264          INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
2265
2266          When android.control.aeMode is AE_MODE_ON_*:
2267
2268            State        | Transition Cause                             | New State      | Notes
2269          :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
2270          INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
2271          INACTIVE       | android.control.aeLock is ON                 | LOCKED         | Values locked
2272          SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
2273          SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
2274          SEARCHING      | android.control.aeLock is ON                 | LOCKED         | Values locked
2275          CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
2276          CONVERGED      | android.control.aeLock is ON                 | LOCKED         | Values locked
2277          FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
2278          FLASH_REQUIRED | android.control.aeLock is ON                 | LOCKED         | Values locked
2279          LOCKED         | android.control.aeLock is OFF                | SEARCHING      | Values not good after unlock
2280          LOCKED         | android.control.aeLock is OFF                | CONVERGED      | Values good after unlock
2281          LOCKED         | android.control.aeLock is OFF                | FLASH_REQUIRED | Exposure good, but too dark
2282          PRECAPTURE     | Sequence done. android.control.aeLock is OFF | CONVERGED      | Ready for high-quality capture
2283          PRECAPTURE     | Sequence done. android.control.aeLock is ON  | LOCKED         | Ready for high-quality capture
2284          LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
2285          LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
2286          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
2287          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
2288
2289          For the above table, the camera device may skip reporting any state changes that happen
2290          without application intervention (i.e. mode switch, trigger, locking). Any state that
2291          can be skipped in that manner is called a transient state.
2292
2293          For example, for the above AE modes (AE_MODE_ON_*), in addition to the state transitions
2294          listed in the above table, it is also legal for the camera device to skip one or more
2295          transient states between two results. See the table below for examples:
2296
2297            State        | Transition Cause                                            | New State      | Notes
2298          :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
2299          INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
2300          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
2301          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
2302          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
2303          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
2304          CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
2305          FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
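
          For example, an application typically waits for the AE state to settle before issuing the
          final still capture. A minimal sketch of that check is shown below; the method and
          `captureStillPicture()` names are illustrative application code, not part of this
          specification:

              // Called from the preview CameraCaptureSession.CaptureCallback#onCaptureCompleted.
              private void checkAeState(TotalCaptureResult result) {
                  Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                  // A null state means the device does not report AE state (LEGACY devices may not).
                  if (aeState == null
                          || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                          || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                      // AE has settled (or cannot be tracked); safe to issue the still capture.
                      captureStillPicture();
                  }
              }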
2306          </details>
2307        </entry>
2308        <clone entry="android.control.afMode" kind="controls">
2309        </clone>
2310        <clone entry="android.control.afRegions" kind="controls">
2311        </clone>
2312        <clone entry="android.control.afTrigger" kind="controls">
2313        </clone>
2314        <entry name="afState" type="byte" visibility="public" enum="true"
2315               hwlevel="legacy">
2316          <enum>
2317            <value>INACTIVE
2318            <notes>AF is off or has not yet tried to scan/been asked
2319            to scan.
2320
2321            When a camera device is opened, it starts in this
2322            state. This is a transient state, the camera device may
2323            skip reporting this state in capture
2324            result.</notes></value>
2325            <value>PASSIVE_SCAN
2326            <notes>AF is currently performing an AF scan initiated the
2327            camera device in a continuous autofocus mode.
2328
2329            Only used by CONTINUOUS_* AF modes. This is a transient
2330            state, the camera device may skip reporting this state in
2331            capture result.</notes></value>
2332            <value>PASSIVE_FOCUSED
2333            <notes>AF currently believes it is in focus, but may
2334            restart scanning at any time.
2335
2336            Only used by CONTINUOUS_* AF modes. This is a transient
2337            state, the camera device may skip reporting this state in
2338            capture result.</notes></value>
2339            <value>ACTIVE_SCAN
2340            <notes>AF is performing an AF scan because it was
2341            triggered by AF trigger.
2342
2343            Only used by AUTO or MACRO AF modes. This is a transient
2344            state, the camera device may skip reporting this state in
2345            capture result.</notes></value>
2346            <value>FOCUSED_LOCKED
2347            <notes>AF believes it is focused correctly and has locked
2348            focus.
2349
2350            This state is reached only after an explicit START AF trigger has been
2351            sent (android.control.afTrigger), when good focus has been obtained.
2352
2353            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2354            a new AF trigger is sent to the camera device (android.control.afTrigger).
2355            </notes></value>
2356            <value>NOT_FOCUSED_LOCKED
2357            <notes>AF has failed to focus successfully and has locked
2358            focus.
2359
2360            This state is reached only after an explicit START AF trigger has been
2361            sent (android.control.afTrigger), when good focus cannot be obtained.
2362
2363            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2364            a new AF trigger is sent to the camera device (android.control.afTrigger).
2365            </notes></value>
2366            <value>PASSIVE_UNFOCUSED
2367            <notes>AF finished a passive scan without finding focus,
2368            and may restart scanning at any time.
2369
2370            Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
2371            device may skip reporting this state in capture result.
2372
2373            LEGACY camera devices do not support this state. When a passive
2374            scan has finished, it will always go to PASSIVE_FOCUSED.
2375            </notes></value>
2376          </enum>
2377          <description>Current state of auto-focus (AF) algorithm.</description>
2378          <details>
2379          Switching between or enabling AF modes (android.control.afMode) always
2380          resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
2381          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2382          the algorithm states to INACTIVE.
2383
2384          The camera device can do several state transitions between two results, if it is
2385          allowed by the state transition table. For example: INACTIVE may never actually be
2386          seen in a result.
2387
2388          The state in the result is the state for this image (in sync with this image): if
2389          AF state becomes FOCUSED, then the image data associated with this result should
2390          be sharp.
2391
2392          Below are state transition tables for different AF modes.
2393
2394          When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
2395
2396            State       | Transition Cause | New State | Notes
2397          :------------:|:----------------:|:---------:|:-----------:
2398          INACTIVE      |                  | INACTIVE  | Never changes
2399
2400          When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
2401
2402            State            | Transition Cause | New State          | Notes
2403          :-----------------:|:----------------:|:------------------:|:--------------:
2404          INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
2405          ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
2406          ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
2407          ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
2408          FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2409          FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2410          NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2411          NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2412          Any state          | Mode change      | INACTIVE           |
2413
2414          For the above table, the camera device may skip reporting any state changes that happen
2415          without application intervention (i.e. mode switch, trigger, locking). Any state that
2416          can be skipped in that manner is called a transient state.
2417
2418          For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
2419          state transitions listed in the above table, it is also legal for the camera device to skip
2420          one or more transient states between two results. See the table below for examples:
2421
2422            State            | Transition Cause | New State          | Notes
2423          :-----------------:|:----------------:|:------------------:|:--------------:
2424          INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
2425          INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
2426          FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
2427          NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
2428
2429
2430          When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
2431
2432            State            | Transition Cause                    | New State          | Notes
2433          :-----------------:|:-----------------------------------:|:------------------:|:--------------:
2434          INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2435          INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2436          PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
2437          PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2438          PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
2439          PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
2440          PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
2441          PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2442          PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2443          PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
2444          PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
2445          FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
2446          FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
2447          NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
2448          NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan
2449
2450          When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
2451
2452            State            | Transition Cause                     | New State          | Notes
2453          :-----------------:|:------------------------------------:|:------------------:|:--------------:
2454          INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2455          INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2456          PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
2457          PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2458          PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
2459          PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
2460          PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
2461          PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2462          PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2463          PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
2464          PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
2465          FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
2466          FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
2467          NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
2468          NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan
2469
2470          When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
2471          (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
2472          camera device. When a trigger is included in a mode switch request, the trigger
2473          will be evaluated in the context of the new mode in the request.
2474          See below table for examples:
2475
2476            State      | Transition Cause                       | New State                                | Notes
2477          :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
2478          any state    | CAF-->AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
2479          any state    | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
2480          any state    | AUTO-->CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
2481          </details>
2482        </entry>
2483        <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
2484          <description>The ID sent with the latest
2485          CAMERA2_TRIGGER_AUTOFOCUS call</description>
2486          <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
2487          received yet by HAL. Always updated even if AF algorithm
2488          ignores the trigger</details>
2489        </entry>
2490        <clone entry="android.control.awbLock" kind="controls">
2491        </clone>
2492        <clone entry="android.control.awbMode" kind="controls">
2493        </clone>
2494        <clone entry="android.control.awbRegions" kind="controls">
2495        </clone>
2496        <clone entry="android.control.captureIntent" kind="controls">
2497        </clone>
2498        <entry name="awbState" type="byte" visibility="public" enum="true"
2499               hwlevel="limited">
2500          <enum>
2501            <value>INACTIVE
2502            <notes>AWB is not in auto mode, or has not yet started metering.
2503
2504            When a camera device is opened, it starts in this
2505            state. This is a transient state, the camera device may
2506            skip reporting this state in capture
2507            result.</notes></value>
2508            <value>SEARCHING
2509            <notes>AWB doesn't yet have a good set of control
2510            values for the current scene.
2511
2512            This is a transient state, the camera device
2513            may skip reporting this state in capture result.</notes></value>
2514            <value>CONVERGED
2515            <notes>AWB has a good set of control values for the
2516            current scene.</notes></value>
2517            <value>LOCKED
2518            <notes>AWB has been locked.
2519            </notes></value>
2520          </enum>
2521          <description>Current state of auto-white balance (AWB) algorithm.</description>
2522          <details>Switching between or enabling AWB modes (android.control.awbMode) always
2523          resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
2524          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2525          the algorithm states to INACTIVE.
2526
2527          The camera device can do several state transitions between two results, if it is
2528          allowed by the state transition table. So INACTIVE may never actually be seen in
2529          a result.
2530
2531          The state in the result is the state for this image (in sync with this image): if
2532          AWB state becomes CONVERGED, then the image data associated with this result should
2533          be good to use.
2534
2535          Below are state transition tables for different AWB modes.
2536
2537          When `android.control.awbMode != AWB_MODE_AUTO`:
2538
2539            State       | Transition Cause | New State | Notes
2540          :------------:|:----------------:|:---------:|:-----------------------:
2541          INACTIVE      |                  |INACTIVE   |Camera device auto white balance algorithm is disabled
2542
2543          When android.control.awbMode is AWB_MODE_AUTO:
2544
2545            State        | Transition Cause                 | New State     | Notes
2546          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2547          INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
2548          INACTIVE       | android.control.awbLock is ON    | LOCKED        | Values locked
2549          SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
2550          SEARCHING      | android.control.awbLock is ON    | LOCKED        | Values locked
2551          CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
2552          CONVERGED      | android.control.awbLock is ON    | LOCKED        | Values locked
2553          LOCKED         | android.control.awbLock is OFF   | SEARCHING     | Values not good after unlock
2554
2555          For the above table, the camera device may skip reporting any state changes that happen
2556          without application intervention (i.e. mode switch, trigger, locking). Any state that
2557          can be skipped in that manner is called a transient state.
2558
2559          For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
2560          listed in the above table, it is also legal for the camera device to skip one or more
2561          transient states between two results. See the table below for examples:
2562
2563            State        | Transition Cause                 | New State     | Notes
2564          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2565          INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
2566          LOCKED         | android.control.awbLock is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.
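
          For example, an application that wants stable colors for a burst can lock AWB once the
          state reports CONVERGED. A minimal sketch is shown below; the `result`, `previewBuilder`,
          `session`, and `handler` names are illustrative, with `result` being a capture result from
          the repeating preview callback:

              Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
              if (awbState != null && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
                  // Lock white balance; the state will transition to LOCKED in later results.
                  previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                  session.setRepeatingRequest(previewBuilder.build(), /*callback*/ null, handler);
              }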
2567          </details>
2568        </entry>
2569        <clone entry="android.control.effectMode" kind="controls">
2570        </clone>
2571        <clone entry="android.control.mode" kind="controls">
2572        </clone>
2573        <clone entry="android.control.sceneMode" kind="controls">
2574        </clone>
2575        <clone entry="android.control.videoStabilizationMode" kind="controls">
2576        </clone>
2577      </dynamic>
2578      <static>
2579        <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
2580               container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
2581          <array>
2582            <size>5</size>
2583            <size>n</size>
2584          </array>
2585          <description>
2586          List of available high speed video size, fps range and max batch size configurations
2587          supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
2588          </description>
2589          <range>
2590          For each configuration, the fps_max &amp;gt;= 120fps.
2591          </range>
2592          <details>
2593          When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
2594          this metadata will list the supported high speed video size, fps range and max batch size
2595          configurations. All the sizes listed in this configuration will be a subset of the sizes
2596          reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
2597          for processed non-stalling formats.
2598
2599          For the high speed video use case, the application must
2600          select the video size and fps range from this metadata to configure the recording and
2601          preview streams and set up the recording requests. For example, if the application intends
2602          to do high speed recording, it can select the maximum size reported by this metadata to
2603          configure the output streams. Once the size is selected, the application can filter this
2604          metadata by the selected size to get the supported fps ranges, and use those ranges to set
2605          up the recording requests. Note that for the use case of multiple output streams, the
2606          application must select one unique size from this metadata (e.g., preview and recording
2607          streams must have the same size). Otherwise, creation of the high speed capture session will fail.
2608
2609          The min and max fps will be multiples of 30fps.
2610
2611          High speed video streaming puts significant performance pressure on the camera hardware;
2612          to achieve efficient high speed streaming, the camera device may have to aggregate
2613          multiple frames together and process them as a single batch, where the request
2614          controls are the same for all the frames in this batch. Max batch size indicates
2615          the max possible number of frames the camera device will group together for this high
2616          speed stream configuration. This max batch size will be used to generate a high speed
2617          recording request list by
2618          {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
2619          The max batch size for each configuration will satisfy the following conditions:
2620
2621          * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
2622          if fps_max is 300, the max batch size will only be 1, 2, 5, or 10.
2623          * The camera device may choose smaller internal batch size for each configuration, but
2624          the actual batch size will be a divisor of max batch size. For example, if the max batch
2625          size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
2626          * The max batch size in each configuration entry must be no larger than 32.
2627
2628          The camera device doesn't have to support batch mode to achieve high speed video recording;
2629          in that case, batch_size_max will be reported as 1 in each configuration entry.
2630
2631          The fps ranges in this configuration list can only be used to create requests
2632          that are submitted to a high speed camera capture session created by
2633          {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
2634          The fps ranges reported in this metadata must not be used to set up capture requests for
2635          a normal capture session, or they will cause request errors.
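
          A minimal application-side sketch of this flow is shown below. The `characteristics`,
          `device`, `previewSurface`, `recordingSurface`, and `handler` names are illustrative, and
          the size and fps range selection here is arbitrary; see the linked APIs for authoritative
          usage:

              StreamConfigurationMap map = characteristics.get(
                      CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              Size videoSize = map.getHighSpeedVideoSizes()[0];   // one size for all streams
              Range<Integer> fpsRange = map.getHighSpeedVideoFpsRangesFor(videoSize)[0];

              device.createConstrainedHighSpeedCaptureSession(
                      Arrays.asList(previewSurface, recordingSurface),
                      new CameraCaptureSession.StateCallback() {
                          @Override
                          public void onConfigured(CameraCaptureSession session) {
                              try {
                                  CaptureRequest.Builder builder =
                                          device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                                  builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
                                  builder.addTarget(previewSurface);
                                  builder.addTarget(recordingSurface);
                                  // Expand the single request into a batch sized by batch_size_max.
                                  List<CaptureRequest> burst =
                                          ((CameraConstrainedHighSpeedCaptureSession) session)
                                                  .createHighSpeedRequestList(builder.build());
                                  session.setRepeatingBurst(burst, /*callback*/ null, handler);
                              } catch (CameraAccessException e) {
                                  // Handle the error as appropriate for the application.
                              }
                          }
                          @Override
                          public void onConfigureFailed(CameraCaptureSession session) { }
                      }, handler);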
2636          </details>
2637          <hal_details>
2638          All the sizes listed in this configuration will be a subset of the sizes reported by
2639          android.scaler.availableStreamConfigurations for processed non-stalling output formats.
2640          Note that for all high speed video configurations, HAL must be able to support a minimum
2641          of two streams, though the application might choose to configure just one stream.
2642
2643          The HAL may support multiple sensor modes for high speed outputs, for example, a 120fps
2644          sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording. The application
2645          usually starts preview first, then starts recording. To minimize the stutter caused by a
2646          sensor mode switch when recording starts, the application may want to ensure that
2647          the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
2648          the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
2649          For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
2650          [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
2651          do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
2652          recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
2653          values that are suitable for smooth preview streaming. If the HAL sees different fps_max
2654          values that fall into different sensor modes in a sequence of requests, the HAL must
2655          switch sensor modes as quickly as possible to minimize the stutter caused by the switch.
2656          </hal_details>
2657          <tag id="V1" />
2658        </entry>
2659        <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
2660               typedef="boolean" hwlevel="legacy">
2661          <enum>
2662            <value>FALSE</value>
2663            <value>TRUE</value>
2664          </enum>
2665          <description>Whether the camera device supports android.control.aeLock</description>
2666          <details>
2667              Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
2668              list `true`. This includes FULL devices.
2669          </details>
2670          <tag id="BC"/>
2671        </entry>
2672        <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
2673               typedef="boolean" hwlevel="legacy">
2674          <enum>
2675            <value>FALSE</value>
2676            <value>TRUE</value>
2677          </enum>
2678          <description>Whether the camera device supports android.control.awbLock</description>
2679          <details>
2680              Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
2681              always list `true`. This includes FULL devices.
2682          </details>
2683          <tag id="BC"/>
2684        </entry>
2685        <entry name="availableModes" type="byte" visibility="public"
2686            type_notes="List of enums (android.control.mode)." container="array"
2687            typedef="enumList" hwlevel="legacy">
2688          <array>
2689            <size>n</size>
2690          </array>
2691          <description>
2692          List of control modes for android.control.mode that are supported by this camera
2693          device.
2694          </description>
2695          <range>Any value listed in android.control.mode</range>
2696          <details>
2697              This list contains control modes that can be set for the camera device.
2698              LEGACY mode devices will always support AUTO mode. LIMITED and FULL
2699              devices will always support OFF, AUTO modes.
2700          </details>
2701        </entry>
2702      </static>
2703    </section>
2704    <section name="demosaic">
2705      <controls>
2706        <entry name="mode" type="byte" enum="true">
2707          <enum>
2708            <value>FAST
2709            <notes>Minimal or no slowdown of frame rate compared to
2710            Bayer RAW output.</notes></value>
2711            <value>HIGH_QUALITY
2712            <notes>Improved processing quality but the frame rate might be slowed down
2713            relative to raw output.</notes></value>
2714          </enum>
2715          <description>Controls the quality of the demosaicing
2716          processing.</description>
2717          <tag id="FUTURE" />
2718        </entry>
2719      </controls>
2720    </section>
2721    <section name="edge">
2722      <controls>
2723        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
2724          <enum>
2725            <value>OFF
2726            <notes>No edge enhancement is applied.</notes></value>
2727            <value>FAST
2728            <notes>Apply edge enhancement at a quality level that does not slow down frame rate
2729            relative to sensor output. It may be the same as OFF if edge enhancement will
2730            slow down frame rate relative to sensor.</notes></value>
2731            <value>HIGH_QUALITY
2732            <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
2733            </notes></value>
2734            <value optional="true">ZERO_SHUTTER_LAG
2735            <notes>Edge enhancement is applied at different levels for different output streams,
2736            based on resolution. Streams at maximum recording resolution (see {@link
2737            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
2738            edge enhancement applied, while higher-resolution streams have no edge enhancement
2739            applied. The level of edge enhancement for low-resolution streams is tuned so that
2740            frame rate is not impacted, and the quality is equal to or better than FAST (since it
2741            is only applied to lower-resolution outputs, quality may improve from FAST).
2742
2743            This mode is intended to be used by applications operating in a zero-shutter-lag mode
2744            with YUV or PRIVATE reprocessing, where the application continuously captures
2745            high-resolution intermediate buffers into a circular buffer, from which a final image is
2746            produced via reprocessing when a user takes a picture.  For such a use case, the
2747            high-resolution buffers must not have edge enhancement applied to maximize efficiency of
2748            preview and to avoid double-applying enhancement when reprocessed, while low-resolution
2749            buffers (used for recording or preview, generally) need edge enhancement applied for
2750            reasonable preview quality.
2751
2752            This mode is guaranteed to be supported by devices that support either the
2753            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
2754            (android.request.availableCapabilities lists either of those capabilities) and it will
2755            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
2756            </notes></value>
2757          </enum>
2758          <description>Operation mode for edge
2759          enhancement.</description>
2760          <range>android.edge.availableEdgeModes</range>
2761          <details>Edge enhancement improves sharpness and details in the captured image. OFF means
2762          no enhancement will be applied by the camera device.
2763
2764          FAST/HIGH_QUALITY both mean camera device determined enhancement
2765          will be applied. HIGH_QUALITY mode indicates that the
2766          camera device will use the highest-quality enhancement algorithms,
2767          even if it slows down capture rate. FAST means the camera device will
2768          not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
2769          edge enhancement will slow down capture rate. Every output stream will have a similar
2770          amount of enhancement applied.
2771
2772          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2773          buffer of high-resolution images during preview and reprocess image(s) from that buffer
2774          into a final capture when triggered by the user. In this mode, the camera device applies
2775          edge enhancement to low-resolution streams (below maximum recording resolution) to
2776          maximize preview quality, but does not apply edge enhancement to high-resolution streams,
2777          since those will be reprocessed later if necessary.
2778
2779          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
2780          device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
2781          The camera device may adjust its internal edge enhancement parameters for best
2782          image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
2783          </details>
2784          <hal_details>
2785          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
2786          adjust the internal edge enhancement reduction parameters appropriately to get the best
2787          quality images.
2788          </hal_details>
2789          <tag id="V1" />
2790          <tag id="REPROC" />
2791        </entry>
2792        <entry name="strength" type="byte">
2793          <description>Control the amount of edge enhancement
2794          applied to the images</description>
2795          <units>1-10; 10 is maximum sharpening</units>
2796          <tag id="FUTURE" />
2797        </entry>
2798      </controls>
2799      <static>
2800        <entry name="availableEdgeModes" type="byte" visibility="public"
2801               type_notes="list of enums" container="array" typedef="enumList"
2802               hwlevel="full">
2803          <array>
2804            <size>n</size>
2805          </array>
2806          <description>
2807          List of edge enhancement modes for android.edge.mode that are supported by this camera
2808          device.
2809          </description>
2810          <range>Any value listed in android.edge.mode</range>
2811          <details>
2812          Full-capability camera devices must always support OFF; camera devices that support
2813          YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
2814          list FAST.
2815          </details>
2816          <hal_details>
2817          HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
2818          on the camera device, but the underlying implementation can be the same for both modes.
2819          That is, if the highest quality implementation on the camera device does not slow down
2820          capture rate, then FAST and HIGH_QUALITY will generate the same output.
2821          </hal_details>
2822          <tag id="V1" />
2823          <tag id="REPROC" />
2824        </entry>
2825      </static>
2826      <dynamic>
2827        <clone entry="android.edge.mode" kind="controls">
2828          <tag id="V1" />
2829          <tag id="REPROC" />
2830        </clone>
2831      </dynamic>
2832    </section>
2833    <section name="flash">
2834      <controls>
2835        <entry name="firingPower" type="byte">
2836          <description>Power for flash firing/torch</description>
2837          <units>10 is max power; 0 is no flash. Linear</units>
2838          <range>0 - 10</range>
2839          <details>Power for snapshot may use a different scale than
2840          for torch mode. Only one entry for torch mode will be
2841          used</details>
2842          <tag id="FUTURE" />
2843        </entry>
2844        <entry name="firingTime" type="int64">
2845          <description>Firing time of flash relative to start of
2846          exposure</description>
2847          <units>nanoseconds</units>
2848          <range>0-(exposure time-flash duration)</range>
2849          <details>Clamped to (0, exposure time - flash
2850          duration).</details>
2851          <tag id="FUTURE" />
2852        </entry>
2853        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
2854          <enum>
2855            <value>OFF
2856              <notes>
2857              Do not fire the flash for this capture.
2858              </notes>
2859            </value>
2860            <value>SINGLE
2861              <notes>
2862              If the flash is available and charged, fire flash
2863              for this capture.
2864              </notes>
2865            </value>
2866            <value>TORCH
2867              <notes>
2868              Transition flash to continuously on.
2869              </notes>
2870            </value>
2871          </enum>
2872          <description>The desired mode for for the camera device's flash control.</description>
2873          <details>
2874          This control is only effective when a flash unit is available
2875          (`android.flash.info.available == true`).
2876
2877          When this control is used, the android.control.aeMode must be set to ON or OFF.
2878          Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
2879          ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
2880
2881          When set to OFF, the camera device will not fire flash for this capture.
2882
2883          When set to SINGLE, the camera device will fire the flash regardless of the result of
2884          the camera device's auto-exposure routine. In the still capture case, this
2885          control should be used along with the auto-exposure (AE) precapture metering sequence
2886          (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed.
2887
2888          When set to TORCH, the flash will be on continuously. This mode can be used
2889          for use cases such as preview, auto-focus assist, still capture, or video recording.
2890
2891          The flash status will be reported by android.flash.state in the capture result metadata.
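
          For example, a minimal application-side sketch of the SINGLE case is shown below; the
          `stillBuilder` name is illustrative and is assumed to target the still-capture output:

              // Fire the flash for this capture regardless of the AE decision. AE mode must be
              // ON or OFF for FLASH_MODE to take effect (see above).
              stillBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
              stillBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);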
2892          </details>
2893          <tag id="BC" />
2894        </entry>
2895      </controls>
2896      <static>
2897        <namespace name="info">
2898          <entry name="available" type="byte" visibility="public" enum="true"
2899                 typedef="boolean" hwlevel="legacy">
2900            <enum>
2901              <value>FALSE</value>
2902              <value>TRUE</value>
2903            </enum>
2904            <description>Whether this camera device has a
2905            flash unit.</description>
2906            <details>
2907            Will be `false` if no flash is available.
2908
2909            If there is no flash unit, none of the flash controls do
2910            anything.</details>
2911            <tag id="BC" />
2912          </entry>
2913          <entry name="chargeDuration" type="int64">
2914            <description>Time taken before flash can fire
2915            again</description>
2916            <units>nanoseconds</units>
2917            <range>0-1e9</range>
2918            <details>1 second too long/too short for recharge? Should
2919            this be power-dependent?</details>
2920            <tag id="FUTURE" />
2921          </entry>
2922        </namespace>
2923        <entry name="colorTemperature" type="byte">
2924          <description>The x,y whitepoint of the
2925          flash</description>
2926          <units>pair of floats</units>
2927          <range>0-1 for both</range>
2928          <tag id="FUTURE" />
2929        </entry>
2930        <entry name="maxEnergy" type="byte">
2931          <description>Max energy output of the flash for a full
2932          power single flash</description>
2933          <units>lumen-seconds</units>
2934          <range>&amp;gt;= 0</range>
2935          <tag id="FUTURE" />
2936        </entry>
2937      </static>
2938      <dynamic>
2939        <clone entry="android.flash.firingPower" kind="controls">
2940        </clone>
2941        <clone entry="android.flash.firingTime" kind="controls">
2942        </clone>
2943        <clone entry="android.flash.mode" kind="controls"></clone>
2944        <entry name="state" type="byte" visibility="public" enum="true"
2945               hwlevel="limited">
2946          <enum>
2947            <value>UNAVAILABLE
2948            <notes>No flash on camera.</notes></value>
2949            <value>CHARGING
2950            <notes>Flash is charging and cannot be fired.</notes></value>
2951            <value>READY
2952            <notes>Flash is ready to fire.</notes></value>
2953            <value>FIRED
2954            <notes>Flash fired for this capture.</notes></value>
2955            <value>PARTIAL
2956            <notes>Flash partially illuminated this frame.
2957
2958            This is usually due to the next or previous frame having
2959            the flash fire, and the flash spilling into this capture
2960            due to hardware limitations.</notes></value>
2961          </enum>
2962          <description>Current state of the flash
2963          unit.</description>
2964          <details>
2965          When the camera device doesn't have a flash unit
2966          (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
2967          Other states indicate the current flash status.
2968
2969          In certain conditions, this will be available on LEGACY devices:
2970
2971           * Flash-less cameras always return UNAVAILABLE.
2972           * Using android.control.aeMode `==` ON_ALWAYS_FLASH
2973             will always return FIRED.
2974           * Using android.flash.mode `==` TORCH
2975             will always return FIRED.
2976
2977          In all other conditions the state will not be available on
2978          LEGACY devices (i.e. it will be `null`).
2979          </details>
2980        </entry>
2981      </dynamic>
2982    </section>
2983    <section name="hotPixel">
2984      <controls>
2985        <entry name="mode" type="byte" visibility="public" enum="true">
2986          <enum>
2987            <value>OFF
2988              <notes>
2989              No hot pixel correction is applied.
2990
2991              The frame rate must not be reduced relative to sensor raw output
2992              for this option.
2993
2994              The hotpixel map may be returned in android.statistics.hotPixelMap.
2995              </notes>
2996            </value>
2997            <value>FAST
2998              <notes>
2999              Hot pixel correction is applied, without reducing frame
3000              rate relative to sensor raw output.
3001
3002              The hotpixel map may be returned in android.statistics.hotPixelMap.
3003              </notes>
3004            </value>
3005            <value>HIGH_QUALITY
3006              <notes>
3007              High-quality hot pixel correction is applied, at a cost
3008              of possibly reduced frame rate relative to sensor raw output.
3009
3010              The hotpixel map may be returned in android.statistics.hotPixelMap.
3011              </notes>
3012            </value>
3013          </enum>
3014          <description>
3015          Operational mode for hot pixel correction.
3016          </description>
3017          <range>android.hotPixel.availableHotPixelModes</range>
3018          <details>
3019          Hotpixel correction interpolates out, or otherwise removes, pixels
3020          that do not accurately measure the incoming light (i.e. pixels that
3021          are stuck at an arbitrary value or are oversensitive).
3022          </details>
3023          <tag id="V1" />
3024          <tag id="RAW" />
3025        </entry>
3026      </controls>
3027      <static>
3028        <entry name="availableHotPixelModes" type="byte" visibility="public"
3029          type_notes="list of enums" container="array" typedef="enumList">
3030          <array>
3031            <size>n</size>
3032          </array>
3033          <description>
3034          List of hot pixel correction modes for android.hotPixel.mode that are supported by this
3035          camera device.
3036          </description>
3037          <range>Any value listed in android.hotPixel.mode</range>
3038          <details>
3039          FULL mode camera devices will always support FAST.
3040          </details>
3041          <hal_details>
3042          To avoid performance issues, there will be significantly fewer hot
3043          pixels than actual pixels on the camera sensor.
3044          HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
3045          on the camera device, but the underlying implementation can be the same for both modes.
3046          That is, if the highest quality implementation on the camera device does not slow down
3047          capture rate, then FAST and HIGH_QUALITY will generate the same output.
3048          </hal_details>
3049          <tag id="V1" />
3050          <tag id="RAW" />
3051        </entry>
3052      </static>
3053      <dynamic>
3054        <clone entry="android.hotPixel.mode" kind="controls">
3055          <tag id="V1" />
3056          <tag id="RAW" />
3057        </clone>
3058      </dynamic>
3059    </section>
3060    <section name="jpeg">
3061      <controls>
3062        <entry name="gpsLocation" type="byte" visibility="public" synthetic="true"
3063        typedef="location" hwlevel="legacy">
3064          <description>
3065          A location object to use when generating image GPS metadata.
3066          </description>
3067          <details>
3068          Setting a location object in a request will include the GPS coordinates of the location
3069          in any JPEG images captured based on the request. These coordinates can then be
3070          viewed by anyone who receives the JPEG image.
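
          For example, a minimal sketch of attaching a location to a still capture request is shown
          below; the `location` and `stillBuilder` names are illustrative:

              // `location` comes from the platform location APIs (android.location.Location).
              stillBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, location);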
3071          </details>
3072        </entry>
3073        <entry name="gpsCoordinates" type="double" visibility="hidden"
3074        type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
3075        container="array" hwlevel="legacy">
3076          <array>
3077            <size>3</size>
3078          </array>
3079          <description>GPS coordinates to include in output JPEG
3080          EXIF.</description>
3081          <range>(-180 - 180], [-90,90], [-inf, inf]</range>
3082          <tag id="BC" />
3083        </entry>
3084        <entry name="gpsProcessingMethod" type="byte" visibility="hidden"
3085               typedef="string" hwlevel="legacy">
3086          <description>32 characters describing GPS algorithm to
3087          include in EXIF.</description>
3088          <units>UTF-8 null-terminated string</units>
3089          <tag id="BC" />
3090        </entry>
3091        <entry name="gpsTimestamp" type="int64" visibility="hidden" hwlevel="legacy">
3092          <description>Time GPS fix was made to include in
3093          EXIF.</description>
3094          <units>UTC in seconds since January 1, 1970</units>
3095          <tag id="BC" />
3096        </entry>
3097        <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
3098          <description>The orientation for a JPEG image.</description>
3099          <units>Degrees in multiples of 90</units>
3100          <range>0, 90, 180, 270</range>
3101          <details>
3102          The clockwise rotation angle in degrees, relative to the orientation
3103          of the camera, that the JPEG picture needs to be rotated by, to be viewed
3104          upright.
3105
3106          Camera devices may either encode this value into the JPEG EXIF header, or
3107          rotate the image data to match this orientation. When the image data is rotated,
3108          the thumbnail data will also be rotated.
3109
3110          Note that this orientation is relative to the orientation of the camera sensor, given
3111          by android.sensor.orientation.
3112
3113          To translate from the device orientation given by the Android sensor APIs, the following
3114          sample code may be used:
3115
3116              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
3117                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
3118                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
3119
3120                  // Round device orientation to a multiple of 90
3121                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
3122
3123                  // Reverse device orientation for front-facing cameras
3124                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
3125                  if (facingFront) deviceOrientation = -deviceOrientation;
3126
3127                  // Calculate desired JPEG orientation relative to camera orientation to make
3128                  // the image upright relative to the device orientation
3129                  int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
3130
3131                  return jpegOrientation;
3132              }
3133          </details>
3134          <tag id="BC" />
3135        </entry>
3136        <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
3137          <description>Compression quality of the final JPEG
3138          image.</description>
3139          <range>1-100; larger is higher quality</range>
3140          <details>85-95 is the typical usage range.</details>
3141          <tag id="BC" />
3142        </entry>
3143        <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
3144          <description>Compression quality of JPEG
3145          thumbnail.</description>
3146          <range>1-100; larger is higher quality</range>
3147          <tag id="BC" />
3148        </entry>
3149        <entry name="thumbnailSize" type="int32" visibility="public"
3150        container="array" typedef="size" hwlevel="legacy">
3151          <array>
3152            <size>2</size>
3153          </array>
3154          <description>Resolution of embedded JPEG thumbnail.</description>
3155          <range>android.jpeg.availableThumbnailSizes</range>
3156          <details>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
3157          but the captured JPEG will still be a valid image.
3158
3159          For best results, when issuing a request for a JPEG image, the thumbnail size selected
3160          should have the same aspect ratio as the main JPEG output.
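
          For illustration, a minimal sketch (not a framework API) of picking such a size, where
          `Size` is {@link android.util.Size} and `choices` comes from
          android.jpeg.availableThumbnailSizes:

              private Size chooseThumbnailSize(Size[] choices, Size jpegSize) {
                  float target = (float) jpegSize.getWidth() / jpegSize.getHeight();
                  Size best = null;
                  float bestDiff = Float.MAX_VALUE;
                  for (Size s : choices) {
                      if (s.getWidth() == 0 || s.getHeight() == 0) continue; // skip the (0, 0) entry
                      float diff = Math.abs((float) s.getWidth() / s.getHeight() - target);
                      if (best == null || bestDiff > diff) {
                          bestDiff = diff;
                          best = s;
                      }
                  }
                  return best; // at least one non-zero size is always listed
              }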
3161
3162          If the thumbnail image aspect ratio differs from the JPEG primary image aspect
3163          ratio, the camera device creates the thumbnail by cropping it from the primary image.
3164          For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a
3165          16:9 aspect ratio, the primary image will be cropped vertically (letterboxed) to
3166          generate the thumbnail image. The thumbnail image will always have a smaller Field
3167          Of View (FOV) than the primary image when aspect ratios differ.
3168
3169          When an android.jpeg.orientation of non-zero degree is requested,
3170          the camera device will handle thumbnail rotation in one of the following ways:
3171
3172          * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
3173            and keep jpeg and thumbnail image data unrotated.
3174          * Rotate the jpeg and thumbnail image data and not set
3175            {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
3176            case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in
3177            the capture result, so the width and height will be interchanged if a 90 or 270 degree
3178            orientation is requested. LEGACY devices will always report the unrotated thumbnail
3179            size.
3180          </details>
3181          <hal_details>
3182          The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.
3183          The cropping must be done on the primary jpeg image rather than the sensor active array.
3184          The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
3185          thumbnail image cropping.
3186          </hal_details>
3187          <tag id="BC" />
3188        </entry>
3189      </controls>
3190      <static>
3191        <entry name="availableThumbnailSizes" type="int32" visibility="public"
3192        container="array" typedef="size" hwlevel="legacy">
3193          <array>
3194            <size>2</size>
3195            <size>n</size>
3196          </array>
3197          <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
3198          camera device.</description>
3199          <details>
3200          This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
3201          thumbnail should be generated.
3202
3203          The following conditions will be satisfied for this size list:
3204
3205          * The sizes will be sorted by increasing pixel area (width x height).
3206          If several resolutions have the same area, they will be sorted by increasing width.
3207          * The aspect ratio of the largest thumbnail size will be the same as the
3208          aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations.
3209          The largest size is defined as the size that has the largest pixel area
3210          in a given size list.
3211          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
3212          one corresponding size that has the same aspect ratio in availableThumbnailSizes,
3213          and vice versa.
3214          * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
3215          <tag id="BC" />
3216        </entry>
3217        <entry name="maxSize" type="int32" visibility="system">
3218          <description>Maximum size in bytes for the compressed
3219          JPEG buffer</description>
3220          <range>Must be large enough to fit any JPEG produced by
3221          the camera</range>
3222          <details>This is used for sizing the gralloc buffers for
3223          JPEG</details>
3224        </entry>
3225      </static>
3226      <dynamic>
3227        <clone entry="android.jpeg.gpsLocation" kind="controls">
3228        </clone>
3229        <clone entry="android.jpeg.gpsCoordinates" kind="controls">
3230        </clone>
3231        <clone entry="android.jpeg.gpsProcessingMethod"
3232        kind="controls"></clone>
3233        <clone entry="android.jpeg.gpsTimestamp" kind="controls">
3234        </clone>
3235        <clone entry="android.jpeg.orientation" kind="controls">
3236        </clone>
3237        <clone entry="android.jpeg.quality" kind="controls">
3238        </clone>
3239        <entry name="size" type="int32">
3240          <description>The size of the compressed JPEG image, in
3241          bytes</description>
3242          <range>&amp;gt;= 0</range>
3243          <details>If no JPEG output is produced for the request,
3244          this must be 0.
3245
3246          Otherwise, this describes the real size of the compressed
3247          JPEG image placed in the output stream.  More specifically,
3248          if android.jpeg.maxSize = 1000000, and a specific capture
3249          has android.jpeg.size = 500000, then the output buffer from
3250          the JPEG stream will be 1000000 bytes, of which the first
3251          500000 make up the real data.</details>
3252          <tag id="FUTURE" />
3253        </entry>
3254        <clone entry="android.jpeg.thumbnailQuality"
3255        kind="controls"></clone>
3256        <clone entry="android.jpeg.thumbnailSize" kind="controls">
3257        </clone>
3258      </dynamic>
3259    </section>
3260    <section name="lens">
3261      <controls>
3262        <entry name="aperture" type="float" visibility="public" hwlevel="full">
3263          <description>The desired lens aperture size, as a ratio of lens focal length to the
3264          effective aperture diameter.</description>
3265          <units>The f-number (f/N)</units>
3266          <range>android.lens.info.availableApertures</range>
3267          <details>Setting this value is only supported on the camera devices that have a variable
3268          aperture lens.
3269
3270          When this is supported and android.control.aeMode is OFF,
3271          this can be set along with android.sensor.exposureTime,
3272          android.sensor.sensitivity, and android.sensor.frameDuration
3273          to achieve manual exposure control.
3274
3275          The requested aperture value may take several frames to reach the
3276          requested value; the camera device will report the current (intermediate)
3277          aperture size in capture result metadata while the aperture is changing.
3278          While the aperture is still changing, android.lens.state will be set to MOVING.
3279
3280          When this is supported and android.control.aeMode is one of
3281          the ON modes, this will be overridden by the camera device
3282          auto-exposure algorithm; the overridden values are then provided
3283          back to the user in the corresponding result.</details>
3284          <tag id="V1" />
3285        </entry>
3286        <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
3287          <description>
3288          The desired setting for the lens neutral density filter(s).
3289          </description>
3290          <units>Exposure Value (EV)</units>
3291          <range>android.lens.info.availableFilterDensities</range>
3292          <details>
3293          This control will not be supported on most camera devices.
3294
3295          Lens filters are typically used to lower the amount of light the
3296          sensor is exposed to (measured in steps of EV). As used here, an EV
3297          step is the standard logarithmic representation, which is
3298          non-negative and inversely proportional to the amount of light
3299          hitting the sensor.  For example, setting this to 0 would result
3300          in no reduction of the incoming light, and setting this to 2 would
3301          mean that the filter is set to reduce incoming light by two stops
3302          (allowing 1/4 of the prior amount of light to the sensor).
3303
3304          It may take several frames before the lens filter density changes
3305          to the requested value. While the filter density is still changing,
3306          android.lens.state will be set to MOVING.
3307          </details>
3308          <tag id="V1" />
3309        </entry>
3310        <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
3311          <description>
3312          The desired lens focal length; used for optical zoom.
3313          </description>
3314          <units>Millimeters</units>
3315          <range>android.lens.info.availableFocalLengths</range>
3316          <details>
3317          This setting controls the physical focal length of the camera
3318          device's lens. Changing the focal length changes the field of
3319          view of the camera device, and is usually used for optical zoom.
3320
3321          Like android.lens.focusDistance and android.lens.aperture, this
3322          setting won't be applied instantaneously, and it may take several
3323          frames before the lens can change to the requested focal length.
3324          While the focal length is still changing, android.lens.state will
3325          be set to MOVING.
3326
3327          Optical zoom will not be supported on most devices.
3328          </details>
3329          <tag id="V1" />
3330        </entry>
3331        <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
3332          <description>Desired distance to plane of sharpest focus,
3333          measured from frontmost surface of the lens.</description>
3334          <units>See android.lens.info.focusDistanceCalibration for details</units>
3335          <range>&amp;gt;= 0</range>
3336          <details>
3337          This control can be used for setting manual focus, on devices that support
3338          the MANUAL_SENSOR capability and have a variable-focus lens (see
3339          android.lens.info.minimumFocusDistance).
3340
3341          A value of `0.0f` means infinity focus. The value set will be clamped to
3342          `[0.0f, android.lens.info.minimumFocusDistance]`.
3343
3344          Like android.lens.focalLength, this setting won't be applied
3345          instantaneously, and it may take several frames before the lens
3346          can move to the requested focus distance. While the lens is still moving,
3347          android.lens.state will be set to MOVING.
3348
3349          LEGACY devices support at most setting this to `0.0f`
3350          for infinity focus.
3351          </details>
3352          <tag id="BC" />
3353          <tag id="V1" />
3354        </entry>
3355        <entry name="opticalStabilizationMode" type="byte" visibility="public"
3356        enum="true" hwlevel="limited">
3357          <enum>
3358            <value>OFF
3359              <notes>Optical stabilization is unavailable.</notes>
3360            </value>
3361            <value optional="true">ON
3362              <notes>Optical stabilization is enabled.</notes>
3363            </value>
3364          </enum>
3365          <description>
3366          Sets whether the camera device uses optical image stabilization (OIS)
3367          when capturing images.
3368          </description>
3369          <range>android.lens.info.availableOpticalStabilization</range>
3370          <details>
3371          OIS is used to compensate for motion blur due to small
3372          movements of the camera during capture. Unlike digital image
3373          stabilization (android.control.videoStabilizationMode), OIS
3374          makes use of mechanical elements to stabilize the camera
3375          sensor, and thus allows for longer exposure times before
3376          camera shake becomes apparent.
3377
3378          Switching between different optical stabilization modes may take several
3379          frames to initialize; the camera device will report the current mode in
3380          capture result metadata. For example, when "ON" mode is requested, the
3381          optical stabilization mode in the first several capture results may still
3382          be "OFF", and it will become "ON" when the initialization is done.
3383
3384          If a camera device supports both OIS and digital image stabilization
3385          (android.control.videoStabilizationMode), turning both modes on may produce undesirable
3386          interaction, so it is recommended not to enable both at the same time.
3387
3388          Not all devices will support OIS; see
3389          android.lens.info.availableOpticalStabilization for
3390          available controls.
3391          </details>
3392          <tag id="V1" />
3393        </entry>
3394      </controls>
3395      <static>
3396        <namespace name="info">
3397          <entry name="availableApertures" type="float" visibility="public"
3398          container="array" hwlevel="full">
3399            <array>
3400              <size>n</size>
3401            </array>
3402            <description>List of aperture size values for android.lens.aperture that are
3403            supported by this camera device.</description>
3404            <units>The aperture f-number</units>
3405            <details>If the camera device doesn't support a variable lens aperture,
3406            this list will contain only one value, which is the fixed aperture size.
3407
3408            If the camera device supports a variable aperture, the aperture values
3409            in this list will be sorted in ascending order.</details>
3410            <tag id="V1" />
3411          </entry>
3412          <entry name="availableFilterDensities" type="float" visibility="public"
3413          container="array" hwlevel="full">
3414            <array>
3415              <size>n</size>
3416            </array>
3417            <description>
3418            List of neutral density filter values for
3419            android.lens.filterDensity that are supported by this camera device.
3420            </description>
3421            <units>Exposure value (EV)</units>
3422            <range>
3423            Values are &amp;gt;= 0
3424            </range>
3425            <details>
3426            If a neutral density filter is not supported by this camera device,
3427            this list will contain only 0. Otherwise, this list will include every
3428            filter density supported by the camera device, in ascending order.
3429            </details>
3430            <tag id="V1" />
3431          </entry>
3432          <entry name="availableFocalLengths" type="float" visibility="public"
3433          type_notes="The list of available focal lengths"
3434          container="array" hwlevel="legacy">
3435            <array>
3436              <size>n</size>
3437            </array>
3438            <description>
3439            List of focal lengths for android.lens.focalLength that are supported by this camera
3440            device.
3441            </description>
3442            <units>Millimeters</units>
3443            <range>
3444            Values are &amp;gt; 0
3445            </range>
3446            <details>
3447            If optical zoom is not supported, this list will only contain
3448            a single value corresponding to the fixed focal length of the
3449            device. Otherwise, this list will include every focal length supported
3450            by the camera device, in ascending order.
3451            </details>
3452            <tag id="BC" />
3453            <tag id="V1" />
3454          </entry>
3455          <entry name="availableOpticalStabilization" type="byte"
3456          visibility="public" type_notes="list of enums" container="array"
3457          typedef="enumList" hwlevel="limited">
3458            <array>
3459              <size>n</size>
3460            </array>
3461            <description>
3462            List of optical image stabilization (OIS) modes for
3463            android.lens.opticalStabilizationMode that are supported by this camera device.
3464            </description>
3465            <range>Any value listed in android.lens.opticalStabilizationMode</range>
3466            <details>
3467            If OIS is not supported by a given camera device, this list will
3468            contain only OFF.
3469            </details>
3470            <tag id="V1" />
3471          </entry>
3472          <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
3473                 hwlevel="limited">
3474            <description>Hyperfocal distance for this lens.</description>
3475            <units>See android.lens.info.focusDistanceCalibration for details</units>
3476            <range>If lens is fixed focus, &amp;gt;= 0. If lens has focuser unit, the value is
3477            within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
3478            <details>
3479            If the lens is not fixed focus, the camera device will report this
3480            field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
3481            </details>
3482          </entry>
3483          <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
3484                 hwlevel="limited">
3485            <description>Shortest distance from frontmost surface
3486            of the lens that can be brought into sharp focus.</description>
3487            <units>See android.lens.info.focusDistanceCalibration for details</units>
3488            <range>&amp;gt;= 0</range>
3489            <details>If the lens is fixed-focus, this will be
3490            0.</details>
3491            <hal_details>Mandatory for FULL devices. LIMITED devices
3492            must always set this value to 0 for fixed-focus lenses, and may omit
3493            the minimum focus distance otherwise.
3494
3495            This field is also mandatory for all devices advertising
3496            the MANUAL_SENSOR capability.</hal_details>
3497            <tag id="V1" />
3498          </entry>
3499          <entry name="shadingMapSize" type="int32" visibility="hidden"
3500                 type_notes="width and height (N, M) of lens shading map provided by the camera device."
3501                 container="array" typedef="size" hwlevel="full">
3502            <array>
3503              <size>2</size>
3504            </array>
3505            <description>Dimensions of lens shading map.</description>
3506            <range>Both values &amp;gt;= 1</range>
3507            <details>
3508            The map should be on the order of 30-40 rows and columns, and
3509            must be smaller than 64x64.
3510            </details>
3511            <tag id="V1" />
3512          </entry>
3513          <entry name="focusDistanceCalibration" type="byte" visibility="public"
3514                 enum="true" hwlevel="limited">
3515            <enum>
3516              <value>UNCALIBRATED
3517                <notes>
3518                The lens focus distance is not accurate, and the units used for
3519                android.lens.focusDistance do not correspond to any physical units.
3520
3521                Setting the lens to the same focus distance on separate occasions may
3522                result in a different real focus distance, depending on factors such
3523                as the orientation of the device, the age of the focusing mechanism,
3524                and the device temperature. The focus distance value will still be
3525                in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
3526                represents the farthest focus.
3527                </notes>
3528              </value>
3529              <value>APPROXIMATE
3530                <notes>
3531                The lens focus distance is measured in diopters.
3532
3533                However, setting the lens to the same focus distance
3534                on separate occasions may result in a different real
3535                focus distance, depending on factors such as the
3536                orientation of the device, the age of the focusing
3537                mechanism, and the device temperature.
3538                </notes>
3539              </value>
3540              <value>CALIBRATED
3541                <notes>
3542                The lens focus distance is measured in diopters, and
3543                is calibrated.
3544
3545                The lens mechanism is calibrated so that setting the
3546                same focus distance is repeatable on multiple
3547                occasions with good accuracy, and the focus distance
3548                corresponds to the real physical distance to the plane
3549                of best focus.
3550                </notes>
3551              </value>
3552            </enum>
3553            <description>The lens focus distance calibration quality.</description>
3554            <details>
3555            The lens focus distance calibration quality determines the reliability of
3556            focus related metadata entries, i.e. android.lens.focusDistance,
3557            android.lens.focusRange, android.lens.info.hyperfocalDistance, and
3558            android.lens.info.minimumFocusDistance.
3559
3560            APPROXIMATE and CALIBRATED devices report the focus metadata in
3561            units of diopters (1/meter), so `0.0f` represents focusing at infinity,
3562            and increasing positive numbers represent focusing closer and closer
3563            to the camera device. The focus distance control also uses diopters
3564            on these devices.
3565
3566            UNCALIBRATED devices do not use units that are directly comparable
3567            to any real physical measurement, but `0.0f` still represents farthest
3568            focus, and android.lens.info.minimumFocusDistance represents the
3569            nearest focus the device can achieve.
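
            As an illustrative sketch only (assuming an APPROXIMATE or CALIBRATED device, and
            `requestBuilder` being a pre-built {@link android.hardware.camera2.CaptureRequest.Builder}),
            a desired focus distance in meters converts to this key's diopter units by taking the
            reciprocal:

                float focusDistanceMeters = 0.5f;                          // desired plane of sharp focus
                float focusDistanceDiopters = 1.0f / focusDistanceMeters;  // 2.0 diopters
                requestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                        CameraMetadata.CONTROL_AF_MODE_OFF);               // manual focus needs AF off
                requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, focusDistanceDiopters);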
3570            </details>
3571            <hal_details>
3572            For devices that advertise APPROXIMATE quality or higher, diopters 0 (infinity
3573            focus) must work. When autofocus is disabled (android.control.afMode == OFF)
3574            and the lens focus distance is set to 0 diopters
3575            (android.lens.focusDistance == 0), the lens will move to focus at infinity
3576            and remain stably focused at infinity even if the device tilts. It may take the
3577            lens some time to move; during the move the lens state should be MOVING and
3578            the output diopter value should be changing toward 0.
3579            </hal_details>
3580          <tag id="V1" />
3581        </entry>
3582        </namespace>
3583        <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
3584          <enum>
3585            <value>FRONT
3586            <notes>
3587              The camera device faces the same direction as the device's screen.
3588            </notes></value>
3589            <value>BACK
3590            <notes>
3591              The camera device faces the opposite direction as the device's screen.
3592            </notes></value>
3593            <value>EXTERNAL
3594            <notes>
3595              The camera device is an external camera, and has no fixed facing relative to the
3596              device's screen.
3597            </notes></value>
3598          </enum>
3599          <description>Direction the camera faces relative to
3600          device screen.</description>
3601        </entry>
3602        <entry name="poseRotation" type="float" visibility="public"
3603               container="array">
3604          <array>
3605            <size>4</size>
3606          </array>
3607          <description>
3608            The orientation of the camera relative to the sensor
3609            coordinate system.
3610          </description>
3611          <units>
3612            Quaternion coefficients
3613          </units>
3614          <details>
3615            The four coefficients that describe the quaternion
3616            rotation from the Android sensor coordinate system to a
3617            camera-aligned coordinate system where the X-axis is
3618            aligned with the long side of the image sensor, the Y-axis
3619            is aligned with the short side of the image sensor, and
3620            the Z-axis is aligned with the optical axis of the sensor.
3621
3622            To convert from the quaternion coefficients `(x,y,z,w)`
3623            to the axis of rotation `(a_x, a_y, a_z)` and rotation
3624            amount `theta`, the following formulas can be used:
3625
3626                theta = 2 * acos(w)
3627                a_x = x / sin(theta/2)
3628                a_y = y / sin(theta/2)
3629                a_z = z / sin(theta/2)
3630
3631            To create a 3x3 rotation matrix that applies the rotation
3632            defined by this quaternion, the following matrix can be
3633            used:
3634
3635                R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
3636                           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
3637                           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
3638
3639             This matrix can then be used to apply the rotation to a
3640             column vector point with
3641
3642               `p' = Rp`
3643
3644             where `p` is in the device sensor coordinate system, and
3645             `p'` is in the camera-oriented coordinate system.
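
            As an illustrative sketch (a hypothetical helper, not a framework API), the rotation
            matrix above can be built directly from the coefficients `(x,y,z,w)` reported by this
            key:

                // Returns the row-major 3x3 rotation matrix equivalent to the
                // quaternion (x, y, z, w).
                static float[] quaternionToRotationMatrix(float x, float y, float z, float w) {
                    return new float[] {
                        1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w,
                        2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w,
                        2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y
                    };
                }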
3646          </details>
3647          <tag id="DEPTH" />
3648        </entry>
3649        <entry name="poseTranslation" type="float" visibility="public"
3650               container="array">
3651          <array>
3652            <size>3</size>
3653          </array>
3654          <description>Position of the camera optical center.</description>
3655          <units>Meters</units>
3656          <details>
3657            The position of the camera device's lens optical center,
3658            as a three-dimensional vector `(x,y,z)`, relative to the
3659            optical center of the largest camera device facing in the
3660            same direction as this camera, in the {@link
3661            android.hardware.SensorEvent Android sensor coordinate
3662            axes}. Note that only the axis definitions are shared with
3663            the sensor coordinate system, but not the origin.
3664
3665            If this device is the largest or only camera device with a
3666            given facing, then this position will be `(0, 0, 0)`; a
3667            camera device with a lens optical center located 3 cm from
3668            the main sensor along the +X axis (to the right from the
3669            user's perspective) will report `(0.03, 0, 0)`.
3670
3671            To transform pixel coordinates between two cameras
3672            facing the same direction, first the source camera
3673            android.lens.radialDistortion must be corrected for.  Then
3674            the source camera android.lens.intrinsicCalibration needs
3675            to be applied, followed by the android.lens.poseRotation
3676            of the source camera, the translation of the source camera
3677            relative to the destination camera, the
3678            android.lens.poseRotation of the destination camera, and
3679            finally the inverse of android.lens.intrinsicCalibration
3680            of the destination camera. This obtains a
3681            radial-distortion-free coordinate in the destination
3682            camera pixel coordinates.
3683
3684            To compare this against a real image from the destination
3685            camera, the destination camera image then needs to be
3686            corrected for radial distortion before comparison or
3687            sampling.
3688          </details>
3689          <tag id="DEPTH" />
3690        </entry>
3691      </static>
3692      <dynamic>
3693        <clone entry="android.lens.aperture" kind="controls">
3694          <tag id="V1" />
3695        </clone>
3696        <clone entry="android.lens.filterDensity" kind="controls">
3697          <tag id="V1" />
3698        </clone>
3699        <clone entry="android.lens.focalLength" kind="controls">
3700          <tag id="BC" />
3701        </clone>
3702        <clone entry="android.lens.focusDistance" kind="controls">
3703          <details>Should be zero for fixed-focus cameras</details>
3704          <tag id="BC" />
3705        </clone>
3706        <entry name="focusRange" type="float" visibility="public"
3707        type_notes="Range of scene distances that are in focus"
3708        container="array" typedef="pairFloatFloat" hwlevel="limited">
3709          <array>
3710            <size>2</size>
3711          </array>
3712          <description>The range of scene distances that are in
3713          sharp focus (depth of field).</description>
3714          <units>A pair of focus distances in diopters: (near,
3715          far); see android.lens.info.focusDistanceCalibration for details.</units>
3716          <range>&amp;gt;=0</range>
3717          <details>If variable focus is not supported, the camera device can still report a
3718          fixed depth of field range.</details>
3719          <tag id="BC" />
3720        </entry>
3721        <clone entry="android.lens.opticalStabilizationMode"
3722        kind="controls">
3723          <tag id="V1" />
3724        </clone>
3725        <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
3726          <enum>
3727            <value>STATIONARY
3728              <notes>
3729              The lens parameters (android.lens.focalLength, android.lens.focusDistance,
3730              android.lens.filterDensity and android.lens.aperture) are not changing.
3731              </notes>
3732            </value>
3733            <value>MOVING
3734              <notes>
3735              One or several of the lens parameters
3736              (android.lens.focalLength, android.lens.focusDistance,
3737              android.lens.filterDensity or android.lens.aperture) is
3738              currently changing.
3739              </notes>
3740            </value>
3741          </enum>
3742          <description>Current lens status.</description>
3743          <details>
3744          For lens parameters android.lens.focalLength, android.lens.focusDistance,
3745          android.lens.filterDensity and android.lens.aperture, when changes are requested,
3746          they may take several frames to reach the requested values. This state indicates
3747          the current status of the lens parameters.
3748
3749          When the state is STATIONARY, the lens parameters are not changing. This could be
3750          either because the parameters are all fixed, or because the lens has had enough
3751          time to reach the most recently-requested values.
3752          If all of these lens parameters are not changeable for a camera device, as listed below:
3753
3754          * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
3755          android.lens.focusDistance parameter will always be 0.
3756          * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
3757          which means the optical zoom is not supported.
3758          * No ND filter (android.lens.info.availableFilterDensities contains only 0).
3759          * Fixed aperture (android.lens.info.availableApertures contains single value).
3760
3761          Then this state will always be STATIONARY.
3762
3763          When the state is MOVING, it indicates that at least one of the lens parameters
3764          is changing.
3765          </details>
3766          <tag id="V1" />
3767        </entry>
3768        <clone entry="android.lens.poseRotation" kind="static">
3769        </clone>
3770        <clone entry="android.lens.poseTranslation" kind="static">
3771        </clone>
3772      </dynamic>
3773      <static>
3774        <entry name="intrinsicCalibration" type="float" visibility="public"
3775               container="array">
3776          <array>
3777            <size>5</size>
3778          </array>
3779          <description>
3780            The parameters for this camera device's intrinsic
3781            calibration.
3782          </description>
3783          <units>
3784            Pixels in the
3785            android.sensor.info.preCorrectionActiveArraySize
3786            coordinate system.
3787          </units>
3788          <details>
3789            The five calibration parameters that describe the
3790            transform from camera-centric 3D coordinates to sensor
3791            pixel coordinates:
3792
3793                [f_x, f_y, c_x, c_y, s]
3794
3795            Where `f_x` and `f_y` are the horizontal and vertical
3796            focal lengths, `[c_x, c_y]` is the position of the optical
3797            axis, and `s` is a skew parameter for the sensor plane not
3798            being aligned with the lens plane.
3799
3800            These are typically used within a transformation matrix K:
3801
3802                K = [ f_x,   s, c_x,
3803                       0, f_y, c_y,
3804                       0,   0,   1 ]
3805
3806            which can then be combined with the camera pose rotation
3807            `R` and translation `t` (android.lens.poseRotation and
3808            android.lens.poseTranslation, respectively) to calculate the
3809            complete transform from world coordinates to pixel
3810            coordinates:
3811
3812                P = [ K 0   * [ R t
3813                     0 1 ]     0 1 ]
3814
3815            and with `p_w` being a point in the world coordinate system
3816            and `p_s` being a point in the camera active pixel array
3817            coordinate system, and with the mapping including the
3818            homogeneous division by z:
3819
3820                p_h = (x_h, y_h, z_h, w_h) = P p_w
3821                p_s = p_h / z_h
3822
3823            so `[x_s, y_s]` are the pixel coordinates of the world
3824            point, `z_s = 1`, and `w_s` is a measurement of disparity
3825            (depth) in pixel coordinates.
3826
3827            Note that the coordinate system for this transform is the
3828            android.sensor.info.preCorrectionActiveArraySize system,
3829            where `(0,0)` is the top-left of the
3830            preCorrectionActiveArraySize rectangle. Once the pose and
3831            intrinsic calibration transforms have been applied to a
3832            world point, then the android.lens.radialDistortion
3833            transform needs to be applied, and the result adjusted to
3834            be in the android.sensor.info.activeArraySize coordinate
3835            system (where `(0, 0)` is the top-left of the
3836            activeArraySize rectangle), to determine the final pixel
3837            coordinate of the world point for processed (non-RAW)
3838            output buffers.
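
            As an illustrative sketch (a hypothetical helper, not a framework API), a point in
            camera-centric 3D coordinates can be projected onto the
            preCorrectionActiveArraySize pixel grid with the five parameters of this key, before
            any radial distortion is applied:

                // intrinsics = [f_x, f_y, c_x, c_y, s]; (x, y, z) is a camera-centric
                // 3D point with z > 0.
                static float[] projectToPixels(float[] intrinsics, float x, float y, float z) {
                    float fx = intrinsics[0], fy = intrinsics[1];
                    float cx = intrinsics[2], cy = intrinsics[3], s = intrinsics[4];
                    float xPix = (fx * x + s * y) / z + cx;  // first row of K, then divide by z
                    float yPix = (fy * y) / z + cy;          // second row of K, then divide by z
                    return new float[] { xPix, yPix };
                }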
3839          </details>
3840          <tag id="DEPTH" />
3841        </entry>
3842        <entry name="radialDistortion" type="float" visibility="public"
3843               container="array">
3844          <array>
3845            <size>6</size>
3846          </array>
3847          <description>
3848            The correction coefficients to correct for this camera device's
3849            radial and tangential lens distortion.
3850          </description>
3851          <units>
3852            Unitless coefficients.
3853          </units>
3854          <details>
3855            Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
3856            kappa_3]` and two tangential distortion coefficients
3857            `[kappa_4, kappa_5]` that can be used to correct the
3858            lens's geometric distortion with the mapping equations:
3859
3860                 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
3861                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
3862                 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
3863                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
3864
3865            Here, `[x_c, y_c]` are the coordinates to sample in the
3866            input image that correspond to the pixel values in the
3867            corrected image at the coordinate `[x_i, y_i]`:
3868
3869                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
3870
3871            The pixel coordinates are defined in a normalized
3872            coordinate system related to the
3873            android.lens.intrinsicCalibration calibration fields.
3874            Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
3875            lens optical center `[c_x, c_y]`. The maximum magnitudes
3876            of both x and y coordinates are normalized to be 1 at the
3877            edge further from the optical center, so the range
3878            for both dimensions is `-1 &lt;= x &lt;= 1`.
3879
3880            Finally, `r` represents the radial distance from the
3881            optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
3882            is therefore no larger than `|r| &lt;= sqrt(2)`.
3883
3884            The distortion model used is the Brown-Conrady model.
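
            As an illustrative sketch (a hypothetical helper, not a framework API), the mapping
            above from a corrected, normalized coordinate `[x_i, y_i]` to the source coordinate
            `[x_c, y_c]` to sample in the input image:

                // kappa = [kappa_0 .. kappa_5] as reported by this key; (xI, yI) is a
                // normalized coordinate with (0, 0) at the optical center.
                static float[] distort(float[] kappa, float xI, float yI) {
                    float r2 = xI * xI + yI * yI;
                    float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2
                            + kappa[3] * r2 * r2 * r2;
                    float xC = xI * radial + kappa[4] * (2 * xI * yI)
                            + kappa[5] * (r2 + 2 * xI * xI);
                    float yC = yI * radial + kappa[5] * (2 * xI * yI)
                            + kappa[4] * (r2 + 2 * yI * yI);
                    return new float[] { xC, yC };
                }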
3885          </details>
3886          <tag id="DEPTH" />
3887        </entry>
3888      </static>
3889      <dynamic>
3890        <clone entry="android.lens.intrinsicCalibration" kind="static">
3891        </clone>
3892        <clone entry="android.lens.radialDistortion" kind="static">
3893        </clone>
3894      </dynamic>
3895    </section>
3896    <section name="noiseReduction">
3897      <controls>
3898        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
3899          <enum>
3900            <value>OFF
3901            <notes>No noise reduction is applied.</notes></value>
3902            <value>FAST
3903            <notes>Noise reduction is applied without reducing frame rate relative to sensor
3904            output. It may be the same as OFF if noise reduction will reduce frame rate
3905            relative to sensor output.</notes></value>
3906            <value>HIGH_QUALITY
3907            <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
3908            rate relative to sensor output.</notes></value>
3909            <value optional="true">MINIMAL
3910            <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
3911            sensor output. </notes></value>
3912            <value optional="true">ZERO_SHUTTER_LAG
3913
3914            <notes>Noise reduction is applied at different levels for different output streams,
3915            based on resolution. Streams at maximum recording resolution (see {@link
3916            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
3917            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
3918            noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
3919            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
3920            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
3921            quality may improve from FAST).
3922
3923            This mode is intended to be used by applications operating in a zero-shutter-lag mode
3924            with YUV or PRIVATE reprocessing, where the application continuously captures
3925            high-resolution intermediate buffers into a circular buffer, from which a final image is
3926            produced via reprocessing when a user takes a picture.  For such a use case, the
3927            high-resolution buffers must not have noise reduction applied to maximize efficiency of
3928            preview and to avoid over-applying noise filtering when reprocessing, while
3929            low-resolution buffers (used for recording or preview, generally) need noise reduction
3930            applied for reasonable preview quality.
3931
3932            This mode is guaranteed to be supported by devices that support either the
3933            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
3934            (android.request.availableCapabilities lists either of those capabilities) and it will
3935            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
3936            </notes></value>
3937          </enum>
3938          <description>Mode of operation for the noise reduction algorithm.</description>
3939          <range>android.noiseReduction.availableNoiseReductionModes</range>
3940          <details>The noise reduction algorithm attempts to improve image quality by removing
3941          excessive noise added by the capture process, especially in dark conditions.
3942
3943          OFF means no noise reduction will be applied by the camera device, for both raw and
3944          YUV domain.
3945
3946          MINIMAL means that only basic noise reduction is enabled in the sensor raw domain, to remove
3947          demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
3948          This mode is optional and may not be supported by all devices. The application should check
3949          android.noiseReduction.availableNoiseReductionModes before using it.
3950
3951          FAST/HIGH_QUALITY both mean camera device determined noise filtering
3952          will be applied. HIGH_QUALITY mode indicates that the camera device
3953          will use the highest-quality noise filtering algorithms,
3954          even if it slows down capture rate. FAST means the camera device will not
3955          slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
3956          MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
3957          Every output stream will have a similar amount of enhancement applied.
3958
3959          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
3960          buffer of high-resolution images during preview and reprocess image(s) from that buffer
3961          into a final capture when triggered by the user. In this mode, the camera device applies
3962          noise reduction to low-resolution streams (below maximum recording resolution) to maximize
3963          preview quality, but does not apply noise reduction to high-resolution streams, since
3964          those will be reprocessed later if necessary.
3965
3966          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
3967          will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
3968          may adjust the noise reduction parameters for best image quality based on the
3969          android.reprocess.effectiveExposureFactor if it is set.
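
          For illustration, a minimal sketch of the availability check for MINIMAL described
          above; `characteristics` and `requestBuilder` are assumed to be a
          {@link android.hardware.camera2.CameraCharacteristics} and a pre-built
          {@link android.hardware.camera2.CaptureRequest.Builder}:

              int[] modes = characteristics.get(
                      CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
              boolean minimalSupported = false;
              for (int mode : modes) {
                  if (mode == CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL) {
                      minimalSupported = true;
                  }
              }
              // Fall back to FAST, which is always listed, if MINIMAL is unavailable.
              requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
                      minimalSupported ? CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL
                                       : CameraMetadata.NOISE_REDUCTION_MODE_FAST);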
3970          </details>
3971          <hal_details>
3972          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
3973          adjust the internal noise reduction parameters appropriately to get the best quality
3974          images.
3975          </hal_details>
3976          <tag id="V1" />
3977          <tag id="REPROC" />
3978        </entry>
3979        <entry name="strength" type="byte">
3980          <description>Control the amount of noise reduction
3981          applied to the images</description>
3982          <units>1-10; 10 is max noise reduction</units>
3983          <range>1 - 10</range>
3984          <tag id="FUTURE" />
3985        </entry>
3986      </controls>
3987      <static>
3988        <entry name="availableNoiseReductionModes" type="byte" visibility="public"
3989        type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
3990          <array>
3991            <size>n</size>
3992          </array>
3993          <description>
3994          List of noise reduction modes for android.noiseReduction.mode that are supported
3995          by this camera device.
3996          </description>
3997          <range>Any value listed in android.noiseReduction.mode</range>
3998          <details>
3999          Full-capability camera devices will always support OFF and FAST.
4000
4001          Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
4002          ZERO_SHUTTER_LAG.
4003
4004          Legacy-capability camera devices will only support FAST mode.
4005          </details>
4006          <hal_details>
4007          HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
4008          on the camera device, but the underlying implementation can be the same for both modes.
4009          That is, if the highest quality implementation on the camera device does not slow down
4010          capture rate, then FAST and HIGH_QUALITY will generate the same output.
4011          </hal_details>
4012          <tag id="V1" />
4013          <tag id="REPROC" />
4014        </entry>
4015      </static>
4016      <dynamic>
4017        <clone entry="android.noiseReduction.mode" kind="controls">
4018          <tag id="V1" />
4019          <tag id="REPROC" />
4020        </clone>
4021      </dynamic>
4022    </section>
4023    <section name="quirks">
4024      <static>
4025        <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
4026          <description>If set to 1, the camera service does not
4027          scale 'normalized' coordinates with respect to the crop
4028          region. This applies to metering input (a{e,f,wb}Region)
4029          and output (face rectangles).</description>
4030          <details>Normalized coordinates refer to those in the
4031          (-1000,1000) range mentioned in the
4032          android.hardware.Camera API.
4033
4034          HAL implementations should instead always use and emit
4035          sensor array-relative coordinates for all region data. Does
4036          not need to be listed in static metadata. Support will be
4037          removed in future versions of camera service.</details>
4038        </entry>
4039        <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
4040          <description>If set to 1, then the camera service always
4041          switches to FOCUS_MODE_AUTO before issuing a AF
4042          trigger.</description>
4043          <details>HAL implementations should implement AF trigger
4044          modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
4045          CONTINUOUS_PICTURE modes instead of using this flag. Does
4046          not need to be listed in static metadata. Support will be
4047          removed in future versions of camera service</details>
4048        </entry>
4049        <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
4050          <description>If set to 1, the camera service uses
4051          CAMERA2_PIXEL_FORMAT_ZSL instead of
4052          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
4053          shutter lag stream</description>
4054          <details>HAL implementations should use gralloc usage flags
4055          to determine that a stream will be used for
4056          zero-shutter-lag, instead of relying on an explicit
4057          format setting. Does not need to be listed in static
4058          metadata. Support will be removed in future versions of
4059          camera service.</details>
4060        </entry>
4061        <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
4062          <description>
4063          If set to 1, the HAL will always split result
4064          metadata for a single capture into multiple buffers,
4065          returned using multiple process_capture_result calls.
4066          </description>
4067          <details>
4068          Does not need to be listed in static
4069          metadata. Support for partial results will be reworked in
4070          future versions of camera service. This quirk will stop
4071          working at that point; DO NOT USE without careful
4072          consideration of future support.
4073          </details>
4074          <hal_details>
4075          Refer to `camera3_capture_result::partial_result`
4076          for information on how to implement partial results.
4077          </hal_details>
4078        </entry>
4079      </static>
4080      <dynamic>
4081        <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
4082          <enum>
4083            <value>FINAL
4084            <notes>The last or only metadata result buffer
4085            for this capture.</notes>
4086            </value>
4087            <value>PARTIAL
4088            <notes>A partial buffer of result metadata for this
4089            capture. More result buffers for this capture will be sent
4090            by the camera device, the last of which will be marked
4091            FINAL.</notes>
4092            </value>
4093          </enum>
4094          <description>
4095          Whether a result given to the framework is the
4096          final one for the capture, or only a partial that contains a
4097          subset of the full set of dynamic metadata
4098          values.</description>
4099          <range>Optional. Default value is FINAL.</range>
4100          <details>
4101          The entries in the result metadata buffers for a
4102          single capture may not overlap, except for this entry. The
4103          FINAL buffers must retain FIFO ordering relative to the
4104          requests that generate them, so the FINAL buffer for frame 3 must
4105          always be sent to the framework after the FINAL buffer for frame 2, and
4106          before the FINAL buffer for frame 4. PARTIAL buffers may be returned
4107          in any order relative to other frames, but all PARTIAL buffers for a given
4108          capture must arrive before the FINAL buffer for that capture. This entry may
4109          only be used by the camera device if quirks.usePartialResult is set to 1.
4110          </details>
4111          <hal_details>
4112          Refer to `camera3_capture_result::partial_result`
4113          for information on how to implement partial results.
4114          </hal_details>
4115        </entry>
4116      </dynamic>
4117    </section>
4118    <section name="request">
4119      <controls>
4120        <entry name="frameCount" type="int32" visibility="system" deprecated="true">
4121          <description>A frame counter set by the framework. Must
4122          be maintained unchanged in output frame. This value monotonically
4123          increases with every new result (that is, each new result has a unique
4124          frameCount value).
4125          </description>
4126          <units>incrementing integer</units>
4127          <range>Any int.</range>
4128        </entry>
4129        <entry name="id" type="int32" visibility="hidden">
4130          <description>An application-specified ID for the current
4131          request. Must be maintained unchanged in output
4132          frame</description>
4133          <units>arbitrary integer assigned by application</units>
4134          <range>Any int</range>
4135          <tag id="V1" />
4136        </entry>
4137        <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
4138               container="array">
4139          <array>
4140            <size>n</size>
4141          </array>
4142          <description>List which camera reprocess stream is used
4143          for the source of reprocessing data.</description>
4144          <units>List of camera reprocess stream IDs</units>
4145          <range>
4146          Typically, only one entry is allowed; it must be a valid reprocess stream ID.
4147          </range>
4148          <details>Only meaningful when android.request.type ==
4149          REPROCESS. Ignored otherwise</details>
4150          <tag id="HAL2" />
4151        </entry>
4152        <entry name="metadataMode" type="byte" visibility="system"
4153               enum="true">
4154          <enum>
4155            <value>NONE
4156            <notes>No metadata should be produced on output, except
4157            for application-bound buffer data. If no
4158            application-bound streams exist, no frame should be
4159            placed in the output frame queue. If such streams
4160            exist, a frame should be placed on the output queue
4161            with null metadata but with the necessary output buffer
4162            information. Timestamp information should still be
4163            included with any output stream buffers</notes></value>
4164            <value>FULL
4165            <notes>All metadata should be produced. Statistics will
4166            only be produced if they are separately
4167            enabled</notes></value>
4168          </enum>
4169          <description>How much metadata to produce on
4170          output</description>
4171          <tag id="FUTURE" />
4172        </entry>
4173        <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
4174               container="array">
4175          <array>
4176            <size>n</size>
4177          </array>
4178          <description>Lists which camera output streams image data
4179          from this capture must be sent to</description>
4180          <units>List of camera stream IDs</units>
4181          <range>List must only include streams that have been
4182          created</range>
4183          <details>If no output streams are listed, then the image
4184          data should simply be discarded. The image data must
4185          still be captured for metadata and statistics production,
4186          and the lens and flash must operate as requested.</details>
4187          <tag id="HAL2" />
4188        </entry>
4189        <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
4190          <enum>
4191            <value>CAPTURE
4192            <notes>Capture a new image from the imaging hardware,
4193            and process it according to the
4194            settings</notes></value>
4195            <value>REPROCESS
4196            <notes>Process previously captured data; the
4197            android.request.inputStreams parameter determines the
4198            source reprocessing stream. TODO: Mark dynamic metadata
4199            needed for reprocessing with [RP]</notes></value>
4200          </enum>
4201          <description>The type of the request; either CAPTURE or
4202          REPROCESS. For HAL3, this tag is redundant.
4203          </description>
4204          <tag id="HAL2" />
4205        </entry>
4206      </controls>
4207      <static>
4208        <entry name="maxNumOutputStreams" type="int32" visibility="hidden"
4209        container="array" hwlevel="legacy">
4210          <array>
4211            <size>3</size>
4212          </array>
4213          <description>The maximum numbers of different types of output streams
4214          that can be configured and used simultaneously by a camera device.
4215          </description>
4216          <range>
4217          For processed (and stalling) format streams, &amp;gt;= 1.
4218
4219          For Raw format (either stalling or non-stalling) streams, &amp;gt;= 0.
4220
4221          For processed (but not stalling) format streams, &amp;gt;= 3
4222          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4223          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4224          </range>
4225          <details>
4226          This is a 3 element tuple that contains the max number of output simultaneous
4227          streams for raw sensor, processed (but not stalling), and processed (and stalling)
4228          formats respectively. For example, assuming that JPEG is typically a processed and
4229          stalling stream, if max raw sensor format output stream number is 1, max YUV streams
4230          number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
4231
4232          This lists the upper bound of the number of output streams supported by
4233          the camera device. Using more streams simultaneously may require more hardware and
4234          CPU resources that will consume more power. The image format for an output stream can
4235          be any supported format provided by android.scaler.availableStreamConfigurations.
4236          The formats defined in android.scaler.availableStreamConfigurations can be categorized
4237          into the 3 stream types below:
4238
4239          * Processed (and stalling): any non-RAW format with a stallDurations &amp;gt; 0.
4240            Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.
4241          * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
4242            android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
4243            RAW12}.
4244          * Processed (but not-stalling): any non-RAW format without a stall duration.
4245            Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
4246            {@link android.graphics.ImageFormat#NV21 NV21}, or
4247            {@link android.graphics.ImageFormat#YV12 YV12}.
4248          </details>
4249          <tag id="BC" />
4250        </entry>
4251        <entry name="maxNumOutputRaw" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
4252          <description>The maximum number of output streams
4253          that can be configured and used simultaneously by a camera device
4254          for any `RAW` formats.
4255          </description>
4256          <range>
4257          &amp;gt;= 0
4258          </range>
4259          <details>
4260          This value contains the max number of output simultaneous
4261          streams from the raw sensor.
4262
4263          This lists the upper bound of the number of output streams supported by
4264          the camera device. Using more streams simultaneously may require more hardware and
4265          CPU resources that will consume more power. The image format for this kind of output stream can
4266          be any supported `RAW` format provided by android.scaler.streamConfigurationMap.
4267
4268          In particular, a `RAW` format is typically one of:
4269
4270          * {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}
4271          * {@link android.graphics.ImageFormat#RAW10 RAW10}
4272          * {@link android.graphics.ImageFormat#RAW12 RAW12}
4273
4274          LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
4275          never support raw streams.
4276          </details>
4277        </entry>
4278        <entry name="maxNumOutputProc" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
4279          <description>The maximum number of output streams
4280          that can be configured and used simultaneously by a camera device
4281          for any processed (but not-stalling) formats.
4282          </description>
4283          <range>
4284          &amp;gt;= 3
4285          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4286          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4287          </range>
4288          <details>
4289          This value contains the max number of output simultaneous
4290          streams for any processed (but not-stalling) formats.
4291
4292          This lists the upper bound of the number of output streams supported by
4293          the camera device. Using more streams simultaneously may require more hardware and
4294          CPU resources that will consume more power. The image format for this kind of output stream can
4295          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4296
4297          Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
4298          Typically:
4299
4300          * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}
4301          * {@link android.graphics.ImageFormat#NV21 NV21}
4302          * {@link android.graphics.ImageFormat#YV12 YV12}
4303          * Implementation-defined formats, i.e. {@link
4304            android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
4305
4306          For full guarantees, query {@link
4307          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4308          processed format -- it will return 0 for a non-stalling stream.
4309
4310          LEGACY devices will support at least 2 processing/non-stalling streams.
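
          As a rough sketch of the stall-duration query described above (`characteristics` is
          assumed to be this device's CameraCharacteristics, and the size is an arbitrary
          example that should instead come from getOutputSizes):

              StreamConfigurationMap map = characteristics.get(
                      CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              // A stall duration of 0 marks the format/size as processed (but not-stalling).
              long stallNs = map.getOutputStallDuration(
                      ImageFormat.YUV_420_888, new Size(1920, 1080));
              boolean isNonStalling = (stallNs == 0);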
4311          </details>
4312        </entry>
4313        <entry name="maxNumOutputProcStalling" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
4314          <description>The maximum number of output streams
4315          that can be configured and used simultaneously by a camera device
4316          for any processed (and stalling) formats.
4317          </description>
4318          <range>
4319          &amp;gt;= 1
4320          </range>
4321          <details>
4322          This value contains the max number of output simultaneous
4323          streams for any processed (and stalling) formats.
4324
4325          This lists the upper bound of the number of output streams supported by
4326          the camera device. Using more streams simultaneously may require more hardware and
4327          CPU resources that will consume more power. The image format for this kind of output stream can
4328          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4329
4330          A processed and stalling format is defined as any non-RAW format with a stallDurations
4331          &amp;gt; 0.  Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
4332          stalling format.
4333
4334          For full guarantees, query {@link
4335          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4336          processed format -- it will return a non-0 value for a stalling stream.
4337
4338          LEGACY devices will support up to 1 processing/stalling stream.
4339          </details>
4340        </entry>
4341        <entry name="maxNumReprocessStreams" type="int32" visibility="system"
4342        deprecated="true" container="array">
4343          <array>
4344            <size>1</size>
4345          </array>
4346          <description>How many reprocessing streams of any type
4347          can be allocated at the same time.</description>
4348          <range>&amp;gt;= 0</range>
4349          <details>
4350          Only used by HAL2.x.
4351
4352          When set to 0, it means no reprocess stream is supported.
4353          </details>
4354          <tag id="HAL2" />
4355        </entry>
4356        <entry name="maxNumInputStreams" type="int32" visibility="public" hwlevel="full">
4357          <description>
4358          The maximum numbers of any type of input streams
4359          that can be configured and used simultaneously by a camera device.
4360          </description>
4361          <range>
4362          0 or 1.
4363          </range>
4364          <details>When set to 0, it means no input stream is supported.
4365
4366          The image format for an input stream can be any supported format returned by {@link
4367          android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
4368          input stream, there must be at least one output stream configured to receive the
4369          reprocessed images.
4370
4371          When an input stream and some output streams are used in a reprocessing request,
4372          only the input buffer will be used to produce these output stream buffers, and a
4373          new sensor image will not be captured.
4374
4375          For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
4376          stream image format will be PRIVATE and the associated output stream image format
4377          should be JPEG.
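
          A minimal sketch of that ZSL configuration, assuming `cameraDevice`, `privateReader`
          (a PRIVATE-format ImageReader used as the zero-shutter-lag output), `sessionCallback`,
          `handler`, and the chosen size are all set up elsewhere:

              // Input and output use the same (assumed) size; query
              // StreamConfigurationMap#getInputSizes for the real supported sizes.
              InputConfiguration inputConfig =
                      new InputConfiguration(4032, 3024, ImageFormat.PRIVATE);
              ImageReader jpegReader =
                      ImageReader.newInstance(4032, 3024, ImageFormat.JPEG, 2);
              cameraDevice.createReprocessableCaptureSession(
                      inputConfig,
                      Arrays.asList(privateReader.getSurface(), jpegReader.getSurface()),
                      sessionCallback, handler);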
4378          </details>
4379          <hal_details>
4380          For the reprocessing flow and controls, see
4381          hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
4382          </hal_details>
4383          <tag id="REPROC" />
4384        </entry>
4385      </static>
4386      <dynamic>
4387        <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
4388          <description>A frame counter set by the framework. This value monotonically
4389          increases with every new result (that is, each new result has a unique
4390          frameCount value).</description>
4391          <units>count of frames</units>
4392          <range>&amp;gt; 0</range>
4393          <details>Reset on release()</details>
4394        </entry>
4395        <clone entry="android.request.id" kind="controls"></clone>
4396        <clone entry="android.request.metadataMode"
4397        kind="controls"></clone>
4398        <clone entry="android.request.outputStreams"
4399        kind="controls"></clone>
4400        <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
4401          <description>Specifies the number of pipeline stages the frame went
4402          through from when it was exposed to when the final completed result
4403          was available to the framework.</description>
4404          <range>&amp;lt;= android.request.pipelineMaxDepth</range>
4405          <details>Depending on what settings are used in the request, and
4406          what streams are configured, the data may undergo less processing,
4407          and some pipeline stages may be skipped.
4408
4409          See android.request.pipelineMaxDepth for more details.
4410          </details>
4411          <hal_details>
4412          This value must always represent the accurate count of how many
4413          pipeline stages were actually used.
4414          </hal_details>
4415        </entry>
4416      </dynamic>
4417      <static>
4418        <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
4419          <description>Specifies the maximum number of pipeline stages a frame
4420          has to go through from when it's exposed to when it's available
4421          to the framework.</description>
4422          <details>A typical minimum value for this is 2 (one stage to expose,
4423          one stage to readout) from the sensor. The ISP then usually adds
4424          its own stages to do custom HW processing. Further stages may be
4425          added by SW processing.
4426
4427          Depending on what settings are used (e.g. YUV, JPEG) and what
4428          processing is enabled (e.g. face detection), the actual pipeline
4429          depth (specified by android.request.pipelineDepth) may be less than
4430          the max pipeline depth.
4431
4432          A pipeline depth of X stages is equivalent to a pipeline latency of
4433          X frame intervals.
4434
4435          This value will normally be 8 or less; however, for a high speed capture session,
4436          the max pipeline depth will be up to 8 x the size of the high speed capture request list.
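
          A sketch of how an application might compare the two values (`characteristics` and a
          received `result` are assumed to be this device's CameraCharacteristics and a capture
          result):

              // Static upper bound for this camera device.
              byte maxDepth = characteristics.get(
                      CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
              // Per-frame depth reported in the capture result; always at most maxDepth.
              byte frameDepth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);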
4437          </details>
4438          <hal_details>
4439          This value should be 4 or less, except for the high speed recording session, where the
4440          max batch sizes may be larger than 1.
4441          </hal_details>
4442        </entry>
4443        <entry name="partialResultCount" type="int32" visibility="public" optional="true">
4444          <description>Defines how many sub-components
4445          a result will be composed of.
4446          </description>
4447          <range>&amp;gt;= 1</range>
4448          <details>In order to combat the pipeline latency, partial results
4449          may be delivered to the application layer from the camera device as
4450          soon as they are available.
4451
4452          Optional; defaults to 1. A value of 1 means that partial
4453          results are not supported, and only the final TotalCaptureResult will
4454          be produced by the camera device.
4455
4456          A typical use case for this might be: after requesting an
4457          auto-focus (AF) lock the new AF state might be available 50%
4458          of the way through the pipeline.  The camera device could
4459          then immediately dispatch this state via a partial result to
4460          the application, and the rest of the metadata via later
4461          partial results.
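
          A sketch of consuming such partial results in an application (a minimal
          CameraCaptureSession.CaptureCallback; only the AF-state key is checked here):

              new CameraCaptureSession.CaptureCallback() {
                  @Override
                  public void onCaptureProgressed(CameraCaptureSession session,
                          CaptureRequest request, CaptureResult partialResult) {
                      // A partial result carries only a subset of keys, so check for null.
                      Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
                      if (afState != null) {
                          // React to the early AF state here.
                      }
                  }
              };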
4462          </details>
4463        </entry>
4464        <entry name="availableCapabilities" type="byte" visibility="public"
4465          enum="true" container="array" hwlevel="legacy">
4466          <array>
4467            <size>n</size>
4468          </array>
4469          <enum>
4470            <value>BACKWARD_COMPATIBLE
4471              <notes>The minimal set of capabilities that every camera
4472                device (regardless of android.info.supportedHardwareLevel)
4473                supports.
4474
4475                This capability is listed by all normal devices, and
4476                indicates that the camera device has a feature set
4477                that's comparable to the baseline requirements for the
4478                older android.hardware.Camera API.
4479
4480                Devices with the DEPTH_OUTPUT capability might not list this
4481                capability, indicating that they support only depth measurement,
4482                not standard color output.
4483              </notes>
4484            </value>
4485            <value optional="true">MANUAL_SENSOR
4486              <notes>
4487              The camera device can be manually controlled (3A algorithms such
4488              as auto-exposure and auto-focus can be bypassed).
4489              The camera device supports basic manual control of the sensor image
4490              acquisition related stages. This means the following controls are
4491              guaranteed to be supported:
4492
4493              * Manual frame duration control
4494                  * android.sensor.frameDuration
4495                  * android.sensor.info.maxFrameDuration
4496              * Manual exposure control
4497                  * android.sensor.exposureTime
4498                  * android.sensor.info.exposureTimeRange
4499              * Manual sensitivity control
4500                  * android.sensor.sensitivity
4501                  * android.sensor.info.sensitivityRange
4502              * Manual lens control (if the lens is adjustable)
4503                  * android.lens.*
4504              * Manual flash control (if a flash unit is present)
4505                  * android.flash.*
4506              * Manual black level locking
4507                  * android.blackLevel.lock
4508              * Auto exposure lock
4509                  * android.control.aeLock
4510
4511              If any of the above 3A algorithms are enabled, then the camera
4512              device will accurately report the values applied by 3A in the
4513              result.
4514
4515              A given camera device may also support additional manual sensor controls,
4516              but this capability only covers the above list of controls.
4517
4518              If this is supported, android.scaler.streamConfigurationMap will
4519              additionally return a min frame duration that is greater than
4520              zero for each supported size-format combination.
4521              </notes>
4522            </value>
4523            <value optional="true">MANUAL_POST_PROCESSING
4524              <notes>
4525              The camera device post-processing stages can be manually controlled.
4526              The camera device supports basic manual control of the image post-processing
4527              stages. This means the following controls are guaranteed to be supported:
4528
4529              * Manual tonemap control
4530                  * android.tonemap.curve
4531                  * android.tonemap.mode
4532                  * android.tonemap.maxCurvePoints
4533                  * android.tonemap.gamma
4534                  * android.tonemap.presetCurve
4535
4536              * Manual white balance control
4537                  * android.colorCorrection.transform
4538                  * android.colorCorrection.gains
4539              * Manual lens shading map control
4540                    * android.shading.mode
4541                    * android.statistics.lensShadingMapMode
4542                    * android.statistics.lensShadingMap
4543                    * android.lens.info.shadingMapSize
4544              * Manual aberration correction control (if aberration correction is supported)
4545                    * android.colorCorrection.aberrationMode
4546                    * android.colorCorrection.availableAberrationModes
4547              * Auto white balance lock
4548                    * android.control.awbLock
4549
4550              If auto white balance is enabled, then the camera device
4551              will accurately report the values applied by AWB in the result.
4552
4553              A given camera device may also support additional post-processing
4554              controls, but this capability only covers the above list of controls.
4555              </notes>
4556            </value>
4557            <value optional="true">RAW
4558              <notes>
4559              The camera device supports outputting RAW buffers and
4560              metadata for interpreting them.
4561
4562              Devices supporting the RAW capability allow both for
4563              saving DNG files, and for direct application processing of
4564              raw sensor images.
4565
4566              * RAW_SENSOR is supported as an output format.
4567              * The maximum available resolution for RAW_SENSOR streams
4568                will match either the value in
4569                android.sensor.info.pixelArraySize or
4570                android.sensor.info.preCorrectionActiveArraySize.
4571              * All DNG-related optional metadata entries are provided
4572                by the camera device.
4573              </notes>
4574            </value>
4575            <value optional="true">PRIVATE_REPROCESSING
4576              <notes>
4577              The camera device supports the Zero Shutter Lag reprocessing use case.
4578
4579              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4580              * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
4581                that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
4582                formats returned by {@link
4583                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
4584                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4585              * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4586                returns a non-empty int[] for each supported input format returned by {@link
4587                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4588              * Each size returned by {@link
4589                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4590                getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
4591                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4592                getOutputSizes(ImageFormat.PRIVATE)}
4593              * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
4594                relative to the sensor's maximum capture rate (at that resolution).
4595              * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
4596                {@link android.graphics.ImageFormat#YUV_420_888} and
4597                {@link android.graphics.ImageFormat#JPEG} formats.
4598              * The maximum available resolution for PRIVATE streams
4599                (both input/output) will match the maximum available
4600                resolution of JPEG streams.
4601              * Static metadata android.reprocess.maxCaptureStall.
4602              * Only the below controls are effective for reprocessing requests and
4603                will be present in capture results; other controls in reprocess
4604                requests will be ignored by the camera device.
4605                    * android.jpeg.*
4606                    * android.noiseReduction.mode
4607                    * android.edge.mode
4608              * android.noiseReduction.availableNoiseReductionModes and
4609                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
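
              A sketch of how an application can discover the reprocessing outputs available for
              PRIVATE input (`characteristics` is assumed to be this device's
              CameraCharacteristics):

                  StreamConfigurationMap map = characteristics.get(
                          CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                  // For this capability the returned array will include YUV_420_888 and JPEG.
                  int[] reprocessOutputs =
                          map.getValidOutputFormatsForInput(ImageFormat.PRIVATE);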
4610              </notes>
4611            </value>
4612            <value optional="true">READ_SENSOR_SETTINGS
4613              <notes>
4614              The camera device supports accurately reporting the sensor settings for many of
4615              the sensor controls while the built-in 3A algorithm is running.  This allows
4616              reporting of sensor settings even when these settings cannot be manually changed.
4617
4618              The values reported for the following controls are guaranteed to be available
4619              in the CaptureResult, including when 3A is enabled:
4620
4621              * Exposure control
4622                  * android.sensor.exposureTime
4623              * Sensitivity control
4624                  * android.sensor.sensitivity
4625              * Lens controls (if the lens is adjustable)
4626                  * android.lens.focusDistance
4627                  * android.lens.aperture
4628
4629              This capability is a subset of the MANUAL_SENSOR control capability, and will
4630              always be included if the MANUAL_SENSOR capability is available.
4631              </notes>
4632            </value>
4633            <value optional="true">BURST_CAPTURE
4634              <notes>
4635              The camera device supports capturing high-resolution images at &gt;= 20 frames per
4636              second, in at least the uncompressed YUV format, when post-processing settings are set
4637              to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
4638              per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
4639              resolution of the device, whichever is smaller.
4640
4641              More specifically, this means that a size matching the camera device's active array
4642              size is listed as a supported size for the {@link
4643              android.graphics.ImageFormat#YUV_420_888} format in either {@link
4644              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
4645              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4646              with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
4647              &lt;= 1/10 s, respectively; and the android.control.aeAvailableTargetFpsRanges entry
4648              lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
4649              for the maximum-size YUV_420_888 format.  If that maximum size is listed in {@link
4650              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4651              then the list of resolutions for YUV_420_888 from {@link
4652              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
4653              least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
4654              s.
4655
4656              If the device supports the {@link android.graphics.ImageFormat#RAW10} or {@link
4657              android.graphics.ImageFormat#RAW12} formats, then those can also be captured at the same rate
4658              as the maximum-size YUV_420_888 resolution.
4659
4660              If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
4661              as for the YUV_420_888 format also apply to the {@link
4662              android.graphics.ImageFormat#PRIVATE} format.
4663
4664              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
4665              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
4666              are also guaranteed to be `true` so burst capture with these two locks ON yields
4667              consistent image output.
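
              A sketch of the frame-duration check described above (`map` is an assumed
              StreamConfigurationMap and `largestYuvSize` an assumed maximum YUV_420_888 size):

                  long minFrameDurationNs = map.getOutputMinFrameDuration(
                          ImageFormat.YUV_420_888, largestYuvSize);
                  // 50 ms per frame corresponds to the 20 fps requirement.
                  boolean meets20Fps = minFrameDurationNs &lt;= 50_000_000L;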
4668              </notes>
4669            </value>
4670            <value optional="true">YUV_REPROCESSING
4671              <notes>
4672              The camera device supports the YUV_420_888 reprocessing use case, similar to
4673              PRIVATE_REPROCESSING. This capability requires the camera device to support the
4674              following:
4675
4676              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4677              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
4678                YUV_420_888 is included in the lists of formats returned by
4679                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
4680                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4681              * {@link
4682                android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4683                returns non-empty int[] for each supported input format returned by {@link
4684                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4685              * Each size returned by {@link
4686                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4687                getInputSizes(YUV_420_888)} is also included in {@link
4688                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4689                getOutputSizes(YUV_420_888)}
4690              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
4691                relative to the sensor's maximum capture rate (at that resolution).
4692              * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
4693                {@link android.graphics.ImageFormat#YUV_420_888} and {@link
4694                android.graphics.ImageFormat#JPEG} formats.
4695              * The maximum available resolution for {@link
4696                android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
4697                maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
4698              * Static metadata android.reprocess.maxCaptureStall.
4699              * Only the below controls are effective for reprocessing requests and will be present
4700                in capture results. The reprocess requests are from the original capture results that
4701                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
4702                output buffers.  All other controls in the reprocess requests will be ignored by the
4703                camera device.
4704                    * android.jpeg.*
4705                    * android.noiseReduction.mode
4706                    * android.edge.mode
4707                    * android.reprocess.effectiveExposureFactor
4708              * android.noiseReduction.availableNoiseReductionModes and
4709                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4710              </notes>
4711            </value>
4712            <value optional="true">DEPTH_OUTPUT
4713              <notes>
4714              The camera device can produce depth measurements from its field of view.
4715
4716              This capability requires the camera device to support the following:
4717
4718              * {@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.
4719              * {@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
4720                output format.
4721              * This camera device, and all camera devices with the same android.lens.facing,
4722                will list the following calibration entries in both
4723                {@link android.hardware.camera2.CameraCharacteristics} and
4724                {@link android.hardware.camera2.CaptureResult}:
4725                  - android.lens.poseTranslation
4726                  - android.lens.poseRotation
4727                  - android.lens.intrinsicCalibration
4728                  - android.lens.radialDistortion
4729              * The android.depth.depthIsExclusive entry is listed by this device.
4730              * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
4731                normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
4732                format.
4733
4734              Generally, depth output operates at a slower frame rate than standard color capture,
4735              so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
4736              should be accounted for (see
4737              {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
4738              On a device that supports both depth and color-based output, to enable smooth preview,
4739              using a repeating burst is recommended, where a depth-output target is only included
4740              once every N frames; N is the ratio between the preview output rate and the depth output
4741              rate, including depth stall time.
4742              </notes>
4743            </value>
4744            <value optional="true">CONSTRAINED_HIGH_SPEED_VIDEO
4745              <notes>
4746              The device supports the constrained high speed video recording (frame rate >= 120fps)
4747              use case. The camera device will support high speed capture sessions created by
4748              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
4749              only accepts high speed request lists created by
4750              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
4751
4752              A camera device can still support high speed video streaming by advertising the high speed
4753              FPS ranges in android.control.aeAvailableTargetFpsRanges. In that case, all normal
4754              per-frame capture request control and synchronization requirements will apply to
4755              the high speed FPS ranges, the same as to all other FPS ranges. This capability instead
4756              describes a specialized operating mode with many limitations (see below), which
4757              is targeted only at high speed video recording.
4758
4759              The supported high speed video sizes and fps ranges are specified in
4760              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4761              To get desired output frame rates, the application is only allowed to select video size
4762              and FPS range combinations provided by
4763              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4764              The fps range can be controlled via android.control.aeTargetFpsRange.
4765
4766              In this capability, the camera device will override aeMode, awbMode, and afMode to
4767              ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
4768              controls will be overridden to be FAST. Therefore, no manual control of capture
4769              and post-processing parameters is possible. All other controls operate the
4770              same as when android.control.mode == AUTO. This means that all other
4771              android.control.* fields continue to work, such as
4772
4773              * android.control.aeTargetFpsRange
4774              * android.control.aeExposureCompensation
4775              * android.control.aeLock
4776              * android.control.awbLock
4777              * android.control.effectMode
4778              * android.control.aeRegions
4779              * android.control.afRegions
4780              * android.control.awbRegions
4781              * android.control.afTrigger
4782              * android.control.aePrecaptureTrigger
4783
4784              Outside of android.control.*, the following controls will work:
4785
4786              * android.flash.mode (TORCH mode only, automatic flash for still capture will not
4787              work since aeMode is ON)
4788              * android.lens.opticalStabilizationMode (if it is supported)
4789              * android.scaler.cropRegion
4790              * android.statistics.faceDetectMode (if it is supported)
4791
4792              For the high speed recording use case, the actual maximum supported frame rate may
4793              be lower than what the camera can output, depending on the destination Surfaces for
4794              the image data. For example, if the destination surface is from a video encoder,
4795              the application needs to check whether the video encoder is capable of supporting the
4796              high frame rate for a given video size, or it will end up with a lower recording
4797              frame rate. If the destination surface is from a preview window, the actual preview frame
4798              rate will be bounded by the screen refresh rate.
4799
4800              The camera device will only support up to 2 simultaneous high speed output surfaces
4801              (preview and recording surfaces)
4802              in this mode. The above controls will be effective only if all of the below conditions are true:
4803
4804              * The application creates a camera capture session with no more than 2 surfaces via
4805              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
4806              targeted surfaces must be a preview surface (either from
4807              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
4808              a recording surface (either from {@link android.media.MediaRecorder#getSurface} or
4809              {@link android.media.MediaCodec#createInputSurface}).
4810              * The stream sizes are selected from the sizes reported by
4811              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4812              * The FPS ranges are selected from
4813              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4814
4815              When the above conditions are NOT satisfied,
4816              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
4817              will fail.
4818
4819              Switching to an FPS range that has a different maximum FPS may trigger some camera device
4820              reconfigurations, which may introduce extra latency. It is recommended that
4821              the application avoids unnecessary maximum target FPS changes as much as possible
4822              during high speed streaming.
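
              A rough sketch of entering this mode (`previewSurface`, `recorderSurface`, and
              `handler` are assumed; the sizes and FPS range must come from the
              getHighSpeedVideoSizes/getHighSpeedVideoFpsRanges queries above):

                  cameraDevice.createConstrainedHighSpeedCaptureSession(
                          Arrays.asList(previewSurface, recorderSurface),
                          new CameraCaptureSession.StateCallback() {
                              @Override
                              public void onConfigured(CameraCaptureSession session) {
                                  CameraConstrainedHighSpeedCaptureSession highSpeedSession =
                                          (CameraConstrainedHighSpeedCaptureSession) session;
                                  // Build a CaptureRequest targeting both surfaces, expand it with
                                  // createHighSpeedRequestList, then call setRepeatingBurst.
                              }
                              @Override
                              public void onConfigureFailed(CameraCaptureSession session) {
                                  // Handle the configuration failure.
                              }
                          }, handler);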
4823              </notes>
4824            </value>
4825          </enum>
4826          <description>List of capabilities that this camera device
4827          advertises as fully supporting.</description>
4828          <details>
4829          A capability is a contract that the camera device makes in order
4830          to be able to satisfy one or more use cases.
4831
4832          Listing a capability guarantees that the whole set of features
4833          required to support a common use case will all be available.
4834
4835          Using a subset of the functionality provided by an unsupported
4836          capability may be possible on a specific camera device implementation;
4837          to do this, query each of android.request.availableRequestKeys,
4838          android.request.availableResultKeys, and
4839          android.request.availableCharacteristicsKeys.
4840
4841          The following capabilities are guaranteed to be available on
4842          android.info.supportedHardwareLevel `==` FULL devices:
4843
4844          * MANUAL_SENSOR
4845          * MANUAL_POST_PROCESSING
4846
4847          Other capabilities may be available on either FULL or LIMITED
4848          devices, but the application should query this key to be sure.
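
          A sketch of the query an application can make (`characteristics` is assumed to be the
          CameraCharacteristics of the camera in question):

              int[] capabilities = characteristics.get(
                      CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
              boolean hasManualSensor = false;
              for (int capability : capabilities) {
                  if (capability ==
                          CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                      hasManualSensor = true;
                  }
              }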
4849          </details>
4850          <hal_details>
4851          Additional constraint details per-capability will be available
4852          in the Compatibility Test Suite.
4853
4854          The minimum baseline requirements for the
4855          BACKWARD_COMPATIBLE capability are not explicitly listed.
4856          Instead, refer to "BC" tags and the camera CTS tests in the
4857          android.hardware.camera2.cts package.
4858
4859          Listed controls that can be either request or result (e.g.
4860          android.sensor.exposureTime) must be available both in the
4861          request and the result in order to be considered to be
4862          capability-compliant.
4863
4864          For example, if the HAL claims to support MANUAL control,
4865          then exposure time must be configurable via the request _and_
4866          the actual exposure applied must be available via
4867          the result.
4868
4869          If MANUAL_SENSOR is omitted, the HAL may choose to omit the
4870          android.scaler.availableMinFrameDurations static property entirely.
4871
4872          For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
4873          hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
4874
4875          Devices that support the MANUAL_SENSOR capability must support the
4876          CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
4877
4878          Devices that support the PRIVATE_REPROCESSING capability or the
4879          YUV_REPROCESSING capability must support the
4880          CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
4881
4882          For DEPTH_OUTPUT, the depth-format keys
4883          android.depth.availableDepthStreamConfigurations,
4884          android.depth.availableDepthMinFrameDurations,
4885          android.depth.availableDepthStallDurations must be available, in
4886          addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
4887          enum notes. The entry android.depth.maxDepthSamples must be available
4888          if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
4889          DEPTH).
4890          </hal_details>
4891        </entry>
4892        <entry name="availableRequestKeys" type="int32" visibility="hidden"
4893          container="array" hwlevel="legacy">
4894          <array>
4895            <size>n</size>
4896          </array>
4897          <description>A list of all keys that the camera device has available
4898          to use with {@link android.hardware.camera2.CaptureRequest}.</description>
4899
4900          <details>Attempting to set a key into a CaptureRequest that is not
4901          listed here will result in an invalid request and will be rejected
4902          by the camera device.
4903
4904          This field can be used to query the feature set of a camera device
4905          at a more granular level than capabilities. This is especially
4906          important for optional keys that are not listed under any capability
4907          in android.request.availableCapabilities.
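
          A sketch of such a granular query (`characteristics` is assumed, and the key checked
          here is just an arbitrary example):

              boolean supportsOisKey = characteristics.getAvailableCaptureRequestKeys()
                      .contains(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE);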
4908          </details>
4909          <hal_details>
4910          Vendor tags must not be listed here. Use the vendor tag metadata
4911          extensions C api instead (refer to camera3.h for more details).
4912          extensions C API instead (refer to camera3.h for more details).
4913          Setting/getting vendor tags will be checked against the metadata
4914          vendor extensions API and not against this field.
4915
4916          The HAL must not consume any request tags that are not listed either
4917          here or in the vendor tag list.
4918
4919          The public camera2 API will always make the vendor tags visible
4920          via
4921          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
4922          </hal_details>
4923        </entry>
4924        <entry name="availableResultKeys" type="int32" visibility="hidden"
4925          container="array" hwlevel="legacy">
4926          <array>
4927            <size>n</size>
4928          </array>
4929          <description>A list of all keys that the camera device has available
4930          to use with {@link android.hardware.camera2.CaptureResult}.</description>
4931
4932          <details>Attempting to get a key from a CaptureResult that is not
4933          listed here will always return a `null` value. Getting a key from
4934          a CaptureResult that is listed here will generally never return a `null`
4935          value.
4936
4937          The following keys may return `null` unless they are enabled:
4938
4939          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
4940
4941          (Those sometimes-null keys will nevertheless be listed here
4942          if they are available.)
4943
4944          This field can be used to query the feature set of a camera device
4945          at a more granular level than capabilities. This is especially
4946          important for optional keys that are not listed under any capability
4947          in android.request.availableCapabilities.
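
          A sketch of enabling one of those sometimes-null keys and reading it back (`builder` is
          an assumed CaptureRequest.Builder and `result` an assumed TotalCaptureResult):

              builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                      CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              // Later, in the capture callback:
              LensShadingMap shadingMap =
                      result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
              if (shadingMap != null) {
                  // The map is only present because the mode was set to ON.
              }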
4948          </details>
4949          <hal_details>
4950          Tags listed here must always have an entry in the result metadata,
4951          even if that size is 0 elements. Only array-type tags (e.g. lists,
4952          matrices, strings) are allowed to have 0 elements.
4953
4954          Vendor tags must not be listed here. Use the vendor tag metadata
4955          extensions C API instead (refer to camera3.h for more details).
4956
4957          Setting/getting vendor tags will be checked against the metadata
4958          vendor extensions API and not against this field.
4959
4960          The HAL must not produce any result tags that are not listed either
4961          here or in the vendor tag list.
4962
4963          The public camera2 API will always make the vendor tags visible via {@link
4964          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
4965          </hal_details>
4966        </entry>
4967        <entry name="availableCharacteristicsKeys" type="int32" visibility="hidden"
4968          container="array" hwlevel="legacy">
4969          <array>
4970            <size>n</size>
4971          </array>
4972          <description>A list of all keys that the camera device has available
4973          to use with {@link android.hardware.camera2.CameraCharacteristics}.</description>
4974          <details>This entry follows the same rules as
4975          android.request.availableResultKeys (except that it applies for
4976          CameraCharacteristics instead of CaptureResult). See above for more
4977          details.
4978          </details>
4979          <hal_details>
4980          Keys listed here must always have an entry in the static info metadata,
4981          even if that size is 0 elements. Only array-type tags (e.g. lists,
4982          matrices, strings) are allowed to have 0 elements.
4983
4984          Vendor tags must not be listed here. Use the vendor tag metadata
4985          extensions C API instead (refer to camera3.h for more details).
4986
4987          Setting/getting vendor tags will be checked against the metadata
4988          vendor extensions API and not against this field.
4989
4990          The HAL must not have any tags in its static info that are not listed
4991          either here or in the vendor tag list.
4992
4993          The public camera2 API will always make the vendor tags visible
4994          via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
4995          </hal_details>
4996        </entry>
4997      </static>
4998    </section>
4999    <section name="scaler">
5000      <controls>
5001        <entry name="cropRegion" type="int32" visibility="public"
5002               container="array" typedef="rectangle" hwlevel="legacy">
5003          <array>
5004            <size>4</size>
5005          </array>
5006          <description>The desired region of the sensor to read out for this capture.</description>
5007          <units>Pixel coordinates relative to
5008          android.sensor.info.activeArraySize</units>
5009          <details>
5010            This control can be used to implement digital zoom.
5011
5012            The crop region coordinate system is based off
5013            android.sensor.info.activeArraySize, with `(0, 0)` being the
5014            top-left corner of the sensor active array.
5015
5016            Output streams use this rectangle to produce their output,
5017            cropping to a smaller region if necessary to maintain the
5018            stream's aspect ratio, then scaling the sensor input to
5019            match the output's configured resolution.
5020
5021            The crop region is applied after the RAW to other color
5022            space (e.g. YUV) conversion. Since raw streams
5023            (e.g. RAW16) don't have the conversion stage, they are not
5024            croppable. The crop region will be ignored by raw streams.
5025
5026            For non-raw streams, any additional per-stream cropping will
5027            be done to maximize the final pixel area of the stream.
5028
5029            For example, if the crop region is set to a 4:3 aspect
5030            ratio, then 4:3 streams will use the exact crop
5031            region. 16:9 streams will further crop vertically
5032            (letterbox).
5033
5034            Conversely, if the crop region is set to a 16:9, then 4:3
5035            outputs will crop horizontally (pillarbox), and 16:9
5036            streams will match exactly. These additional crops will
5037            be centered within the crop region.
5038
5039            The width and height of the crop region cannot
5040            be set to be smaller than
5041            `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
5042            `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
5043
5044            The camera device may adjust the crop region to account
5045            for rounding and other hardware requirements; the final
5046            crop region used will be included in the output capture
5047            result.
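
            A sketch of using this control for digital zoom (`characteristics` and `builder` are
            assumed to be the device's CameraCharacteristics and an active CaptureRequest.Builder;
            the zoom factor must not exceed android.scaler.availableMaxDigitalZoom):

                Rect activeArray = characteristics.get(
                        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                float zoom = 2.0f;  // assumed example zoom factor
                int cropWidth = (int) Math.floor(activeArray.width() / zoom);
                int cropHeight = (int) Math.floor(activeArray.height() / zoom);
                // Center the crop region within the active array.
                int left = (activeArray.width() - cropWidth) / 2;
                int top = (activeArray.height() - cropHeight) / 2;
                builder.set(CaptureRequest.SCALER_CROP_REGION,
                        new Rect(left, top, left + cropWidth, top + cropHeight));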
5048          </details>
5049          <hal_details>
5050            The output streams must maintain square pixels at all
5051            times, no matter what the relative aspect ratios of the
5052            crop region and the stream are.  Negative values for
5053            corners are allowed for raw output if the full pixel array is
5054            larger than the active pixel array. Width and height may be
5055            rounded to nearest larger supportable width, especially
5056            for raw output, where only a few fixed scales may be
5057            possible.
5058
5059            For a set of output streams configured, if the sensor output is cropped to a smaller
5060            size than the active array size, the HAL needs to follow the below cropping rules:
5061
5062            * The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
5063            array size. More specifically, the HAL must transform the request cropRegion from
5064            android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
5065                1. Translate the requested cropRegion with respect to the top-left corner of the sensor
5066                cropped pixel area by (tx, ty),
5067                where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
5068                and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
5069                (sensorCrop.top, sensorCrop.left) is the coordinate based off the
5070                android.sensor.info.activeArraySize.
5071                2. Scale the width and height of requested cropRegion with scaling factor of
5072                sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height
5073                respectively.
5074            Once this new cropRegion is calculated, the HAL must use this region to crop the image
5075            with regard to the sensor crop size (effective active array size). The HAL still needs to
5076            follow the general cropping rule for this new cropRegion and effective active
5077            array size.
5078
5079            * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
5080            The HAL needs to convert the new cropRegion generated above with respect to the full active array size.
5081            The reported cropRegion may be slightly different from the requested cropRegion since
5082            the HAL may adjust the crop region to account for rounding, conversion error, or other
5083            hardware limitations.
5084
5085            HAL2.x uses only (x, y, width)
5086          </hal_details>
5087          <tag id="BC" />
5088        </entry>
5089      </controls>
5090      <static>
5091        <entry name="availableFormats" type="int32"
5092        visibility="hidden" deprecated="true" enum="true"
5093        container="array" typedef="imageFormat">
5094          <array>
5095            <size>n</size>
5096          </array>
5097          <enum>
5098            <value optional="true" id="0x20">RAW16
5099              <notes>
5100              RAW16 is a standard, cross-platform format for raw image
5101              buffers with 16-bit pixels.
5102
5103              Buffers of this format are typically expected to have a
5104              Bayer Color Filter Array (CFA) layout, which is given in
5105              android.sensor.info.colorFilterArrangement. Sensors with
5106              CFAs that are not representable by a format in
5107              android.sensor.info.colorFilterArrangement should not
5108              use this format.
5109
5110              Buffers of this format will also follow the constraints given for
5111              RAW_OPAQUE buffers, but with relaxed performance constraints.
5112
5113              This format is intended to give users access to the full contents
5114              of the buffers coming directly from the image sensor prior to any
5115              cropping or scaling operations, and all coordinate systems for
5116              metadata used for this format are relative to the size of the
5117              active region of the image sensor before any geometric distortion
5118              correction has been applied (i.e.
5119              android.sensor.info.preCorrectionActiveArraySize). Supported
5120              dimensions for this format are limited to the full dimensions of
5121              the sensor (e.g. either android.sensor.info.pixelArraySize or
5122              android.sensor.info.preCorrectionActiveArraySize will be the
5123              only supported output size).
5124
5125              See android.scaler.availableInputOutputFormatsMap for
5126              the full set of performance guarantees.
5127              </notes>
5128            </value>
5129            <value optional="true" id="0x24">RAW_OPAQUE
5130              <notes>
5131              RAW_OPAQUE (or
5132              {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
5133              as referred in public API) is a format for raw image buffers
5134              coming from an image sensor.
5135
5136              The actual structure of buffers of this format is
5137              platform-specific, but must follow several constraints:
5138
5139              1. No image post-processing operations may have been applied to
5140              buffers of this type. These buffers contain raw image data coming
5141              directly from the image sensor.
5142              1. If a buffer of this format is passed to the camera device for
5143              reprocessing, the resulting images will be identical to the images
5144              produced if the buffer had come directly from the sensor and was
5145              processed with the same settings.
5146
5147              The intended use for this format is to allow access to the native
5148              raw format buffers coming directly from the camera sensor without
5149              any additional conversions or decrease in framerate.
5150
5151              See android.scaler.availableInputOutputFormatsMap for the full set of
5152              performance guarantees.
5153              </notes>
5154            </value>
5155            <value optional="true" id="0x32315659">YV12
5156              <notes>YCrCb 4:2:0 Planar</notes>
5157            </value>
5158            <value optional="true" id="0x11">YCrCb_420_SP
5159              <notes>NV21</notes>
5160            </value>
5161            <value id="0x22">IMPLEMENTATION_DEFINED
5162              <notes>System internal format, not application-accessible</notes>
5163            </value>
5164            <value id="0x23">YCbCr_420_888
5165              <notes>Flexible YUV420 Format</notes>
5166            </value>
5167            <value id="0x21">BLOB
5168              <notes>JPEG format</notes>
5169            </value>
5170          </enum>
5171          <description>The list of image formats that are supported by this
5172          camera device for output streams.</description>
5173          <details>
5174          All camera devices will support JPEG and YUV_420_888 formats.
5175
5176          When set to YUV_420_888, the application can access the YUV420 data directly.
5177          </details>
5178          <hal_details>
5179          These format values are from HAL_PIXEL_FORMAT_* in
5180          system/core/include/system/graphics.h.
5181
5182          When IMPLEMENTATION_DEFINED is used, the platform
5183          gralloc module will select a format based on the usage flags provided
5184          by the camera HAL device and the other endpoint of the stream. It is
5185          usually used by preview and recording streams, where the application doesn't
5186          need to access the image data.
5187
5188          YCbCr_420_888 format must be supported by the HAL. When an image stream
5189          needs CPU/application direct access, this format will be used.
5190
5191          The BLOB format must be supported by the HAL. This is used for the JPEG stream.
5192
5193          A RAW_OPAQUE buffer should contain only pixel data. It is strongly
5194          recommended that any information used by the camera device when
5195          processing images is fully expressed by the result metadata
5196          for that image buffer.
5197          </hal_details>
5198          <tag id="BC" />
5199        </entry>
5200        <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
5201        container="array">
5202          <array>
5203            <size>n</size>
5204          </array>
5205          <description>The minimum frame duration that is supported
5206          for each resolution in android.scaler.availableJpegSizes.
5207          </description>
5208          <units>Nanoseconds</units>
5209          <range>TODO: Remove property.</range>
5210          <details>
5211          This corresponds to the minimum steady-state frame duration when only
5212          that JPEG stream is active and captured in a burst, with all
5213          processing (typically in android.*.mode) set to FAST.
5214
5215          When multiple streams are configured, the minimum
5216          frame duration will be &amp;gt;= max(individual stream min
5217          durations).</details>
5218          <tag id="BC" />
5219        </entry>
5220        <entry name="availableJpegSizes" type="int32" visibility="hidden"
5221        deprecated="true" container="array" typedef="size">
5222          <array>
5223            <size>n</size>
5224            <size>2</size>
5225          </array>
5226          <description>The JPEG resolutions that are supported by this camera device.</description>
5227          <range>TODO: Remove property.</range>
5228          <details>
5229          The resolutions are listed as `(width, height)` pairs. All camera devices will support
5230          sensor maximum resolution (defined by android.sensor.info.activeArraySize).
5231          </details>
5232          <hal_details>
5233          The HAL must include sensor maximum resolution
5234          (defined by android.sensor.info.activeArraySize),
5235          and should include half/quarter of sensor maximum resolution.
5236          </hal_details>
5237          <tag id="BC" />
5238        </entry>
5239        <entry name="availableMaxDigitalZoom" type="float" visibility="public"
5240              hwlevel="legacy">
5241          <description>The maximum ratio between both active area width
5242          and crop region width, and active area height and
5243          crop region height, for android.scaler.cropRegion.
5244          </description>
5245          <units>Zoom scale factor</units>
5246          <range>&amp;gt;=1</range>
5247          <details>
5248          This represents the maximum amount of zooming possible by
5249          the camera device, or equivalently, the minimum cropping
5250          window size.
5251
5252          Crop regions that have a width or height that is smaller
5253          than this ratio allows will be rounded up to the minimum
5254          allowed size by the camera device.
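
          A minimal sketch (camera2 Java API; `chars` is an illustrative
          {@link android.hardware.camera2.CameraCharacteristics} instance) of computing the
          smallest crop region dimensions this ratio allows:

              // Minimum crop region dimensions implied by the maximum digital zoom ratio.
              Rect active = chars.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
              float maxZoom = chars.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
              // Crop regions narrower or shorter than this will be rounded up by the device.
              int minCropWidth = (int) Math.ceil(active.width() / maxZoom);
              int minCropHeight = (int) Math.ceil(active.height() / maxZoom);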
5255          </details>
5256          <tag id="BC" />
5257        </entry>
5258        <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
5259        container="array">
5260          <array>
5261            <size>n</size>
5262          </array>
5263          <description>For each available processed output size (defined in
5264          android.scaler.availableProcessedSizes), this property lists the
5265          minimum supportable frame duration for that size.
5266          </description>
5267          <units>Nanoseconds</units>
5268          <details>
5269          This should correspond to the frame duration when only that processed
5270          stream is active, with all processing (typically in android.*.mode)
5271          set to FAST.
5272
5273          When multiple streams are configured, the minimum frame duration will
5274          be &amp;gt;= max(individual stream min durations).
5275          </details>
5276          <tag id="BC" />
5277        </entry>
5278        <entry name="availableProcessedSizes" type="int32" visibility="hidden"
5279        deprecated="true" container="array" typedef="size">
5280          <array>
5281            <size>n</size>
5282            <size>2</size>
5283          </array>
5284          <description>The resolutions available for use with
5285          processed output streams, such as YV12, NV12, and
5286          platform opaque YUV/RGB streams to the GPU or video
5287          encoders.</description>
5288          <details>
5289          The resolutions are listed as `(width, height)` pairs.
5290
5291          For a given use case, the actual maximum supported resolution
5292          may be lower than what is listed here, depending on the destination
5293          Surface for the image data. For example, for recording video,
5294          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5295          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5296          can provide.
5297
5298          Please reference the documentation for the image data destination to
5299          check if it limits the maximum size for image data.
5300          </details>
5301          <hal_details>
5302          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5303          the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
5304          and each resolution below if it is smaller than or equal to the sensor
5305          maximum resolution (if they are not listed in JPEG sizes already):
5306
5307          * 240p (320 x 240)
5308          * 480p (640 x 480)
5309          * 720p (1280 x 720)
5310          * 1080p (1920 x 1080)
5311
5312          For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
5313          the HAL only has to list up to the maximum video size supported by the device.
5314          </hal_details>
5315          <tag id="BC" />
5316        </entry>
5317        <entry name="availableRawMinDurations" type="int64" deprecated="true"
5318        container="array">
5319          <array>
5320            <size>n</size>
5321          </array>
5322          <description>
5323          For each available raw output size (defined in
5324          android.scaler.availableRawSizes), this property lists the minimum
5325          supportable frame duration for that size.
5326          </description>
5327          <units>Nanoseconds</units>
5328          <details>
5329          Should correspond to the frame duration when only the raw stream is
5330          active.
5331
5332          When multiple streams are configured, the minimum
5333          frame duration will be &amp;gt;= max(individual stream min
5334          durations).</details>
5335          <tag id="BC" />
5336        </entry>
5337        <entry name="availableRawSizes" type="int32" deprecated="true"
5338        container="array" typedef="size">
5339          <array>
5340            <size>n</size>
5341            <size>2</size>
5342          </array>
5343          <description>The resolutions available for use with raw
5344          sensor output streams, listed as width,
5345          height</description>
5346        </entry>
5347      </static>
5348      <dynamic>
5349        <clone entry="android.scaler.cropRegion" kind="controls">
5350        </clone>
5351      </dynamic>
5352      <static>
5353        <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
5354          typedef="reprocessFormatsMap">
5355          <description>The mapping of image formats that are supported by this
5356          camera device for input streams, to their corresponding output formats.
5357          </description>
5358          <details>
5359          All camera devices with at least 1
5360          android.request.maxNumInputStreams will have at least one
5361          available input format.
5362
5363          The camera device will support the following map of formats,
5364          if its dependent capability (android.request.availableCapabilities) is supported:
5365
5366            Input Format                                    | Output Format                                     | Capability
5367          :-------------------------------------------------|:--------------------------------------------------|:----------
5368          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#JPEG}         | PRIVATE_REPROCESSING
5369          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#YUV_420_888}  | PRIVATE_REPROCESSING
5370          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#JPEG}         | YUV_REPROCESSING
5371          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#YUV_420_888}  | YUV_REPROCESSING
5372
5373          PRIVATE refers to a device-internal format that is not directly application-visible.  A
5374          PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
5375          with {@link android.graphics.ImageFormat#PRIVATE} as the format.
5376
5377          For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
5378          or output will never hurt maximum frame rate (i.e.  {@link
5379          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
5380          getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0).
5381
5382          Attempting to configure an input stream with output streams not
5383          listed as available in this map is not valid.
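
          At the application level, the same mapping is exposed through {@link
          android.hardware.camera2.params.StreamConfigurationMap}; a minimal sketch (with
          `chars` as an illustrative CameraCharacteristics instance):

              // Query which output formats can be produced when reprocessing each input format.
              StreamConfigurationMap map =
                      chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              for (int inputFormat : map.getInputFormats()) {
                  int[] outputFormats = map.getValidOutputFormatsForInput(inputFormat);
                  // Each (inputFormat, outputFormats) pair corresponds to one row of the table above.
              }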
5384          </details>
5385          <hal_details>
5386          For the formats, see `system/core/include/system/graphics.h` for a definition
5387          of the image format enumerations. The PRIVATE format refers to the
5388          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
5389          the actual format by using the gralloc usage flags.
5390          For the ZSL use case in particular, the HAL could choose an appropriate format (a partially
5391          processed YUV or RAW-based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
5392          See camera3.h for more details.
5393
5394          This value is encoded as a variable-size array-of-arrays.
5395          The inner array always contains `[format, length, ...]` where
5396          `...` has `length` elements. An inner array is followed by another
5397          inner array if the total metadata entry size hasn't yet been exceeded.
5398
5399          A code sample to read/write this encoding (with a device that
5400          supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
5401          and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
5402
5403              // reading
5404              int32_t* contents = &amp;entry.i32[0];
5405              for (size_t i = 0; i &lt; entry.count; ) {
5406                  int32_t format = contents[i++];
5407                  int32_t length = contents[i++];
5408                  int32_t output_formats[length];
5409                  memcpy(&amp;output_formats[0], &amp;contents[i],
5410                         length * sizeof(int32_t));
5411                  i += length;
5412              }
5413
5414              // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
5415                int32_t contents[] = {
5416                IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
5417                YUV_420_888, 2, YUV_420_888, BLOB,
5418              };
5419              update_camera_metadata_entry(metadata, index, &amp;contents[0],
5420                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
5421
5422          If the HAL claims to support any of the capabilities listed in the
5423          above details, then it must also support all the input-output
5424          combinations listed for that capability. It can optionally support
5425          additional formats if it so chooses.
5426          </hal_details>
5427          <tag id="REPROC" />
5428        </entry>
5429        <entry name="availableStreamConfigurations" type="int32" visibility="hidden"
5430          enum="true" container="array"
5431          typedef="streamConfiguration" hwlevel="legacy">
5432          <array>
5433            <size>n</size>
5434            <size>4</size>
5435          </array>
5436          <enum>
5437            <value>OUTPUT</value>
5438            <value>INPUT</value>
5439          </enum>
5440          <description>The available stream configurations that this
5441          camera device supports
5442          (i.e. format, width, height, output/input stream).
5443          </description>
5444          <details>
5445          The configurations are listed as `(format, width, height, input?)`
5446          tuples.
5447
5448          For a given use case, the actual maximum supported resolution
5449          may be lower than what is listed here, depending on the destination
5450          Surface for the image data. For example, for recording video,
5451          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5452          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5453          can provide.
5454
5455          Please reference the documentation for the image data destination to
5456          check if it limits the maximum size for image data.
5457
5458          Not all output formats may be supported in a configuration with
5459          an input stream of a particular format. For more details, see
5460          android.scaler.availableInputOutputFormatsMap.
5461
5462          The following table describes the minimum required output stream
5463          configurations based on the hardware level
5464          (android.info.supportedHardwareLevel):
5465
5466          Format         | Size                                         | Hardware Level | Notes
5467          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
5468          JPEG           | android.sensor.info.activeArraySize          | Any            |
5469          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
5470          JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
5471          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5472          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5473          YUV_420_888    | all output sizes available for JPEG          | FULL           |
5474          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5475          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
5476
5477          Refer to android.request.availableCapabilities for additional
5478          mandatory stream configurations on a per-capability basis.
5479          </details>
5480          <hal_details>
5481          It is recommended (but not mandatory) to also include half/quarter
5482          of sensor maximum resolution for JPEG formats (regardless of hardware
5483          level).
5484
5485          (The following is a rewording of the above required table):
5486
5487          For the JPEG format, the sizes may be restricted by the conditions below:
5488
5489          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5490          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5491          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5492          it does not have to be included in the supported JPEG sizes.
5493          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5494          the dimensions being a multiple of 16.
5495
5496          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
5497          However, the largest JPEG size must be as close as possible to the sensor maximum
5498          resolution given the above constraints. It is required that, after aspect ratio adjustments,
5499          additional size reduction due to other issues must be less than 3% in area. For example,
5500          if the sensor maximum resolution is 3280x2464, the maximum JPEG size has aspect
5501          ratio 4:3, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be
5502          3264x2448.
5503
5504          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5505          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5506          here as output streams.
5507
5508          It must also include each resolution below if it is smaller than or
5509          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5510          formats), as output streams:
5511
5512          * 240p (320 x 240)
5513          * 480p (640 x 480)
5514          * 720p (1280 x 720)
5515          * 1080p (1920 x 1080)
5516
5517          For LIMITED capability devices
5518          (`android.info.supportedHardwareLevel == LIMITED`),
5519          the HAL only has to list up to the maximum video size
5520          supported by the device.
5521
5522          Regardless of hardware level, every output resolution available for
5523          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5524
5525          This supersedes the following fields, which are now deprecated:
5526
5527          * availableFormats
5528          * available[Processed,Raw,Jpeg]Sizes
5529          </hal_details>
5530        </entry>
5531        <entry name="availableMinFrameDurations" type="int64" visibility="hidden"
5532               container="array"
5533               typedef="streamConfigurationDuration" hwlevel="legacy">
5534          <array>
5535            <size>4</size>
5536            <size>n</size>
5537          </array>
5538          <description>This lists the minimum frame duration for each
5539          format/size combination.
5540          </description>
5541          <units>(format, width, height, ns) x n</units>
5542          <details>
5543          This should correspond to the frame duration when only that
5544          stream is active, with all processing (typically in android.*.mode)
5545          set to either OFF or FAST.
5546
5547          When multiple streams are used in a request, the minimum frame
5548          duration will be max(individual stream min durations).
5549
5550          The minimum frame duration of a stream (of a particular format, size)
5551          is the same regardless of whether the stream is input or output.
5552
5553          See android.sensor.frameDuration and
5554          android.scaler.availableStallDurations for more details about
5555          calculating the max frame rate.
5556
5557          (Keep in sync with
5558          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
5559          </details>
5560          <tag id="V1" />
5561        </entry>
5562        <entry name="availableStallDurations" type="int64" visibility="hidden"
5563               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5564          <array>
5565            <size>4</size>
5566            <size>n</size>
5567          </array>
5568          <description>This lists the maximum stall duration for each
5569          output format/size combination.
5570          </description>
5571          <units>(format, width, height, ns) x n</units>
5572          <details>
5573          A stall duration is how much extra time would get added
5574          to the normal minimum frame duration for a repeating request
5575          that has streams with non-zero stall.
5576
5577          For example, consider JPEG captures which have the following
5578          characteristics:
5579
5580          * JPEG streams act like processed YUV streams in requests for which
5581          they are not included; in requests in which they are directly
5582          referenced, they act as JPEG streams. This is because supporting a
5583          JPEG stream requires the underlying YUV data to always be ready for
5584          use by a JPEG encoder, but the encoder will only be used (and impact
5585          frame duration) on requests that actually reference a JPEG stream.
5586          * The JPEG processor can run concurrently to the rest of the camera
5587          pipeline, but cannot process more than 1 capture at a time.
5588
5589          In other words, using a repeating YUV request would result
5590          in a steady frame rate (let's say it's 30 FPS). If a single
5591          JPEG request is submitted periodically, the frame rate will stay
5592          at 30 FPS (as long as we wait for the previous JPEG to return each
5593          time). If we try to submit a repeating YUV + JPEG request, then
5594          the frame rate will drop from 30 FPS.
5595
5596          In general, submitting a new request with a non-0 stall time
5597          stream will _not_ cause a frame rate drop unless there are still
5598          outstanding buffers for that stream from previous requests.
5599
5600          Submitting a repeating request with a set of streams (call this `S`)
5601          results in an effective minimum frame duration that is equal to
5602          the normal minimum frame duration corresponding to `S`, plus
5603          the maximum stall duration for `S`.
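
          As a rough sketch (camera2 Java API; `map`, `yuvSize`, and `jpegSize` are
          illustrative), the effective minimum frame duration for a repeating request using
          a YUV stream plus a JPEG stream can be estimated as:

              // Normal minimum frame duration for the stream set, plus the maximum stall duration.
              long yuvMin = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, yuvSize);
              long jpegMin = map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
              long jpegStall = map.getOutputStallDuration(ImageFormat.JPEG, jpegSize);
              long effectiveMinDuration = Math.max(yuvMin, jpegMin) + jpegStall;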
5604
5605          When interleaving requests with and without a stall duration,
5606          a request will stall by the maximum of the remaining stall times
5607          for each stall-capable stream that still has outstanding buffers.
5608
5609          This means that a stalling request will not have an exposure start
5610          until the stall has completed.
5611
5612          This should correspond to the stall duration when only that stream is
5613          active, with all processing (typically in android.*.mode) set to FAST
5614          or OFF. Setting any of the processing modes to HIGH_QUALITY
5615          effectively results in an indeterminate stall duration for all
5616          streams in a request (the regular stall calculation rules are
5617          ignored).
5618
5619          The following formats may always have a stall duration:
5620
5621          * {@link android.graphics.ImageFormat#JPEG}
5622          * {@link android.graphics.ImageFormat#RAW_SENSOR}
5623
5624          The following formats will never have a stall duration:
5625
5626          * {@link android.graphics.ImageFormat#YUV_420_888}
5627          * {@link android.graphics.ImageFormat#RAW10}
5628
5629          All other formats may or may not have an allowed stall duration on
5630          a per-capability basis; refer to android.request.availableCapabilities
5631          for more details.
5632
5633          See android.sensor.frameDuration for more information about
5634          calculating the max frame rate (absent stalls).
5635
5636          (Keep up to date with
5637          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )
5638          </details>
5639          <hal_details>
5640          It is recommended that, where possible, all non-JPEG formats
5641          (such as RAW16) have no stall duration. RAW10, RAW12, RAW_OPAQUE
5642          and IMPLEMENTATION_DEFINED must not have stall durations.
5643          </hal_details>
5644          <tag id="V1" />
5645        </entry>
5646        <entry name="streamConfigurationMap" type="int32" visibility="public"
5647               synthetic="true" typedef="streamConfigurationMap"
5648               hwlevel="legacy">
5649          <description>The available stream configurations that this
5650          camera device supports; also includes the minimum frame durations
5651          and the stall durations for each format/size combination.
5652          </description>
5653          <details>
5654          All camera devices will support sensor maximum resolution (defined by
5655          android.sensor.info.activeArraySize) for the JPEG format.
5656
5657          For a given use case, the actual maximum supported resolution
5658          may be lower than what is listed here, depending on the destination
5659          Surface for the image data. For example, for recording video,
5660          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5661          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5662          can provide.
5663
5664          Please reference the documentation for the image data destination to
5665          check if it limits the maximum size for image data.
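
          For example, a minimal sketch (with `chars` as an illustrative
          {@link android.hardware.camera2.CameraCharacteristics} instance) of obtaining this
          map and listing the supported JPEG output sizes:

              // Look up the stream configuration map and list the supported JPEG output sizes.
              StreamConfigurationMap map =
                      chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
              long minFrameDuration =
                      map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSizes[0]);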
5666
5667          The following table describes the minimum required output stream
5668          configurations based on the hardware level
5669          (android.info.supportedHardwareLevel):
5670
5671          Format                                             | Size                                         | Hardware Level | Notes
5672          :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
5673          {@link android.graphics.ImageFormat#JPEG}          | android.sensor.info.activeArraySize (*1)     | Any            |
5674          {@link android.graphics.ImageFormat#JPEG}          | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
5675          {@link android.graphics.ImageFormat#JPEG}          | 1280x720 (720p)                               | Any            | if 720p &lt;= activeArraySize
5676          {@link android.graphics.ImageFormat#JPEG}          | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5677          {@link android.graphics.ImageFormat#JPEG}          | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5678          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG          | FULL           |
5679          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5680          {@link android.graphics.ImageFormat#PRIVATE}       | same as YUV_420_888                          | Any            |
5681
5682          Refer to android.request.availableCapabilities and {@link
5683          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
5684          stream configurations on a per-capability basis.
5685
5686          *1: For the JPEG format, the sizes may be restricted by the conditions below:
5687
5688          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
5689          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
5690          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5691          it does not have to be included in the supported JPEG sizes.
5692          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5693          the dimensions being a multiple of 16.

5694          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
5695          However, the largest JPEG size will be as close as possible to the sensor maximum
5696          resolution given the above constraints. It is required that, after aspect ratio adjustments,
5697          additional size reduction due to other issues must be less than 3% in area. For example,
5698          if the sensor maximum resolution is 3280x2464, the maximum JPEG size has aspect
5699          ratio 4:3, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be
5700          3264x2448.
5701          </details>
5702          <hal_details>
5703          Do not set this property directly
5704          (it is synthetic and will not be available at the HAL layer);
5705          set the android.scaler.availableStreamConfigurations instead.
5706
5707          Not all output formats may be supported in a configuration with
5708          an input stream of a particular format. For more details, see
5709          android.scaler.availableInputOutputFormatsMap.
5710
5711          It is recommended (but not mandatory) to also include half/quarter
5712          of sensor maximum resolution for JPEG formats (regardless of hardware
5713          level).
5714
5715          (The following is a rewording of the above required table):
5716
5717          The HAL must include sensor maximum resolution (defined by
5718          android.sensor.info.activeArraySize).
5719
5720          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5721          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5722          here as output streams.
5723
5724          It must also include each resolution below if it is smaller than or
5725          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5726          formats), as output streams:
5727
5728          * 240p (320 x 240)
5729          * 480p (640 x 480)
5730          * 720p (1280 x 720)
5731          * 1080p (1920 x 1080)
5732
5733          For LIMITED capability devices
5734          (`android.info.supportedHardwareLevel == LIMITED`),
5735          the HAL only has to list up to the maximum video size
5736          supported by the device.
5737
5738          Regardless of hardware level, every output resolution available for
5739          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5740
5741          This supersedes the following fields, which are now deprecated:
5742
5743          * availableFormats
5744          * available[Processed,Raw,Jpeg]Sizes
5745          </hal_details>
5746        </entry>
5747        <entry name="croppingType" type="byte" visibility="public" enum="true"
5748               hwlevel="legacy">
5749          <enum>
5750            <value>CENTER_ONLY
5751              <notes>
5752                The camera device only supports centered crop regions.
5753              </notes>
5754            </value>
5755            <value>FREEFORM
5756              <notes>
5757                The camera device supports arbitrarily chosen crop regions.
5758              </notes>
5759            </value>
5760          </enum>
5761          <description>The crop type that this camera device supports.</description>
5762          <details>
5763          When passing a non-centered crop region (android.scaler.cropRegion) to a camera
5764          device that only supports CENTER_ONLY cropping, the camera device will move the
5765          crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
5766          and keep the crop region width and height unchanged. The camera device will return the
5767          final used crop region in metadata result android.scaler.cropRegion.
5768
5769          Camera devices that support FREEFORM cropping will support any crop region that
5770          is inside of the active array. The camera device will apply the same crop region and
5771          return the final used crop region in capture result metadata android.scaler.cropRegion.
5772
5773          LEGACY capability devices will only support CENTER_ONLY cropping.
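
          A minimal sketch (camera2 Java API; `chars` and `requestedCrop` are illustrative)
          of the centering behavior described above for CENTER_ONLY devices:

              // The device keeps the requested width and height, but moves the region to the
              // center of the active array (coordinates are relative to the active array).
              Rect active = chars.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
              int left = (active.width() - requestedCrop.width()) / 2;
              int top = (active.height() - requestedCrop.height()) / 2;
              Rect centeredCrop = new Rect(left, top,
                      left + requestedCrop.width(), top + requestedCrop.height());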
5774          </details>
5775        </entry>
5776      </static>
5777    </section>
5778    <section name="sensor">
5779      <controls>
5780        <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
5781          <description>Duration each pixel is exposed to
5782          light.</description>
5783          <units>Nanoseconds</units>
5784          <range>android.sensor.info.exposureTimeRange</range>
5785          <details>If the sensor can't expose this exact duration, it will shorten the
5786          duration exposed to the nearest possible value (rather than expose longer).
5787          The final exposure time used will be available in the output capture result.
5788
5789          This control is only effective if android.control.aeMode or android.control.mode is set to
5790          OFF; otherwise the auto-exposure algorithm will override this value.
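
          For example, a minimal sketch (camera2 Java API; `builder` is an illustrative
          {@link android.hardware.camera2.CaptureRequest.Builder}) of requesting a manual
          exposure time:

              // Disable auto-exposure so the manual sensor values take effect.
              builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
              builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in nanoseconds
              builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);         // ISO 100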
5791          </details>
5792          <tag id="V1" />
5793        </entry>
5794        <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
5795          <description>Duration from start of frame exposure to
5796          start of next frame exposure.</description>
5797          <units>Nanoseconds</units>
5798          <range>See android.sensor.info.maxFrameDuration,
5799          android.scaler.streamConfigurationMap. The duration
5800          is capped to `max(duration, exposureTime + overhead)`.</range>
5801          <details>
5802          The maximum frame rate that can be supported by a camera subsystem is
5803          a function of many factors:
5804
5805          * Requested resolutions of output image streams
5806          * Availability of binning / skipping modes on the imager
5807          * The bandwidth of the imager interface
5808          * The bandwidth of the various ISP processing blocks
5809
5810          Since these factors can vary greatly between different ISPs and
5811          sensors, the camera abstraction tries to represent the bandwidth
5812          restrictions with as simple a model as possible.
5813
5814          The model presented has the following characteristics:
5815
5816          * The image sensor is always configured to output the smallest
5817          resolution possible given the application's requested output stream
5818          sizes.  The smallest resolution is defined as being at least as large
5819          as the largest requested output stream size; the camera pipeline must
5820          never digitally upsample sensor data when the crop region covers the
5821          whole sensor. In general, this means that if only small output stream
5822          resolutions are configured, the sensor can provide a higher frame
5823          rate.
5824          * Since any request may use any or all the currently configured
5825          output streams, the sensor and ISP must be configured to support
5826          scaling a single capture to all the streams at the same time.  This
5827          means the camera pipeline must be ready to produce the largest
5828          requested output size without any delay.  Therefore, the overall
5829          frame rate of a given configured stream set is governed only by the
5830          largest requested stream resolution.
5831          * Using more than one output stream in a request does not affect the
5832          frame duration.
5833          * Certain format-streams may need to do additional background processing
5834          before data is consumed/produced by that stream. These processors
5835          can run concurrently to the rest of the camera pipeline, but
5836          cannot process more than 1 capture at a time.
5837
5838          The necessary information for the application, given the model above,
5839          is provided via the android.scaler.streamConfigurationMap field using
5840          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
5841          These are used to determine the maximum frame rate / minimum frame
5842          duration that is possible for a given stream configuration.
5843
5844          Specifically, the application can use the following rules to
5845          determine the minimum frame duration it can request from the camera
5846          device:
5847
5848          1. Let the set of currently configured input/output streams
5849          be called `S`.
5850          1. Find the minimum frame durations for each stream in `S`, by looking
5851          it up in android.scaler.streamConfigurationMap using {@link
5852          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
5853          (with its respective size/format). Let this set of frame durations be
5854          called `F`.
5855          1. For any given request `R`, the minimum frame duration allowed
5856          for `R` is the maximum out of all values in `F`. Let the streams
5857          used in `R` be called `S_r`.
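
          A minimal sketch of these rules using the camera2 Java API (`map`, `previewSize`,
          and `jpegSize` are illustrative):

              // The minimum frame duration allowed for a request is the maximum of the
              // per-stream minimum frame durations of the streams the request uses.
              long[] perStreamMin = {
                      map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, previewSize),
                      map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize),
              };
              long requestMinDuration = 0;
              for (long d : perStreamMin) {
                  requestMinDuration = Math.max(requestMinDuration, d);
              }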
5858
5859          If none of the streams in `S_r` have a stall time (listed in {@link
5860          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
5861          using its respective size/format), then the frame duration in `F`
5862          determines the steady state frame rate that the application will get
5863          if it uses `R` as a repeating request. Let this special kind of
5864          request be called `Rsimple`.
5865
5866          A repeating request `Rsimple` can be _occasionally_ interleaved
5867          with a single capture of a new request `Rstall` (which has at least
5868          one in-use stream with a non-0 stall time). If `Rstall` has the
5869          same minimum frame duration, this will not cause a frame rate loss,
5870          as long as all buffers from the previous `Rstall` have already been
5871          delivered.
5872
5873          For more details about stalling, see
5874          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.
5875
5876          This control is only effective if android.control.aeMode or android.control.mode is set to
5877          OFF; otherwise the auto-exposure algorithm will override this value.
5878          </details>
5879          <hal_details>
5880          For more details about stalling, see
5881          android.scaler.availableStallDurations.
5882          </hal_details>
5883          <tag id="V1" />
5884        </entry>
5885        <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
5886          <description>The amount of gain applied to sensor data
5887          before processing.</description>
5888          <units>ISO arithmetic units</units>
5889          <range>android.sensor.info.sensitivityRange</range>
5890          <details>
5891          The sensitivity is the standard ISO sensitivity value,
5892          as defined in ISO 12232:2006.
5893
5894          The sensitivity must be within android.sensor.info.sensitivityRange, and
5895          if if it less than android.sensor.maxAnalogSensitivity, the camera device
5896          if it is less than android.sensor.maxAnalogSensitivity, the camera device
5897
5898          If the camera device cannot apply the exact sensitivity
5899          requested, it will reduce the gain to the nearest supported
5900          value. The final sensitivity used will be available in the
5901          output capture result.
5902          </details>
5903          <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
5904          <tag id="V1" />
5905        </entry>
5906      </controls>
5907      <static>
5908        <namespace name="info">
5909          <entry name="activeArraySize" type="int32" visibility="public"
5910          type_notes="Four ints defining the active pixel rectangle"
5911          container="array" typedef="rectangle" hwlevel="legacy">
5912            <array>
5913              <size>4</size>
5914            </array>
5915            <description>
5916            The area of the image sensor which corresponds to active pixels after any geometric
5917            distortion correction has been applied.
5918            </description>
5919            <units>Pixel coordinates on the image sensor</units>
5920            <details>
5921            This is the rectangle representing the size of the active region of the sensor (i.e.
5922            the region that actually receives light from the scene) after any geometric correction
5923            has been applied, and should be treated as the maximum size in pixels of any of the
5924            image output formats aside from the raw formats.
5925
5926            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
5927            the full pixel array, and the size of the full pixel array is given by
5928            android.sensor.info.pixelArraySize.
5929
5930            The coordinate system for most other keys that list pixel coordinates, including
5931            android.scaler.cropRegion, is defined relative to the active array rectangle given in
5932            this field, with `(0, 0)` being the top-left of this rectangle.
5933
5934            The active array may be smaller than the full pixel array, since the full array may
5935            include black calibration pixels or other inactive regions, and geometric correction
5936            resulting in scaling or cropping may have been applied.
5937            </details>
5938            <hal_details>
5939            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
5940            &amp;gt;= `(0,0)`.
5941            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
5942            </hal_details>
5943            <tag id="RAW" />
5944          </entry>
5945          <entry name="sensitivityRange" type="int32" visibility="public"
5946          type_notes="Range of supported sensitivities"
5947          container="array" typedef="rangeInt"
5948          hwlevel="full">
5949            <array>
5950              <size>2</size>
5951            </array>
5952            <description>Range of sensitivities for android.sensor.sensitivity supported by this
5953            camera device.</description>
5954            <range>Min &lt;= 100, Max &amp;gt;= 800</range>
5955            <details>
5956              The values are the standard ISO sensitivity values,
5957              as defined in ISO 12232:2006.
5958            </details>
5959
5960            <tag id="BC" />
5961            <tag id="V1" />
5962          </entry>
5963          <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
5964            hwlevel="full">
5965            <enum>
5966              <value>RGGB</value>
5967              <value>GRBG</value>
5968              <value>GBRG</value>
5969              <value>BGGR</value>
5970              <value>RGB
5971              <notes>Sensor is not Bayer; output has 3 16-bit
5972              values for each pixel, instead of just 1 16-bit value
5973              per pixel.</notes></value>
5974            </enum>
5975            <description>The arrangement of color filters on sensor;
5976            represents the colors in the top-left 2x2 section of
5977            the sensor, in reading order.</description>
5978            <tag id="RAW" />
5979          </entry>
5980          <entry name="exposureTimeRange" type="int64" visibility="public"
5981                 type_notes="nanoseconds" container="array" typedef="rangeLong"
5982                 hwlevel="full">
5983            <array>
5984              <size>2</size>
5985            </array>
5986            <description>The range of image exposure times for android.sensor.exposureTime supported
5987            by this camera device.
5988            </description>
5989            <units>Nanoseconds</units>
5990            <range>The minimum exposure time will be less than 100 us. For FULL
5991            capability devices (android.info.supportedHardwareLevel == FULL),
5992            the maximum exposure time will be greater than 100ms.</range>
5993            <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
5994            the maximum of the range SHOULD be at least 1 second (1e9), and MUST be at least
5995            100ms.
5996            </hal_details>
5997            <tag id="V1" />
5998          </entry>
5999          <entry name="maxFrameDuration" type="int64" visibility="public"
6000                 hwlevel="full">
6001            <description>The maximum possible frame duration (minimum frame rate) for
6002            android.sensor.frameDuration that is supported by this camera device.</description>
6003            <units>Nanoseconds</units>
6004            <range>For FULL capability devices
6005            (android.info.supportedHardwareLevel == FULL), at least 100ms.
6006            </range>
6007            <details>Attempting to use frame durations beyond the maximum will result in the frame
6008            duration being clipped to the maximum. See that control for a full definition of frame
6009            durations.
6010
6011            Refer to {@link
6012            android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
6013            for the minimum frame duration values.
6014            </details>
6015            <hal_details>
6016            For FULL capability devices (android.info.supportedHardwareLevel == FULL),
6017            the maximum of the range SHOULD be at least
6018            1 second (1e9), and MUST be at least 100ms (100e6).
6019
6020            android.sensor.info.maxFrameDuration must be greater or
6021            equal to the android.sensor.info.exposureTimeRange max
6022            value (since exposure time overrides frame duration).
6023
6024            Available minimum frame durations for JPEG must be no greater
6025            than that of the YUV_420_888/IMPLEMENTATION_DEFINED
6026            minimum frame durations (for that respective size).
6027
6028            Since JPEG processing is considered offline and can take longer than
6029            a single uncompressed capture, refer to
6030            android.scaler.availableStallDurations
6031            for details about encoding this scenario.
6032            </hal_details>
6033            <tag id="V1" />
6034          </entry>
6035          <entry name="physicalSize" type="float" visibility="public"
6036          type_notes="width x height"
6037          container="array" typedef="sizeF" hwlevel="legacy">
6038            <array>
6039              <size>2</size>
6040            </array>
6041            <description>The physical dimensions of the full pixel
6042            array.</description>
6043            <units>Millimeters</units>
6044            <details>This is the physical size of the sensor pixel
6045            array defined by android.sensor.info.pixelArraySize.
6046            </details>
6047            <hal_details>Needed for FOV calculation for old API</hal_details>
6048            <tag id="V1" />
6049            <tag id="BC" />
6050          </entry>
6051          <entry name="pixelArraySize" type="int32" visibility="public"
6052          container="array" typedef="size" hwlevel="legacy">
6053            <array>
6054              <size>2</size>
6055            </array>
6056            <description>Dimensions of the full pixel array, possibly
6057            including black calibration pixels.</description>
6058            <units>Pixels</units>
6059            <details>The pixel count of the full pixel array of the image sensor, which covers
6060            android.sensor.info.physicalSize area.  This represents the full pixel dimensions of
6061            the raw buffers produced by this sensor.
6062
6063            If a camera device supports raw sensor formats, either this or
6064            android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
6065            output formats listed in android.scaler.streamConfigurationMap (this depends on
6066            whether or not the image sensor returns buffers containing pixels that are not
6067            part of the active array region for black level calibration or other purposes).
6068
6069            Some parts of the full pixel array may not receive light from the scene,
6070            or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
6071            defines the rectangle of active pixels that will be included in processed image
6072            formats.
6073            </details>
6074            <tag id="RAW" />
6075            <tag id="BC" />
6076          </entry>
6077          <entry name="whiteLevel" type="int32" visibility="public">
6078            <description>
6079            Maximum raw value output by sensor.
6080            </description>
6081            <range>&amp;gt; 255 (8-bit output)</range>
6082            <details>
6083            This specifies the fully-saturated encoding level for the raw
6084            sample values from the sensor.  Saturation at this level is typically caused by the
6085            sensor becoming highly non-linear or by clipping. The minimum for
6086            each channel is specified by the offset in the
6087            android.sensor.blackLevelPattern key.
6088
6089            The white level is typically determined either by sensor bit depth
6090            (8-14 bits is expected), or by the point where the sensor response
6091            becomes too non-linear to be useful.  The default value for this is
6092            the maximum representable value for a 16-bit raw sample (2^16 - 1).
6093
6094            The white level values of captured images may vary for different
6095            capture settings (e.g., android.sensor.sensitivity). This key
6096            represents a coarse approximation for such cases. It is recommended
6097            to use android.sensor.dynamicWhiteLevel for captures when supported
6098            by the camera device, which provides more accurate white level values.
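
            As an illustrative sketch (camera2 Java API; `chars` and `rawSample` are
            assumptions), a raw sample can be normalized using this key together with
            android.sensor.blackLevelPattern:

                // Normalize a raw sample to [0, 1] using the black level offset and white level.
                int whiteLevel = chars.get(CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
                BlackLevelPattern blackPattern =
                        chars.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
                int blackLevel = blackPattern.getOffsetForIndex(0, 0); // channel at column 0, row 0
                float normalized = (rawSample - blackLevel) / (float) (whiteLevel - blackLevel);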
6099            </details>
6100            <hal_details>
6101            The full bit depth of the sensor must be available in the raw data,
6102            so the value for linear sensors should not be significantly lower
6103            than maximum raw value supported, i.e. 2^(sensor bits per pixel).
6104            </hal_details>
6105            <tag id="RAW" />
6106          </entry>
6107          <entry name="timestampSource" type="byte" visibility="public"
6108                 enum="true" hwlevel="legacy">
6109            <enum>
6110              <value>UNKNOWN
6111                <notes>
6112                Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
6113                but cannot be compared to timestamps from other subsystems
6114                (e.g. accelerometer, gyro etc.), or other instances of the same or different
6115                camera devices in the same system. Timestamps between streams and results for
6116                a single camera instance are comparable, and the timestamps for all buffers
6117                and the result metadata generated by a single capture are identical.
6118                </notes>
6119              </value>
6120              <value>REALTIME
6121                <notes>
6122                Timestamps from android.sensor.timestamp are in the same timebase as
6123                {@link android.os.SystemClock#elapsedRealtimeNanos},
6124                and they can be compared to other timestamps using that base.
6125                </notes>
6126              </value>
6127            </enum>
6128            <description>The time base source for sensor capture start timestamps.</description>
6129            <details>
6130            The timestamps provided for captures are always in nanoseconds and monotonic, but
6131            may not be based on a time source that can be compared to other system time sources.
6132
6133            This characteristic defines the source for the timestamps, and therefore whether they
6134            can be compared against other system time sources/timestamps.
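
            A minimal sketch (camera2 Java API; `chars` is illustrative) of checking whether
            capture timestamps can be compared against
            {@link android.os.SystemClock#elapsedRealtimeNanos}:

                // Capture timestamps are only comparable to other system clocks when the
                // source is REALTIME; otherwise they are only comparable within this camera.
                Integer source = chars.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
                boolean comparable = (source != null
                        &amp;&amp; source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME);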
6135            </details>
6136          <tag id="V1" />
6137        </entry>
6138        <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
6139               typedef="boolean">
6140          <enum>
6141            <value>FALSE</value>
6142            <value>TRUE</value>
6143          </enum>
6144          <description>Whether the RAW images output from this camera device are subject to
6145          lens shading correction.</description>
6146          <details>
6147          If TRUE, all images produced by the camera device in the RAW image formats will
6148          have lens shading correction already applied to them. If FALSE, the images will
6149          not be adjusted for lens shading correction.
6150          See android.request.maxNumOutputRaw for a list of RAW image formats.
6151
6152          This key will be `null` for all devices that do not report this information.
6153          Devices with RAW capability will always report this information in this key.
6154          </details>
6155        </entry>
6156        <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
6157          type_notes="Four ints defining the active pixel rectangle" container="array"
6158          typedef="rectangle" hwlevel="legacy">
6159            <array>
6160              <size>4</size>
6161            </array>
6162            <description>
6163            The area of the image sensor which corresponds to active pixels prior to the
6164            application of any geometric distortion correction.
6165            </description>
6166            <units>Pixel coordinates on the image sensor</units>
6167            <details>
6168            This is the rectangle representing the size of the active region of the sensor (i.e.
6169            the region that actually receives light from the scene) before any geometric correction
6170            has been applied, and should be treated as the active region rectangle for any of the
6171            raw formats.  All metadata associated with raw processing (e.g. the lens shading
6172            correction map, and radial distortion fields) treats the top, left of this rectangle as
6173            the origin, (0,0).
6174
6175            The size of this region determines the maximum field of view and the maximum number of
6176            pixels that an image from this sensor can contain, prior to the application of
6177            geometric distortion correction. The effective maximum pixel dimensions of a
6178            post-distortion-corrected image is given by the android.sensor.info.activeArraySize
6179            field, and the effective maximum field of view for a post-distortion-corrected image
6180            can be calculated by applying the geometric distortion correction fields to this
6181            rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
6182
6183            E.g. to calculate the position of a pixel, (x,y), in a processed YUV output image with the
6184            dimensions in android.sensor.info.activeArraySize, given the position of a pixel,
6185            (x', y'), in the raw pixel array with dimensions given in
6186            android.sensor.info.pixelArraySize:
6187
6188            1. Choose a pixel (x', y') within the active array region of the raw buffer given in
6189            android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
6190            to be outside of the FOV, and will not be shown in the processed output image.
6191            1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
6192            (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
6193            buffers is defined relative to the top, left of the
6194            android.sensor.info.preCorrectionActiveArraySize rectangle.
6195            1. If the resulting corrected pixel coordinate is within the region given in
6196            android.sensor.info.activeArraySize, then the position of this pixel in the
6197            processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
6198            when the top, left coordinate of that buffer is treated as (0, 0).
6199
6200            Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
6201            is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
6202            android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
6203            correction doesn't change the pixel coordinate, the resulting pixel selected in
6204            pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
6205            with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
6206            relative to the top,left of post-processed YUV output buffer with dimensions given in
6207            android.sensor.info.activeArraySize.
6208
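            As a rough, non-normative sketch in Java, the mapping above can be written as
            follows, assuming the geometric distortion correction is a no-op (the helper
            name and parameters are illustrative, not part of the API):

                // Map a raw pixel (xRaw, yRaw) to processed-output coordinates, or return
                // null if the pixel falls outside the output field of view.
                static int[] rawToProcessed(int xRaw, int yRaw,
                        android.graphics.Rect preCorrection, android.graphics.Rect active) {
                    if (!preCorrection.contains(xRaw, yRaw)) {
                        return null;  // outside the pre-correction active array
                    }
                    // Identity distortion assumed, so the post-distortion coordinate is unchanged.
                    int xi = xRaw;
                    int yi = yRaw;
                    if (!active.contains(xi, yi)) {
                        return null;  // cropped out by the post-correction active array
                    }
                    return new int[] { xi - active.left, yi - active.top };
                }

            With the example values in the previous paragraph, this sketch maps the raw
            pixel (25, 25) to (5, 5) in the processed output buffer.
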
6209            The currently supported fields that correct for geometric distortion are:
6210
6211            1. android.lens.radialDistortion.
6212
6213            If all of the geometric distortion fields are no-ops, this rectangle will be the same
6214            as the post-distortion-corrected rectangle given in
6215            android.sensor.info.activeArraySize.
6216
6217            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6218            the full pixel array, and the size of the full pixel array is given by
6219            android.sensor.info.pixelArraySize.
6220
6221            The pre-correction active array may be smaller than the full pixel array, since the
6222            full array may include black calibration pixels or other inactive regions.
6223            </details>
6224            <hal_details>
6225            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6226            &amp;gt;= `(0,0)`.
6227            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
6228
6229            If omitted by the HAL implementation, the camera framework will assume that this is
6230            the same as the post-correction active array region given in
6231            android.sensor.info.activeArraySize.
6232            </hal_details>
6233            <tag id="RAW" />
6234          </entry>
6235        </namespace>
6236        <entry name="referenceIlluminant1" type="byte" visibility="public"
6237               enum="true">
6238          <enum>
6239            <value id="1">DAYLIGHT</value>
6240            <value id="2">FLUORESCENT</value>
6241            <value id="3">TUNGSTEN
6242              <notes>Incandescent light</notes>
6243            </value>
6244            <value id="4">FLASH</value>
6245            <value id="9">FINE_WEATHER</value>
6246            <value id="10">CLOUDY_WEATHER</value>
6247            <value id="11">SHADE</value>
6248            <value id="12">DAYLIGHT_FLUORESCENT
6249              <notes>D 5700 - 7100K</notes>
6250            </value>
6251            <value id="13">DAY_WHITE_FLUORESCENT
6252              <notes>N 4600 - 5400K</notes>
6253            </value>
6254            <value id="14">COOL_WHITE_FLUORESCENT
6255              <notes>W 3900 - 4500K</notes>
6256            </value>
6257            <value id="15">WHITE_FLUORESCENT
6258              <notes>WW 3200 - 3700K</notes>
6259            </value>
6260            <value id="17">STANDARD_A</value>
6261            <value id="18">STANDARD_B</value>
6262            <value id="19">STANDARD_C</value>
6263            <value id="20">D55</value>
6264            <value id="21">D65</value>
6265            <value id="22">D75</value>
6266            <value id="23">D50</value>
6267            <value id="24">ISO_STUDIO_TUNGSTEN</value>
6268          </enum>
6269          <description>
6270          The standard reference illuminant used as the scene light source when
6271          calculating the android.sensor.colorTransform1,
6272          android.sensor.calibrationTransform1, and
6273          android.sensor.forwardMatrix1 matrices.
6274          </description>
6275          <details>
6276          The values in this key correspond to the values defined for the
6277          EXIF LightSource tag. These illuminants are standard light sources
6278          that are often used when calibrating camera devices.
6279
6280          If this key is present, then android.sensor.colorTransform1,
6281          android.sensor.calibrationTransform1, and
6282          android.sensor.forwardMatrix1 will also be present.
6283
6284          Some devices may choose to provide a second set of calibration
6285          information for improved quality, including
6286          android.sensor.referenceIlluminant2 and its corresponding matrices.
6287          </details>
6288          <hal_details>
6289          The first reference illuminant (android.sensor.referenceIlluminant1)
6290          and corresponding matrices must be present to support the RAW capability
6291          and DNG output.
6292
6293          When producing raw images with a color profile that has only been
6294          calibrated against a single light source, it is valid to omit
6295          android.sensor.referenceIlluminant2 along with the
6296          android.sensor.colorTransform2, android.sensor.calibrationTransform2,
6297          and android.sensor.forwardMatrix2 matrices.
6298
6299          If only android.sensor.referenceIlluminant1 is included, it should be
6300          chosen so that it is representative of typical scene lighting.  In
6301          general, D50 or DAYLIGHT will be chosen for this case.
6302
6303          If both android.sensor.referenceIlluminant1 and
6304          android.sensor.referenceIlluminant2 are included, they should be
6305          chosen to represent the typical range of scene lighting conditions.
6306          In general, a low color temperature illuminant such as Standard-A will
6307          be chosen for the first reference illuminant and a higher color
6308          temperature illuminant such as D65 will be chosen for the second
6309          reference illuminant.
6310          </hal_details>
6311          <tag id="RAW" />
6312        </entry>
6313        <entry name="referenceIlluminant2" type="byte" visibility="public">
6314          <description>
6315          The standard reference illuminant used as the scene light source when
6316          calculating the android.sensor.colorTransform2,
6317          android.sensor.calibrationTransform2, and
6318          android.sensor.forwardMatrix2 matrices.
6319          </description>
6320          <range>Any value listed in android.sensor.referenceIlluminant1</range>
6321          <details>
6322          See android.sensor.referenceIlluminant1 for more details.
6323
6324          If this key is present, then android.sensor.colorTransform2,
6325          android.sensor.calibrationTransform2, and
6326          android.sensor.forwardMatrix2 will also be present.
6327          </details>
6328          <tag id="RAW" />
6329        </entry>
6330        <entry name="calibrationTransform1" type="rational"
6331        visibility="public" optional="true"
6332        type_notes="3x3 matrix in row-major-order" container="array"
6333        typedef="colorSpaceTransform">
6334          <array>
6335            <size>3</size>
6336            <size>3</size>
6337          </array>
6338          <description>
6339          A per-device calibration transform matrix that maps from the
6340          reference sensor colorspace to the actual device sensor colorspace.
6341          </description>
6342          <details>
6343          This matrix is used to correct for per-device variations in the
6344          sensor colorspace, and is used for processing raw buffer data.
6345
6346          The matrix is expressed as a 3x3 matrix in row-major-order, and
6347          contains a per-device calibration transform that maps colors
6348          from reference sensor color space (i.e. the "golden module"
6349          colorspace) into this camera device's native sensor color
6350          space under the first reference illuminant
6351          (android.sensor.referenceIlluminant1).
6352          </details>
6353          <tag id="RAW" />
6354        </entry>
6355        <entry name="calibrationTransform2" type="rational"
6356        visibility="public" optional="true"
6357        type_notes="3x3 matrix in row-major-order" container="array"
6358        typedef="colorSpaceTransform">
6359          <array>
6360            <size>3</size>
6361            <size>3</size>
6362          </array>
6363          <description>
6364          A per-device calibration transform matrix that maps from the
6365          reference sensor colorspace to the actual device sensor colorspace
6366          (this is the colorspace of the raw buffer data).
6367          </description>
6368          <details>
6369          This matrix is used to correct for per-device variations in the
6370          sensor colorspace, and is used for processing raw buffer data.
6371
6372          The matrix is expressed as a 3x3 matrix in row-major-order, and
6373          contains a per-device calibration transform that maps colors
6374          from reference sensor color space (i.e. the "golden module"
6375          colorspace) into this camera device's native sensor color
6376          space under the second reference illuminant
6377          (android.sensor.referenceIlluminant2).
6378
6379          This matrix will only be present if the second reference
6380          illuminant is present.
6381          </details>
6382          <tag id="RAW" />
6383        </entry>
6384        <entry name="colorTransform1" type="rational"
6385        visibility="public" optional="true"
6386        type_notes="3x3 matrix in row-major-order" container="array"
6387        typedef="colorSpaceTransform">
6388          <array>
6389            <size>3</size>
6390            <size>3</size>
6391          </array>
6392          <description>
6393          A matrix that transforms color values from CIE XYZ color space to
6394          reference sensor color space.
6395          </description>
6396          <details>
6397          This matrix is used to convert from the standard CIE XYZ color
6398          space to the reference sensor colorspace, and is used when processing
6399          raw buffer data.
6400
6401          The matrix is expressed as a 3x3 matrix in row-major-order, and
6402          contains a color transform matrix that maps colors from the CIE
6403          XYZ color space to the reference sensor color space (i.e. the
6404          "golden module" colorspace) under the first reference illuminant
6405          (android.sensor.referenceIlluminant1).
6406
6407          The white points chosen in both the reference sensor color space
6408          and the CIE XYZ colorspace when calculating this transform will
6409          match the standard white point for the first reference illuminant
6410          (i.e. no chromatic adaptation will be applied by this transform).
6411          </details>
6412          <tag id="RAW" />
6413        </entry>
6414        <entry name="colorTransform2" type="rational"
6415        visibility="public" optional="true"
6416        type_notes="3x3 matrix in row-major-order" container="array"
6417        typedef="colorSpaceTransform">
6418          <array>
6419            <size>3</size>
6420            <size>3</size>
6421          </array>
6422          <description>
6423          A matrix that transforms color values from CIE XYZ color space to
6424          reference sensor color space.
6425          </description>
6426          <details>
6427          This matrix is used to convert from the standard CIE XYZ color
6428          space to the reference sensor colorspace, and is used when processing
6429          raw buffer data.
6430
6431          The matrix is expressed as a 3x3 matrix in row-major-order, and
6432          contains a color transform matrix that maps colors from the CIE
6433          XYZ color space to the reference sensor color space (i.e. the
6434          "golden module" colorspace) under the second reference illuminant
6435          (android.sensor.referenceIlluminant2).
6436
6437          The white points chosen in both the reference sensor color space
6438          and the CIE XYZ colorspace when calculating this transform will
6439          match the standard white point for the second reference illuminant
6440          (i.e. no chromatic adaptation will be applied by this transform).
6441
6442          This matrix will only be present if the second reference
6443          illuminant is present.
6444          </details>
6445          <tag id="RAW" />
6446        </entry>
6447        <entry name="forwardMatrix1" type="rational"
6448        visibility="public" optional="true"
6449        type_notes="3x3 matrix in row-major-order" container="array"
6450        typedef="colorSpaceTransform">
6451          <array>
6452            <size>3</size>
6453            <size>3</size>
6454          </array>
6455          <description>
6456          A matrix that transforms white balanced camera colors from the reference
6457          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6458          </description>
6459          <details>
6460          This matrix is used to convert to the standard CIE XYZ colorspace, and
6461          is used when processing raw buffer data.
6462
6463          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6464          a color transform matrix that maps white balanced colors from the
6465          reference sensor color space to the CIE XYZ color space with a D50 white
6466          point.
6467
6468          Under the first reference illuminant (android.sensor.referenceIlluminant1)
6469          this matrix is chosen so that the standard white point for this reference
6470          illuminant in the reference sensor colorspace is mapped to D50 in the
6471          CIE XYZ colorspace.
6472          </details>
6473          <tag id="RAW" />
6474        </entry>
6475        <entry name="forwardMatrix2" type="rational"
6476        visibility="public" optional="true"
6477        type_notes="3x3 matrix in row-major-order" container="array"
6478        typedef="colorSpaceTransform">
6479          <array>
6480            <size>3</size>
6481            <size>3</size>
6482          </array>
6483          <description>
6484          A matrix that transforms white balanced camera colors from the reference
6485          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6486          </description>
6487          <details>
6488          This matrix is used to convert to the standard CIE XYZ colorspace, and
6489          is used when processing raw buffer data.
6490
6491          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6492          a color transform matrix that maps white balanced colors from the
6493          reference sensor color space to the CIE XYZ color space with a D50 white
6494          point.
6495
6496          Under the second reference illuminant (android.sensor.referenceIlluminant2)
6497          this matrix is chosen so that the standard white point for this reference
6498          illuminant in the reference sensor colorspace is mapped to D50 in the
6499          CIE XYZ colorspace.
6500
6501          This matrix will only be present if the second reference
6502          illuminant is present.
6503          </details>
6504          <tag id="RAW" />
6505        </entry>
6506        <entry name="baseGainFactor" type="rational"
6507        optional="true">
6508          <description>Gain factor from electrons to raw units when
6509          ISO=100</description>
6510          <tag id="FUTURE" />
6511        </entry>
6512        <entry name="blackLevelPattern" type="int32" visibility="public"
6513        optional="true" type_notes="2x2 raw count block" container="array"
6514        typedef="blackLevelPattern">
6515          <array>
6516            <size>4</size>
6517          </array>
6518          <description>
6519          A fixed black level offset for each of the color filter arrangement
6520          (CFA) mosaic channels.
6521          </description>
6522          <range>&amp;gt;= 0 for each.</range>
6523          <details>
6524          This key specifies the zero light value for each of the CFA mosaic
6525          channels in the camera sensor.  The maximal value output by the
6526          sensor is represented by the value in android.sensor.info.whiteLevel.
6527
6528          The values are given in the same order as channels listed for the CFA
6529          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
6530          nth value given corresponds to the black level offset for the nth
6531          color channel listed in the CFA.
6532
6533          The black level values of captured images may vary for different
6534          capture settings (e.g., android.sensor.sensitivity). This key
6535          represents a coarse approximation of such cases. It is recommended to
6536          use android.sensor.dynamicBlackLevel or pixels from
6537          android.sensor.opticalBlackRegions directly for captures, when
6538          supported by the camera device, as these provide more accurate black
6539          level values. For raw capture in particular, it is recommended to use
6540          pixels from android.sensor.opticalBlackRegions to calculate black
6541          level values for each frame.
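
          A minimal app-side sketch of reading these offsets via the corresponding
          characteristics key is shown below; the `characteristics` variable is assumed
          to be a valid CameraCharacteristics instance:

              BlackLevelPattern pattern =
                      characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
              // The offsets follow the CFA channel order described above.
              int[] offsets = new int[BlackLevelPattern.COUNT];
              pattern.copyTo(offsets, /*destinationOffset*/ 0);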
6542          </details>
6543          <hal_details>
6544          The values are given in row-column scan order, with the first value
6545          corresponding to the element of the CFA in row=0, column=0.
6546          </hal_details>
6547          <tag id="RAW" />
6548        </entry>
6549        <entry name="maxAnalogSensitivity" type="int32" visibility="public"
6550               optional="true" hwlevel="full">
6551          <description>Maximum sensitivity that is implemented
6552          purely through analog gain.</description>
6553          <details>For android.sensor.sensitivity values less than or
6554          equal to this, all applied gain must be analog. For
6555          values above this, the gain applied can be a mix of analog and
6556          digital.</details>
6557          <tag id="V1" />
6558          <tag id="FULL" />
6559        </entry>
6560        <entry name="orientation" type="int32" visibility="public"
6561               hwlevel="legacy">
6562          <description>Clockwise angle through which the output image needs to be rotated to be
6563          upright on the device screen in its native orientation.
6564          </description>
6565          <units>Degrees of clockwise rotation; always a multiple of
6566          90</units>
6567          <range>0, 90, 180, 270</range>
6568          <details>
6569          Also defines the direction of rolling shutter readout, which is from top to bottom in
6570          the sensor's coordinate system.
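
          One common use of this key is to compute the rotation to apply so that the
          final output is upright; for example, the following sketch (assuming a valid
          CameraCharacteristics `c` and the current device orientation in degrees)
          computes a suitable android.jpeg.orientation value:

              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) {
                      return 0;
                  }
                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
                  // Round the device orientation to a multiple of 90 degrees.
                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
                  // Reverse the device orientation for front-facing cameras.
                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING)
                          == CameraCharacteristics.LENS_FACING_FRONT;
                  if (facingFront) deviceOrientation = -deviceOrientation;
                  // Rotation of the output relative to the sensor, making the image upright.
                  return (sensorOrientation + deviceOrientation + 360) % 360;
              }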
6571          </details>
6572          <tag id="BC" />
6573        </entry>
6574        <entry name="profileHueSatMapDimensions" type="int32"
6575        visibility="system" optional="true"
6576        type_notes="Number of samples for hue, saturation, and value"
6577        container="array">
6578          <array>
6579            <size>3</size>
6580          </array>
6581          <description>
6582          The number of input samples for each dimension of
6583          android.sensor.profileHueSatMap.
6584          </description>
6585          <range>
6586          Hue &amp;gt;= 1,
6587          Saturation &amp;gt;= 2,
6588          Value &amp;gt;= 1
6589          </range>
6590          <details>
6591          The number of input samples for the hue, saturation, and value
6592          dimension of android.sensor.profileHueSatMap. The order of the
6593          dimensions given is hue, saturation, value; where hue is the 0th
6594          element.
6595          </details>
6596          <tag id="RAW" />
6597        </entry>
6598      </static>
6599      <dynamic>
6600        <clone entry="android.sensor.exposureTime" kind="controls">
6601        </clone>
6602        <clone entry="android.sensor.frameDuration"
6603        kind="controls"></clone>
6604        <clone entry="android.sensor.sensitivity" kind="controls">
6605        </clone>
6606        <entry name="timestamp" type="int64" visibility="public"
6607               hwlevel="legacy">
6608          <description>Time at start of exposure of first
6609          row of the image sensor active array, in nanoseconds.</description>
6610          <units>Nanoseconds</units>
6611          <range>&amp;gt; 0</range>
6612          <details>The timestamps are also included in all image
6613          buffers produced for the same capture, and will be identical
6614          on all the outputs.
6615
6616          When android.sensor.info.timestampSource `==` UNKNOWN,
6617          the timestamps measure time since an unspecified starting point,
6618          and are monotonically increasing. They can be compared with the
6619          timestamps for other captures from the same camera device, but are
6620          not guaranteed to be comparable to any other time source.
6621
6622          When android.sensor.info.timestampSource `==` REALTIME, the
6623          timestamps measure time in the same timebase as {@link
6624          android.os.SystemClock#elapsedRealtimeNanos}, and they can
6625          be compared to other timestamps from other subsystems that
6626          are using that base.
6627
6628          For reprocessing, the timestamp will match the start of exposure of
6629          the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
6630          timestamp} in the TotalCaptureResult that was used to create the
6631          reprocess capture request.
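
          For example, when the timestamp source is REALTIME, an application can estimate
          the latency from start of exposure to the current time with a sketch like the
          following (`captureResult` is assumed to be a valid TotalCaptureResult):

              long sensorTimestampNs = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
              long latencyNs = android.os.SystemClock.elapsedRealtimeNanos() - sensorTimestampNs;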
6632          </details>
6633          <hal_details>
6634          All timestamps must be in reference to the kernel's
6635          CLOCK_BOOTTIME monotonic clock, which properly accounts for
6636          time spent asleep. This allows for synchronization with
6637          sensors that continue to operate while the system is
6638          otherwise asleep.
6639
6640          If android.sensor.info.timestampSource `==` REALTIME,
6641          the timestamp must be synchronized with the timestamps from other
6642          sensor subsystems that are using the same timebase.
6643
6644          For reprocessing, the input image's start of exposure can be looked up
6645          with android.sensor.timestamp from the metadata included in the
6646          capture request.
6647          </hal_details>
6648          <tag id="BC" />
6649        </entry>
6650        <entry name="temperature" type="float"
6651        optional="true">
6652          <description>The temperature of the sensor, sampled at the time
6653          exposure began for this frame.
6654
6655          The thermal diode being queried should be inside the sensor PCB, or
6656          somewhere close to it.
6657          </description>
6658
6659          <units>Celsius</units>
6660          <range>Optional. This value is missing if no temperature is available.</range>
6661          <tag id="FUTURE" />
6662        </entry>
6663        <entry name="neutralColorPoint" type="rational" visibility="public"
6664        optional="true" container="array">
6665          <array>
6666            <size>3</size>
6667          </array>
6668          <description>
6669          The estimated camera neutral color in the native sensor colorspace at
6670          the time of capture.
6671          </description>
6672          <details>
6673          This value gives the neutral color point encoded as an RGB value in the
6674          native sensor color space.  The neutral color point indicates the
6675          currently estimated white point of the scene illumination.  It can be
6676          used to interpolate between the provided color transforms when
6677          processing raw sensor data.
6678
6679          The order of the values is R, G, B; where R is in the lowest index.
6680          </details>
6681          <tag id="RAW" />
6682        </entry>
6683        <entry name="noiseProfile" type="double" visibility="public"
6684        optional="true" type_notes="Pairs of noise model coefficients"
6685        container="array" typedef="pairDoubleDouble">
6686          <array>
6687            <size>2</size>
6688            <size>CFA Channels</size>
6689          </array>
6690          <description>
6691          Noise model coefficients for each CFA mosaic channel.
6692          </description>
6693          <details>
6694          This key contains two noise model coefficients for each CFA channel
6695          corresponding to the sensor amplification (S) and sensor readout
6696          noise (O).  These are given as pairs of coefficients for each channel
6697          in the same order as channels listed for the CFA layout key
6698          (see android.sensor.info.colorFilterArrangement).  This is
6699          represented as an array of Pair&amp;lt;Double, Double&amp;gt;, where
6700          the first member of the Pair at index n is the S coefficient and the
6701          second member is the O coefficient for the nth color channel in the CFA.
6702
6703          These coefficients are used in a two parameter noise model to describe
6704          the amount of noise present in the image for each CFA channel.  The
6705          noise model used here is:
6706
6707          N(x) = sqrt(Sx + O)
6708
6709          Where x represents the recorded signal of a CFA channel normalized to
6710          the range [0, 1], and S and O are the noise model coefficients for
6711          that channel.
6712
6713          A more detailed description of the noise model can be found in the
6714          Adobe DNG specification for the NoiseProfile tag.
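
          As a non-normative sketch, the noise standard deviation implied by this model
          for one channel can be evaluated as follows, where `x` is the normalized signal
          and `s`, `o` are that channel's coefficients:

              static double noiseSigma(double x, double s, double o) {
                  return Math.sqrt(s * x + o);
              }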
6715          </details>
6716          <hal_details>
6717          For a CFA layout of RGGB, the list of coefficients would be given as
6718          an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
6719          for the red channel, S1 and O1 are the coefficients for the first green
6720          channel, etc.
6721          </hal_details>
6722          <tag id="RAW" />
6723        </entry>
6724        <entry name="profileHueSatMap" type="float"
6725        visibility="system" optional="true"
6726        type_notes="Mapping for hue, saturation, and value"
6727        container="array">
6728          <array>
6729            <size>hue_samples</size>
6730            <size>saturation_samples</size>
6731            <size>value_samples</size>
6732            <size>3</size>
6733          </array>
6734          <description>
6735          A mapping containing a hue shift, saturation scale, and value scale
6736          for each pixel.
6737          </description>
6738          <units>
6739          The hue shift is given in degrees; saturation and value scale factors are
6740          unitless and are between 0 and 1 inclusive
6741          </units>
6742          <details>
6743          hue_samples, saturation_samples, and value_samples are given in
6744          android.sensor.profileHueSatMapDimensions.
6745
6746          Each entry of this map contains three floats corresponding to the
6747          hue shift, saturation scale, and value scale, respectively; where the
6748          hue shift has the lowest index. The map entries are stored in the key
6749          in nested loop order, with the value divisions in the outer loop, the
6750          hue divisions in the middle loop, and the saturation divisions in the
6751          inner loop. All zero input saturation entries are required to have a
6752          value scale factor of 1.0.
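
          As a non-normative sketch, the offset of the first float of the entry at
          value index `v`, hue index `h`, and saturation index `s` in the flattened map
          follows from the storage order described above:

              static int entryOffset(int v, int h, int s, int hueSamples, int satSamples) {
                  return 3 * ((v * hueSamples + h) * satSamples + s);
              }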
6753          </details>
6754          <tag id="RAW" />
6755        </entry>
6756        <entry name="profileToneCurve" type="float"
6757        visibility="system" optional="true"
6758        type_notes="Samples defining a spline for a tone-mapping curve"
6759        container="array">
6760          <array>
6761            <size>samples</size>
6762            <size>2</size>
6763          </array>
6764          <description>
6765          A list of x,y samples defining a tone-mapping curve for gamma adjustment.
6766          </description>
6767          <range>
6768          Each sample has an input range of `[0, 1]` and an output range of
6769          `[0, 1]`.  The first sample is required to be `(0, 0)`, and the last
6770          sample is required to be `(1, 1)`.
6771          </range>
6772          <details>
6773          This key contains a default tone curve that can be applied while
6774          processing the image as a starting point for user adjustments.
6775          The curve is specified as a list of value pairs in linear gamma.
6776          The curve is interpolated using a cubic spline.
6777          </details>
6778          <tag id="RAW" />
6779        </entry>
6780        <entry name="greenSplit" type="float" visibility="public" optional="true">
6781          <description>
6782          The worst-case divergence between Bayer green channels.
6783          </description>
6784          <range>
6785          &amp;gt;= 0
6786          </range>
6787          <details>
6788          This value is an estimate of the worst case split between the
6789          Bayer green channels in the red and blue rows in the sensor color
6790          filter array.
6791
6792          The green split is calculated as follows:
6793
6794          1. A 5x5 pixel (or larger) window W within the active sensor array is
6795          chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
6796          mosaic channels (R, Gr, Gb, B).  The location and size of the window
6797          chosen are implementation defined, and should be chosen to provide a
6798          green split estimate that is both representative of the entire image
6799          for this camera sensor, and can be calculated quickly.
6800          1. The arithmetic mean of the green channels from the red
6801          rows (mean_Gr) within W is computed.
6802          1. The arithmetic mean of the green channels from the blue
6803          rows (mean_Gb) within W is computed.
6804          1. The maximum ratio R of the two means is computed as follows:
6805          `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
6806
6807          The ratio R is the green split divergence reported for this property,
6808          which represents how much the green channels differ in the mosaic
6809          pattern.  This value is typically used to determine the treatment of
6810          the green mosaic channels when demosaicing.
6811
6812          The green split value can be roughly interpreted as follows:
6813
6814          * R &amp;lt; 1.03 is a negligible split (&amp;lt;3% divergence).
6815          * 1.03 &amp;lt;= R &amp;lt;= 1.20 will require some software
6816          correction to avoid demosaic errors (3-20% divergence).
6817          * R &amp;gt; 1.20 will require strong software correction to produce
6818          a usable image (&amp;gt;20% divergence).
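
          As a non-normative sketch, the ratio computed in step 4 above can be written
          as follows, given the window means from steps 2 and 3 (names are illustrative):

              static double greenSplitRatio(double meanGr, double meanGb) {
                  return Math.max((meanGr + 1) / (meanGb + 1), (meanGb + 1) / (meanGr + 1));
              }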
6819          </details>
6820          <hal_details>
6821          The green split given may be a static value based on prior
6822          characterization of the camera sensor using the green split
6823          calculation method given here over a large, representative, sample
6824          set of images.  Other methods of calculation that produce equivalent
6825          results, and can be interpreted in the same manner, may be used.
6826          </hal_details>
6827          <tag id="RAW" />
6828        </entry>
6829      </dynamic>
6830      <controls>
6831        <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
6832          <array>
6833            <size>4</size>
6834          </array>
6835          <description>
6836            A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
6837            when android.sensor.testPatternMode is SOLID_COLOR.
6838          </description>
6839          <details>
6840          Each color channel is treated as an unsigned 32-bit integer.
6841          The camera device then uses the X most significant bits,
6842          where X is the number of bits in its Bayer raw sensor
6843          output.
6844
6845          For example, a sensor with RAW10 Bayer output would use the
6846          10 most significant bits from each color channel.
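
          As a non-normative illustration, for a RAW10 sensor the raw value for one
          channel would be the 10 most significant bits of the corresponding entry:

              int channel = 0xFFFFFFFF;                    // e.g. a full-scale channel value
              int rawBits = 10;                            // Bayer raw bit depth (assumed)
              int rawSample = channel >>> (32 - rawBits);  // 0x3FF, full scale for RAW10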
6847          </details>
6848          <hal_details>
6849          </hal_details>
6850        </entry>
6851        <entry name="testPatternMode" type="int32" visibility="public" optional="true"
6852          enum="true">
6853          <enum>
6854            <value>OFF
6855              <notes>No test pattern mode is used, and the camera
6856              device returns captures from the image sensor.
6857
6858              This is the default if the key is not set.</notes>
6859            </value>
6860            <value>SOLID_COLOR
6861              <notes>
6862              Each pixel in `[R, G_even, G_odd, B]` is replaced by its
6863              respective color channel provided in
6864              android.sensor.testPatternData.
6865
6866              For example:
6867
6868                  android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
6869
6870              All green pixels are 100% green. All red/blue pixels are black.
6871
6872                  android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
6873
6874              All red pixels are 100% red. Only the odd green pixels
6875              are 100% green. All blue pixels are 100% black.
6876              </notes>
6877            </value>
6878            <value>COLOR_BARS
6879              <notes>
6880              All pixel data is replaced with an 8-bar color pattern.
6881
6882              The vertical bars (left-to-right) are as follows:
6883
6884              * 100% white
6885              * yellow
6886              * cyan
6887              * green
6888              * magenta
6889              * red
6890              * blue
6891              * black
6892
6893              In general the image would look like the following:
6894
6895                 W Y C G M R B K
6896                 W Y C G M R B K
6897                 W Y C G M R B K
6898                 W Y C G M R B K
6899                 W Y C G M R B K
6900                 . . . . . . . .
6901                 . . . . . . . .
6902                 . . . . . . . .
6903
6904                 (B = Blue, K = Black)
6905
6906             Each bar should take up 1/8 of the sensor pixel array width.
6907             When this is not possible, the bar size should be rounded
6908             down to the nearest integer and the pattern can repeat
6909             on the right side.
6910
6911             Each bar's height must always take up the full sensor
6912             pixel array height.
6913
6914             Each pixel in this test pattern must be set to either
6915             0% intensity or 100% intensity.
6916             </notes>
6917            </value>
6918            <value>COLOR_BARS_FADE_TO_GRAY
6919              <notes>
6920              The test pattern is similar to COLOR_BARS, except that
6921              each bar should start at its specified color at the top,
6922              and fade to gray at the bottom.
6923
6924              Furthermore each bar is further subdivided into a left and
6925              right half. The left half should have a smooth gradient,
6926              and the right half should have a quantized gradient.
6927
6928              In particular, the right half should consist of blocks of the
6929              same color, each 1/16th of the active sensor pixel array width wide.
6930
6931              The least significant bits in the quantized gradient should
6932              be copied from the most significant bits of the smooth gradient.
6933
6934              The height of each bar should always be a multiple of 128.
6935              When this is not the case, the pattern should repeat at the bottom
6936              of the image.
6937              </notes>
6938            </value>
6939            <value>PN9
6940              <notes>
6941              All pixel data is replaced by a pseudo-random sequence
6942              generated from a PN9 512-bit sequence (typically implemented
6943              in hardware with a linear feedback shift register).
6944
6945              The generator should be reset at the beginning of each frame,
6946              and thus each subsequent raw frame with this test pattern should
6947              be exactly the same as the last.
6948              </notes>
6949            </value>
6950            <value id="256">CUSTOM1
6951              <notes>The first custom test pattern. All custom patterns that are
6952              available only on this camera device are at least this numeric
6953              value.
6954
6955              All of the custom test patterns will be static
6956              (that is, the raw image must not vary from frame to frame).
6957              </notes>
6958            </value>
6959          </enum>
6960          <description>When enabled, the sensor sends a test pattern instead of
6961          doing a real exposure from the camera.
6962          </description>
6963          <range>android.sensor.availableTestPatternModes</range>
6964          <details>
6965          When a test pattern is enabled, all manual sensor controls specified
6966          by android.sensor.* will be ignored. All other controls should
6967          work as normal.
6968
6969          For example, if manual flash is enabled, flash firing should still
6970          occur (and the test pattern should remain unmodified, since the flash
6971          would not actually affect it).
6972
6973          Defaults to OFF.
6974          </details>
6975          <hal_details>
6976          All test patterns are specified in the Bayer domain.
6977
6978          The HAL may choose to substitute test patterns from the sensor
6979          with test patterns from on-device memory. In that case, it should be
6980          indistinguishable to the ISP whether the data came from the
6981          sensor interconnect bus (such as CSI2) or memory.
6982          </hal_details>
6983        </entry>
6984      </controls>
6985      <dynamic>
6986        <clone entry="android.sensor.testPatternData" kind="controls">
6987        </clone>
6988        <clone entry="android.sensor.testPatternMode" kind="controls">
6989        </clone>
6990      </dynamic>
6991      <static>
6992        <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
6993          type_notes="list of enums" container="array">
6994          <array>
6995            <size>n</size>
6996          </array>
6997          <description>List of sensor test pattern modes for android.sensor.testPatternMode
6998          supported by this camera device.
6999          </description>
7000          <range>Any value listed in android.sensor.testPatternMode</range>
7001          <details>
7002            Defaults to OFF, and always includes OFF if defined.
7003          </details>
7004          <hal_details>
7005            All custom modes must be >= CUSTOM1.
7006          </hal_details>
7007        </entry>
7008      </static>
7009      <dynamic>
7010        <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
7011          <description>Duration between the start of first row exposure
7012          and the start of last row exposure.</description>
7013          <units>Nanoseconds</units>
7014          <range> &amp;gt;= 0 and &amp;lt;
7015          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
7016          <details>
7017          This is the exposure time skew between the first and last
7018          row exposure start times. The first row and the last row are
7019          the first and last rows inside of the
7020          android.sensor.info.activeArraySize.
7021
7022          For typical camera sensors that use rolling shutters, this is also equivalent
7023          to the frame readout time.
7024          </details>
7025          <hal_details>
7026          The HAL must report `0` if the sensor is using global shutter, where all pixels begin
7027          exposure at the same time.
7028          </hal_details>
7029          <tag id="V1" />
7030        </entry>
7031      </dynamic>
7032      <static>
7033        <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
7034          container="array" typedef="rectangle">
7035          <array>
7036            <size>4</size>
7037            <size>num_regions</size>
7038          </array>
7039          <description>List of disjoint rectangles indicating the sensor
7040          optically shielded black pixel regions.
7041          </description>
7042          <details>
7043            In most camera sensors, the active array is surrounded by some
7044            optically shielded pixel areas. By blocking light, these pixels
7045            provide a reliable black reference for black level compensation
7046            in the active array region.
7047
7048            This key provides a list of disjoint rectangles specifying the
7049            optically shielded (with metal shield) black pixel
7050            regions if the camera device is capable of reading out these black
7051            pixels in the output raw images. In comparison to the fixed black
7052            level values reported by android.sensor.blackLevelPattern, this key
7053            may provide a more accurate way for the application to calculate
7054            the black level of each captured raw image.
7055
7056            When this key is reported, the android.sensor.dynamicBlackLevel and
7057            android.sensor.dynamicWhiteLevel will also be reported.
7058          </details>
7059          <hal_details>
7060            This array contains (xmin, ymin, width, height). The (xmin, ymin)
7061            must be &amp;gt;= (0,0) and &amp;lt;=
7062            android.sensor.info.pixelArraySize. The (width, height) must be
7063            &amp;lt;= android.sensor.info.pixelArraySize. Each region must be
7064            outside the region reported by
7065            android.sensor.info.preCorrectionActiveArraySize.
7066
7067            The HAL must report the minimal number of disjoint regions for the
7068            optically shielded black pixel regions. For example, if a region can
7069            be covered by one rectangle, the HAL must not split this region into
7070            multiple rectangles.
7071          </hal_details>
7072        </entry>
7073      </static>
7074      <dynamic>
7075        <entry name="dynamicBlackLevel" type="float" visibility="public"
7076        optional="true" type_notes="2x2 raw count block" container="array">
7077          <array>
7078            <size>4</size>
7079          </array>
7080          <description>
7081          A per-frame dynamic black level offset for each of the color filter
7082          arrangement (CFA) mosaic channels.
7083          </description>
7084          <range>&amp;gt;= 0 for each.</range>
7085          <details>
7086          Camera sensor black levels may vary dramatically for different
7087          capture settings (e.g. android.sensor.sensitivity). The fixed black
7088          level reported by android.sensor.blackLevelPattern may be too
7089          inaccurate to represent the actual value on a per-frame basis. The
7090          camera device internal pipeline relies on reliable black level values
7091          to process the raw images appropriately. To get the best image
7092          quality, the camera device may choose to estimate the per frame black
7093          level values either based on optically shielded black regions
7094          (android.sensor.opticalBlackRegions) or its internal model.
7095
7096          This key reports the camera device estimated per-frame zero light
7097          value for each of the CFA mosaic channels in the camera sensor. The
7098          android.sensor.blackLevelPattern may only represent a coarse
7099          approximation of the actual black level values. This value is the
7100          black level used in the camera device's internal image processing pipeline,
7101          and is generally more accurate than the fixed black level values.
7102          However, since they are estimated values by the camera device, they
7103          may not be as accurate as the black level values calculated from the
7104          optical black pixels reported by android.sensor.opticalBlackRegions.
7105
7106          The values are given in the same order as channels listed for the CFA
7107          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
7108          nth value given corresponds to the black level offset for the nth
7109          color channel listed in the CFA.
7110
7111          This key will be available if android.sensor.opticalBlackRegions is
7112          available or the camera device advertises this key via
7113          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
7114          </details>
7115          <hal_details>
7116          The values are given in row-column scan order, with the first value
7117          corresponding to the element of the CFA in row=0, column=0.
7118          </hal_details>
7119          <tag id="RAW" />
7120        </entry>
7121        <entry name="dynamicWhiteLevel" type="int32" visibility="public"
7122        optional="true" >
7123          <description>
7124          Maximum raw value output by sensor for this frame.
7125          </description>
7126          <range> &amp;gt;= 0</range>
7127          <details>
7128          Since the sensor black level may change for different
7129          capture settings (e.g., android.sensor.sensitivity), the white
7130          level will change accordingly. This key is similar to
7131          android.sensor.info.whiteLevel, but specifies the camera device
7132          estimated white level for each frame.
7133
7134          This key will be available if android.sensor.opticalBlackRegions is
7135          available or the camera device advertises this key via
7136          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
7137          </details>
7138          <hal_details>
7139          The full bit depth of the sensor must be available in the raw data,
7140          so the value for linear sensors should not be significantly lower
7141          than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
7142          </hal_details>
7143          <tag id="RAW" />
7144        </entry>
7145      </dynamic>
7146      <static>
7147        <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
7148          <array>
7149            <size>n</size>
7150            <size>3</size>
7151          </array>
7152          <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
7153          <range>Must be large enough to fit the opaque RAW of corresponding size produced by
7154          the camera</range>
7155          <details>
7156          These configurations are listed as `(width, height, size_in_bytes)` tuples.
7157          This is used for sizing the gralloc buffers for opaque RAW buffers.
7158          All RAW_OPAQUE output stream configurations listed in
7159          android.scaler.availableStreamConfigurations will have a corresponding tuple in
7160          this key.
7161          </details>
7162          <hal_details>
7163              This key is added in HAL3.4.
7164              For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
7165              For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera framework
7166              will derive this key by assuming each pixel takes two bytes and no padding bytes
7167              between rows.
7168          </hal_details>
7169        </entry>
7170      </static>
7171    </section>
7172    <section name="shading">
7173      <controls>
7174        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
7175          <enum>
7176            <value>OFF
7177            <notes>No lens shading correction is applied.</notes></value>
7178            <value>FAST
7179            <notes>Apply lens shading corrections, without slowing
7180            frame rate relative to sensor raw output</notes></value>
7181            <value>HIGH_QUALITY
7182            <notes>Apply high-quality lens shading correction, at the
7183            cost of possibly reduced frame rate.</notes></value>
7184          </enum>
7185          <description>Quality of lens shading correction applied
7186          to the image data.</description>
7187          <range>android.shading.availableModes</range>
7188          <details>
7189          When set to OFF mode, no lens shading correction will be applied by the
7190          camera device, and an identity lens shading map data will be provided
7191          if `android.statistics.lensShadingMapMode == ON`. For example, for lens
7192          shading map with size of `[ 4, 3 ]`,
7193          the output android.statistics.lensShadingCorrectionMap for this case will be an identity
7194          map shown below:
7195
7196              [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7197               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7198               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7199               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7200               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7201               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
7202
7203          When set to other modes, lens shading correction will be applied by the camera
7204          device. Applications can request lens shading map data by setting
7205          android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
7206          shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
7207          data will be the one applied by the camera device for this capture request.
7208
7209          The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
7210          the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
7211          AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
7212          OFF), to get best results, it is recommended that applications wait for the AE and AWB
7213          to be converged before using the returned shading map data.
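
          For example, an application can obtain the shading map that was applied with a
          sketch like the following (`builder` and `result` are assumed to be a valid
          CaptureRequest.Builder and TotalCaptureResult, respectively):

              builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                      CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              // ... submit the request, then read the map from the completed result:
              LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);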
7214          </details>
7215        </entry>
7216        <entry name="strength" type="byte">
7217          <description>Control the amount of shading correction
7218          applied to the images</description>
7219          <units>unitless: 1-10; 10 is full shading
7220          compensation</units>
7221          <tag id="FUTURE" />
7222        </entry>
7223      </controls>
7224      <dynamic>
7225        <clone entry="android.shading.mode" kind="controls">
7226        </clone>
7227      </dynamic>
7228      <static>
7229        <entry name="availableModes" type="byte" visibility="public"
7230            type_notes="List of enums (android.shading.mode)." container="array"
7231            typedef="enumList" hwlevel="legacy">
7232          <array>
7233            <size>n</size>
7234          </array>
7235          <description>
7236          List of lens shading modes for android.shading.mode that are supported by this camera device.
7237          </description>
7238          <range>Any value listed in android.shading.mode</range>
7239          <details>
7240              This list contains lens shading modes that can be set for the camera device.
7241              Camera devices that support the MANUAL_POST_PROCESSING capability will always
7242              list OFF and FAST mode. This includes all FULL level devices.
7243              LEGACY devices will always only support FAST mode.
7244          </details>
7245          <hal_details>
7246            HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
7247            available on the camera device, but the underlying implementation can be the same for
7248            both modes. That is, if the highest quality implementation on the camera device does not
7249            slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
7250          </hal_details>
7251        </entry>
7252      </static>
7253    </section>
7254    <section name="statistics">
7255      <controls>
7256        <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
7257               hwlevel="legacy">
7258          <enum>
7259            <value>OFF
7260            <notes>Do not include face detection statistics in capture
7261            results.</notes></value>
7262            <value optional="true">SIMPLE
7263            <notes>Return face rectangle and confidence values only.
7264            </notes></value>
7265            <value optional="true">FULL
7266            <notes>Return all face
7267            metadata.
7268
7269            In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
7270            </notes></value>
7271          </enum>
7272          <description>Operating mode for the face detector
7273          unit.</description>
7274          <range>android.statistics.info.availableFaceDetectModes</range>
7275          <details>Whether face detection is enabled, and whether it
7276          should output just the basic fields or the full set of
7277          fields.</details>
7278          <hal_details>
7279            SIMPLE mode must fill in android.statistics.faceRectangles and
7280            android.statistics.faceScores.
7281            FULL mode must also fill in android.statistics.faceIds, and
7282            android.statistics.faceLandmarks.
7283          </hal_details>
7284          <tag id="BC" />
7285        </entry>
7286        <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
7287          <enum>
7288            <value>OFF</value>
7289            <value>ON</value>
7290          </enum>
7291          <description>Operating mode for histogram
7292          generation</description>
7293          <tag id="FUTURE" />
7294        </entry>
7295        <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
7296          <enum>
7297            <value>OFF</value>
7298            <value>ON</value>
7299          </enum>
7300          <description>Operating mode for sharpness map
7301          generation</description>
7302          <tag id="FUTURE" />
7303        </entry>
7304        <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
7305        typedef="boolean">
7306          <enum>
7307            <value>OFF
7308            <notes>Hot pixel map production is disabled.
7309            </notes></value>
7310            <value>ON
7311            <notes>Hot pixel map production is enabled.
7312            </notes></value>
7313          </enum>
7314          <description>
7315          Operating mode for hot pixel map generation.
7316          </description>
7317          <range>android.statistics.info.availableHotPixelMapModes</range>
7318          <details>
7319          If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
7320          If set to `false`, no hot pixel map will be returned.
7321          </details>
7322          <tag id="V1" />
7323          <tag id="RAW" />
7324        </entry>
7325      </controls>
7326      <static>
7327        <namespace name="info">
7328          <entry name="availableFaceDetectModes" type="byte"
7329                 visibility="public"
7330                 type_notes="List of enums from android.statistics.faceDetectMode"
7331                 container="array"
7332                 typedef="enumList"
7333                 hwlevel="legacy">
7334            <array>
7335              <size>n</size>
7336            </array>
7337            <description>List of face detection modes for android.statistics.faceDetectMode that are
7338            supported by this camera device.
7339            </description>
7340            <range>Any value listed in android.statistics.faceDetectMode</range>
7341            <details>OFF is always supported.
7342            </details>
7343          </entry>
7344          <entry name="histogramBucketCount" type="int32">
7345            <description>Number of histogram buckets
7346            supported</description>
7347            <range>&amp;gt;= 64</range>
7348            <tag id="FUTURE" />
7349          </entry>
7350          <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
7351            <description>The maximum number of simultaneously detectable
7352            faces.</description>
7353            <range>0 for cameras without available face detection; otherwise:
7354            `&gt;=4` for LIMITED or FULL hwlevel devices or
7355            `&gt;0` for LEGACY devices.</range>
7356            <tag id="BC" />
7357          </entry>
7358          <entry name="maxHistogramCount" type="int32">
7359            <description>Maximum value possible for a histogram
7360            bucket</description>
7361            <tag id="FUTURE" />
7362          </entry>
7363          <entry name="maxSharpnessMapValue" type="int32">
7364            <description>Maximum value possible for a sharpness map
7365            region.</description>
7366            <tag id="FUTURE" />
7367          </entry>
7368          <entry name="sharpnessMapSize" type="int32"
7369          type_notes="width x height" container="array" typedef="size">
7370            <array>
7371              <size>2</size>
7372            </array>
7373            <description>Dimensions of the sharpness
7374            map</description>
7375            <range>Must be at least 32 x 32</range>
7376            <tag id="FUTURE" />
7377          </entry>
7378          <entry name="availableHotPixelMapModes" type="byte" visibility="public"
7379                 type_notes="list of enums" container="array" typedef="boolean">
7380            <array>
7381              <size>n</size>
7382            </array>
7383            <description>
7384            List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
7385            supported by this camera device.
7386            </description>
7387            <range>Any value listed in android.statistics.hotPixelMapMode</range>
7388            <details>
            If no hot pixel map output is available for this camera device, this will contain
            only `false`.
7391
7392            ON is always supported on devices with the RAW capability.
7393            </details>
7394            <tag id="V1" />
7395            <tag id="RAW" />
7396          </entry>
7397          <entry name="availableLensShadingMapModes" type="byte" visibility="public"
7398                 type_notes="list of enums" container="array" typedef="enumList">
7399            <array>
7400              <size>n</size>
7401            </array>
7402            <description>
7403            List of lens shading map output modes for android.statistics.lensShadingMapMode that
7404            are supported by this camera device.
7405            </description>
7406            <range>Any value listed in android.statistics.lensShadingMapMode</range>
7407            <details>
7408            If no lens shading map output is available for this camera device, this key will
7409            contain only OFF.
7410
7411            ON is always supported on devices with the RAW capability.
7412            LEGACY mode devices will always only support OFF.
7413            </details>
7414          </entry>
7415        </namespace>
7416      </static>
7417      <dynamic>
7418        <clone entry="android.statistics.faceDetectMode"
7419               kind="controls"></clone>
7420        <entry name="faceIds" type="int32" visibility="hidden" container="array"
7421               hwlevel="legacy">
7422          <array>
7423            <size>n</size>
7424          </array>
7425          <description>List of unique IDs for detected faces.</description>
7426          <details>
7427          Each detected face is given a unique ID that is valid for as long as the face is visible
7428          to the camera device.  A face that leaves the field of view and later returns may be
7429          assigned a new ID.
7430
7431          Only available if android.statistics.faceDetectMode == FULL</details>
7432          <tag id="BC" />
7433        </entry>
7434        <entry name="faceLandmarks" type="int32" visibility="hidden"
7435        type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
7436        container="array" hwlevel="legacy">
7437          <array>
7438            <size>n</size>
7439            <size>6</size>
7440          </array>
7441          <description>List of landmarks for detected
7442          faces.</description>
7443          <details>
7444            The coordinate system is that of android.sensor.info.activeArraySize, with
7445            `(0, 0)` being the top-left pixel of the active array.
7446
7447            Only available if android.statistics.faceDetectMode == FULL</details>
7448          <tag id="BC" />
7449        </entry>
7450        <entry name="faceRectangles" type="int32" visibility="hidden"
7451        type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
7452        container="array" typedef="rectangle" hwlevel="legacy">
7453          <array>
7454            <size>n</size>
7455            <size>4</size>
7456          </array>
7457          <description>List of the bounding rectangles for detected
7458          faces.</description>
7459          <details>
7460            The coordinate system is that of android.sensor.info.activeArraySize, with
7461            `(0, 0)` being the top-left pixel of the active array.
7462
7463            Only available if android.statistics.faceDetectMode != OFF</details>
7464          <tag id="BC" />
7465        </entry>
7466        <entry name="faceScores" type="byte" visibility="hidden" container="array"
7467               hwlevel="legacy">
7468          <array>
7469            <size>n</size>
7470          </array>
7471          <description>List of the face confidence scores for
7472          detected faces</description>
7473          <range>1-100</range>
7474          <details>Only available if android.statistics.faceDetectMode != OFF.
7475          </details>
7476          <hal_details>
7477          The value should be meaningful (for example, setting 100 at
7478          all times is illegal).</hal_details>
7479          <tag id="BC" />
7480        </entry>
7481        <entry name="faces" type="int32" visibility="public" synthetic="true"
7482               container="array" typedef="face" hwlevel="legacy">
7483          <array>
7484            <size>n</size>
7485          </array>
7486          <description>List of the faces detected through camera face detection
7487          in this capture.</description>
7488          <details>
7489          Only available if android.statistics.faceDetectMode `!=` OFF.
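
          As an illustration only, a minimal Java sketch (assuming a hypothetical
          `CaptureRequest.Builder` named `requestBuilder` and a received `CaptureResult`
          named `result`) that enables face detection and reads back the detected faces:

              // Request SIMPLE face detection.
              requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
                      CameraMetadata.STATISTICS_FACE_DETECT_MODE_SIMPLE);

              // In the corresponding capture result:
              Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
              if (faces != null) {
                  for (Face face : faces) {
                      Rect bounds = face.getBounds();  // activeArraySize coordinates
                      int score = face.getScore();     // 1-100
                  }
              }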
7490          </details>
7491        </entry>
7492        <entry name="histogram" type="int32"
7493        type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
7494        container="array">
7495          <array>
7496            <size>n</size>
7497            <size>3</size>
7498          </array>
7499          <description>A 3-channel histogram based on the raw
7500          sensor data</description>
          <details>The k'th bucket (0-based) covers the input range
          (with w = android.sensor.info.whiteLevel) of [ k * w/N,
          (k + 1) * w / N ). If only a monochrome histogram is
          supported, all channels should have the same data.</details>
7505          <tag id="FUTURE" />
7506        </entry>
7507        <clone entry="android.statistics.histogramMode"
7508        kind="controls"></clone>
7509        <entry name="sharpnessMap" type="int32"
7510        type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
7511        container="array">
7512          <array>
7513            <size>n</size>
7514            <size>m</size>
7515            <size>3</size>
7516          </array>
7517          <description>A 3-channel sharpness map, based on the raw
7518          sensor data</description>
7519          <details>If only a monochrome sharpness map is supported,
7520          all channels should have the same data</details>
7521          <tag id="FUTURE" />
7522        </entry>
7523        <clone entry="android.statistics.sharpnessMapMode"
7524               kind="controls"></clone>
7525        <entry name="lensShadingCorrectionMap" type="byte" visibility="public"
7526               typedef="lensShadingMap" hwlevel="full">
7527          <description>The shading map is a low-resolution floating-point map
7528          that lists the coefficients used to correct for vignetting, for each
7529          Bayer color channel.</description>
7530          <range>Each gain factor is &amp;gt;= 1</range>
7531          <details>The least shaded section of the image should have a gain factor
7532          of 1; all other sections should have gains above 1.
7533
7534          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7535          must take into account the colorCorrection settings.
7536
7537          The shading map is for the entire active pixel array, and is not
7538          affected by the crop region specified in the request. Each shading map
7539          entry is the value of the shading compensation map over a specific
7540          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7541          map, and an active pixel array size (W x H), shading map entry
7542          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7543          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7544          The map is assumed to be bilinearly interpolated between the sample points.
7545
7546          The channel order is [R, Geven, Godd, B], where Geven is the green
7547          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7548          The shading map is stored in a fully interleaved format.
7549
7550          The shading map should have on the order of 30-40 rows and columns,
7551          and must be smaller than 64x64.
7552
7553          As an example, given a very small map defined as:
7554
7555              width,height = [ 4, 3 ]
7556              values =
7557              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7558                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7559                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7560                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7561                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7562                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7563
7564          The low-resolution scaling map images for each channel are
7565          (displayed using nearest-neighbor interpolation):
7566
7567          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7568          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7569          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7570          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7571
7572          As a visualization only, inverting the full-color map to recover an
7573          image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
7574
7575          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
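
          As an illustration only, a minimal Java sketch (assuming a hypothetical
          `CaptureResult` named `result`, a sensor pixel `(px, py)`, and an active array
          of size `W x H`; the channel index constant is assumed to come from
          RggbChannelVector) that bilinearly samples the red-channel gain at that pixel:

              LensShadingMap map = result.get(
                      CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
              int n = map.getColumnCount();
              int m = map.getRowCount();
              // Map the sensor pixel to fractional shading-map grid coordinates.
              float gx = px * (n - 1) / (float) (W - 1);
              float gy = py * (m - 1) / (float) (H - 1);
              int x0 = (int) Math.floor(gx), y0 = (int) Math.floor(gy);
              int x1 = Math.min(x0 + 1, n - 1), y1 = Math.min(y0 + 1, m - 1);
              float fx = gx - x0, fy = gy - y0;
              // Bilinear interpolation between the four surrounding sample points.
              float g00 = map.getGainFactor(RggbChannelVector.RED, x0, y0);
              float g10 = map.getGainFactor(RggbChannelVector.RED, x1, y0);
              float g01 = map.getGainFactor(RggbChannelVector.RED, x0, y1);
              float g11 = map.getGainFactor(RggbChannelVector.RED, x1, y1);
              float gain = (1 - fx) * (1 - fy) * g00 + fx * (1 - fy) * g10
                      + (1 - fx) * fy * g01 + fx * fy * g11;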
7576          </details>
7577        </entry>
7578        <entry name="lensShadingMap" type="float" visibility="hidden"
7579               type_notes="2D array of float gain factors per channel to correct lens shading"
7580               container="array" hwlevel="full">
7581          <array>
7582            <size>4</size>
7583            <size>n</size>
7584            <size>m</size>
7585          </array>
7586          <description>The shading map is a low-resolution floating-point map
7587          that lists the coefficients used to correct for vignetting, for each
7588          Bayer color channel of RAW image data.</description>
7589          <range>Each gain factor is &amp;gt;= 1</range>
7590          <details>The least shaded section of the image should have a gain factor
7591          of 1; all other sections should have gains above 1.
7592
7593          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7594          must take into account the colorCorrection settings.
7595
7596          The shading map is for the entire active pixel array, and is not
7597          affected by the crop region specified in the request. Each shading map
7598          entry is the value of the shading compensation map over a specific
7599          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7600          map, and an active pixel array size (W x H), shading map entry
7601          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7602          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7603          The map is assumed to be bilinearly interpolated between the sample points.
7604
7605          The channel order is [R, Geven, Godd, B], where Geven is the green
7606          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7607          The shading map is stored in a fully interleaved format, and its size
7608          is provided in the camera static metadata by android.lens.info.shadingMapSize.
7609
7610          The shading map should have on the order of 30-40 rows and columns,
7611          and must be smaller than 64x64.
7612
7613          As an example, given a very small map defined as:
7614
7615              android.lens.info.shadingMapSize = [ 4, 3 ]
7616              android.statistics.lensShadingMap =
7617              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7618                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7619                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7620                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7621                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7622                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7623
7624          The low-resolution scaling map images for each channel are
7625          (displayed using nearest-neighbor interpolation):
7626
7627          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7628          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7629          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7630          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7631
7632          As a visualization only, inverting the full-color map to recover an
7633          image of a gray wall (using bicubic interpolation for visual quality)
7634          as captured by the sensor gives:
7635
7636          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7637
          Note that the RAW image data might be subject to lens shading
          correction not reported on this map. Query
          android.sensor.info.lensShadingApplied to see if RAW image data has been
          subject to lens shading correction. If android.sensor.info.lensShadingApplied
          is TRUE, the RAW image data is subject to partial or full lens shading
          correction. In the case that full lens shading correction is applied to RAW
          images, the gain factor map reported in this key will contain all 1.0 gains.
          In other words, the map reported in this key is the remaining lens shading
          correction that needs to be applied on the RAW image to get images without
          lens shading artifacts. See android.request.maxNumOutputRaw for a list of
          RAW image formats.
7649          </details>
7650          <hal_details>
7651          The lens shading map calculation may depend on exposure and white balance statistics.
7652          When AE and AWB are in AUTO modes
7653          (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
          may have all the information it needs to generate the most accurate lens shading map.
          When AE or AWB are in manual mode
7656          (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
7657          may be adversely impacted by manual exposure or white balance parameters. To avoid
7658          generating unreliable shading map data, the HAL may choose to lock the shading map with
7659          the latest known good map generated when the AE and AWB are in AUTO modes.
7660          </hal_details>
7661        </entry>
7662        <entry name="predictedColorGains" type="float"
7663               visibility="hidden"
7664               deprecated="true"
7665               optional="true"
7666               type_notes="A 1D array of floats for 4 color channel gains"
7667               container="array">
7668          <array>
7669            <size>4</size>
7670          </array>
7671          <description>The best-fit color channel gains calculated
7672          by the camera device's statistics units for the current output frame.
7673          </description>
7674          <details>
7675          This may be different than the gains used for this frame,
7676          since statistics processing on data from a new frame
7677          typically completes after the transform has already been
7678          applied to that frame.
7679
7680          The 4 channel gains are defined in Bayer domain,
7681          see android.colorCorrection.gains for details.
7682
7683          This value should always be calculated by the auto-white balance (AWB) block,
7684          regardless of the android.control.* current values.
7685          </details>
7686        </entry>
7687        <entry name="predictedColorTransform" type="rational"
7688               visibility="hidden"
7689               deprecated="true"
7690               optional="true"
7691               type_notes="3x3 rational matrix in row-major order"
7692               container="array">
7693          <array>
7694            <size>3</size>
7695            <size>3</size>
7696          </array>
7697          <description>The best-fit color transform matrix estimate
7698          calculated by the camera device's statistics units for the current
7699          output frame.</description>
7700          <details>The camera device will provide the estimate from its
7701          statistics unit on the white balance transforms to use
7702          for the next frame. These are the values the camera device believes
7703          are the best fit for the current output frame. This may
7704          be different than the transform used for this frame, since
7705          statistics processing on data from a new frame typically
7706          completes after the transform has already been applied to
7707          that frame.
7708
7709          These estimates must be provided for all frames, even if
7710          capture settings and color transforms are set by the application.
7711
7712          This value should always be calculated by the auto-white balance (AWB) block,
7713          regardless of the android.control.* current values.
7714          </details>
7715        </entry>
7716        <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
7717               hwlevel="full">
7718          <enum>
7719            <value>NONE
7720            <notes>The camera device does not detect any flickering illumination
7721            in the current scene.</notes></value>
7722            <value>50HZ
7723            <notes>The camera device detects illumination flickering at 50Hz
7724            in the current scene.</notes></value>
7725            <value>60HZ
7726            <notes>The camera device detects illumination flickering at 60Hz
7727            in the current scene.</notes></value>
7728          </enum>
7729          <description>The camera device estimated scene illumination lighting
7730          frequency.</description>
7731          <details>
7732          Many light sources, such as most fluorescent lights, flicker at a rate
7733          that depends on the local utility power standards. This flicker must be
7734          accounted for by auto-exposure routines to avoid artifacts in captured images.
7735          The camera device uses this entry to tell the application what the scene
7736          illuminant frequency is.
7737
          When manual exposure control is enabled
          (`android.control.aeMode == OFF` or `android.control.mode ==
          OFF`), the antibanding specified by android.control.aeAntibandingMode
          is not performed by the camera device, so the application must instead
          select exposure times that do not cause banding issues by consulting
          this metadata field. See android.control.aeAntibandingMode for
          more details.
7745
7746          Reports NONE if there doesn't appear to be flickering illumination.
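
          As an illustration only, a minimal Java sketch (assuming hypothetical
          `result`, `requestBuilder`, and `desiredExposureNs` variables) that picks a
          manual exposure time which is an integer multiple of the detected flicker
          period:

              Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
              long periodNs = 0;
              if (flicker != null) {
                  if (flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
                      periodNs = 10000000L;  // 1/100 s
                  } else if (flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_60HZ) {
                      periodNs = 8333333L;   // ~1/120 s
                  }
              }
              if (periodNs != 0) {
                  // Round the desired exposure down to a whole number of flicker periods.
                  long exposureNs = Math.max(periodNs,
                          (desiredExposureNs / periodNs) * periodNs);
                  requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNs);
              }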
7747          </details>
7748        </entry>
7749        <clone entry="android.statistics.hotPixelMapMode" kind="controls">
7750        </clone>
7751        <entry name="hotPixelMap" type="int32" visibility="public"
7752        type_notes="list of coordinates based on android.sensor.pixelArraySize"
7753        container="array" typedef="point">
7754          <array>
7755            <size>2</size>
7756            <size>n</size>
7757          </array>
7758          <description>
7759          List of `(x, y)` coordinates of hot/defective pixels on the sensor.
7760          </description>
7761          <range>
7762          n &lt;= number of pixels on the sensor.
7763          The `(x, y)` coordinates must be bounded by
7764          android.sensor.info.pixelArraySize.
7765          </range>
7766          <details>
7767          A coordinate `(x, y)` must lie between `(0, 0)`, and
7768          `(width - 1, height - 1)` (inclusive), which are the top-left and
7769          bottom-right of the pixel array, respectively. The width and
7770          height dimensions are given in android.sensor.info.pixelArraySize.
7771          This may include hot pixels that lie outside of the active array
7772          bounds given by android.sensor.info.activeArraySize.
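
          As an illustration only, a minimal Java sketch (assuming hypothetical
          `requestBuilder`, `result`, and `characteristics` variables) that enables hot
          pixel map output and reads back the reported coordinates:

              requestBuilder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);

              // In the corresponding capture result:
              Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
              Size pixelArraySize = characteristics.get(
                      CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
              // Each (x, y) is bounded by pixelArraySize, not activeArraySize.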
7773          </details>
7774          <hal_details>
          A hot pixel map contains the coordinates of pixels on the camera
          sensor that do not report valid values (usually due to defects in
          the camera sensor). This includes pixels that are stuck at certain
          values, or have a response that does not accurately encode the
          incoming light from the scene.
7780
7781          To avoid performance issues, there should be significantly fewer hot
7782          pixels than actual pixels on the camera sensor.
7783          </hal_details>
7784          <tag id="V1" />
7785          <tag id="RAW" />
7786        </entry>
7787      </dynamic>
7788      <controls>
7789        <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
7790          <enum>
7791            <value>OFF
7792            <notes>Do not include a lens shading map in the capture result.</notes></value>
7793            <value>ON
7794            <notes>Include a lens shading map in the capture result.</notes></value>
7795          </enum>
7796          <description>Whether the camera device will output the lens
7797          shading map in output result metadata.</description>
7798          <range>android.statistics.info.availableLensShadingMapModes</range>
7799          <details>When set to ON,
7800          android.statistics.lensShadingMap will be provided in
7801          the output result metadata.
7802
7803          ON is always supported on devices with the RAW capability.
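
          As an illustration only, a minimal Java sketch (assuming hypothetical
          `characteristics` and `requestBuilder` variables) that enables the shading map
          output only when ON is listed as available:

              int[] modes = characteristics.get(
                      CameraCharacteristics.STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES);
              for (int mode : modes) {
                  if (mode == CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON) {
                      requestBuilder.set(
                              CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, mode);
                  }
              }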
7804          </details>
7805          <tag id="RAW" />
7806        </entry>
7807      </controls>
7808      <dynamic>
7809        <clone entry="android.statistics.lensShadingMapMode" kind="controls">
7810        </clone>
7811      </dynamic>
7812    </section>
7813    <section name="tonemap">
7814      <controls>
7815        <entry name="curveBlue" type="float" visibility="hidden"
7816        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7817        container="array" hwlevel="full">
7818          <array>
7819            <size>n</size>
7820            <size>2</size>
7821          </array>
7822          <description>Tonemapping / contrast / gamma curve for the blue
7823          channel, to use when android.tonemap.mode is
7824          CONTRAST_CURVE.</description>
7825          <details>See android.tonemap.curveRed for more details.</details>
7826        </entry>
7827        <entry name="curveGreen" type="float" visibility="hidden"
7828        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7829        container="array" hwlevel="full">
7830          <array>
7831            <size>n</size>
7832            <size>2</size>
7833          </array>
7834          <description>Tonemapping / contrast / gamma curve for the green
7835          channel, to use when android.tonemap.mode is
7836          CONTRAST_CURVE.</description>
7837          <details>See android.tonemap.curveRed for more details.</details>
7838        </entry>
7839        <entry name="curveRed" type="float" visibility="hidden"
7840        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7841        container="array" hwlevel="full">
7842          <array>
7843            <size>n</size>
7844            <size>2</size>
7845          </array>
7846          <description>Tonemapping / contrast / gamma curve for the red
7847          channel, to use when android.tonemap.mode is
7848          CONTRAST_CURVE.</description>
7849          <range>0-1 on both input and output coordinates, normalized
7850          as a floating-point value such that 0 == black and 1 == white.
7851          </range>
7852          <details>
7853          Each channel's curve is defined by an array of control points:
7854
7855              android.tonemap.curveRed =
7856                [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
7857              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
7858
7859          These are sorted in order of increasing `Pin`; it is
7860          required that input values 0.0 and 1.0 are included in the list to
7861          define a complete mapping. For input values between control points,
7862          the camera device must linearly interpolate between the control
7863          points.
7864
7865          Each curve can have an independent number of points, and the number
7866          of points can be less than max (that is, the request doesn't have to
7867          always provide a curve with number of points equivalent to
7868          android.tonemap.maxCurvePoints).
7869
7870          A few examples, and their corresponding graphical mappings; these
7871          only specify the red channel and the precision is limited to 4
7872          digits, for conciseness.
7873
7874          Linear mapping:
7875
7876              android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
7877
7878          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
7879
7880          Invert mapping:
7881
7882              android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
7883
7884          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
7885
7886          Gamma 1/2.2 mapping, with 16 control points:
7887
7888              android.tonemap.curveRed = [
7889                0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
7890                0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
7891                0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
7892                0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
7893
7894          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
7895
7896          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
7897
7898              android.tonemap.curveRed = [
7899                0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
7900                0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
7901                0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
7902                0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
7903
7904          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
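
          As an illustration only, a minimal Java sketch that builds a gamma 1/2.2
          curve in the flat `[P0in, P0out, P1in, P1out, ...]` layout described above
          (the `points` variable is hypothetical and should not exceed
          android.tonemap.maxCurvePoints):

              float[] curve = new float[points * 2];
              for (int i = 0; i &lt; points; i++) {
                  float in = i / (float) (points - 1);
                  curve[2 * i] = in;                                    // Pin
                  curve[2 * i + 1] = (float) Math.pow(in, 1.0 / 2.2);   // Pout
              }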
7905        </details>
7906        <hal_details>
          For good mapping quality, at least 128 control points are
          preferred.

          A typical use case is a gamma-1/2.2 curve, using as many
          control points as are available.
7912        </hal_details>
7913        </entry>
7914        <entry name="curve" type="float" visibility="public" synthetic="true"
7915               typedef="tonemapCurve"
7916               hwlevel="full">
7917          <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
7918          is CONTRAST_CURVE.</description>
7919          <details>
          The tonemapCurve consists of three curves, one each for the red, green, and
          blue channels. The examples below use the red channel; the same logic applies
          to the green and blue channels.
7923          Each channel's curve is defined by an array of control points:
7924
7925              curveRed =
7926                [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
7927              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
7928
7929          These are sorted in order of increasing `Pin`; it is always
7930          guaranteed that input values 0.0 and 1.0 are included in the list to
7931          define a complete mapping. For input values between control points,
7932          the camera device must linearly interpolate between the control
7933          points.
7934
7935          Each curve can have an independent number of points, and the number
7936          of points can be less than max (that is, the request doesn't have to
7937          always provide a curve with number of points equivalent to
7938          android.tonemap.maxCurvePoints).
7939
7940          A few examples, and their corresponding graphical mappings; these
7941          only specify the red channel and the precision is limited to 4
7942          digits, for conciseness.
7943
7944          Linear mapping:
7945
7946              curveRed = [ (0, 0), (1.0, 1.0) ]
7947
7948          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
7949
7950          Invert mapping:
7951
7952              curveRed = [ (0, 1.0), (1.0, 0) ]
7953
7954          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
7955
7956          Gamma 1/2.2 mapping, with 16 control points:
7957
7958              curveRed = [
7959                (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
7960                (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
7961                (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
7962                (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
7963
7964          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
7965
7966          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
7967
7968              curveRed = [
7969                (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
7970                (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
7971                (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
7972                (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
7973
7974          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
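
          As an illustration only, a minimal Java sketch (assuming a hypothetical
          `CaptureRequest.Builder` named `requestBuilder`) that applies a linear curve
          to all three channels via CONTRAST_CURVE:

              float[] linear = { 0f, 0f, 1f, 1f };  // per-channel [P0in, P0out, P1in, P1out]
              TonemapCurve curve = new TonemapCurve(linear, linear, linear);
              requestBuilder.set(CaptureRequest.TONEMAP_MODE,
                      CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
              requestBuilder.set(CaptureRequest.TONEMAP_CURVE, curve);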
7975        </details>
7976        <hal_details>
7977            This entry is created by the framework from the curveRed, curveGreen and
7978            curveBlue entries.
7979        </hal_details>
7980        </entry>
7981        <entry name="mode" type="byte" visibility="public" enum="true"
7982               hwlevel="full">
7983          <enum>
7984            <value>CONTRAST_CURVE
7985              <notes>Use the tone mapping curve specified in
7986              the android.tonemap.curve* entries.
7987
7988              All color enhancement and tonemapping must be disabled, except
7989              for applying the tonemapping curve specified by
7990              android.tonemap.curve.
7991
7992              Must not slow down frame rate relative to raw
7993              sensor output.
7994              </notes>
7995            </value>
7996            <value>FAST
7997              <notes>
7998              Advanced gamma mapping and color enhancement may be applied, without
7999              reducing frame rate compared to raw sensor output.
8000              </notes>
8001            </value>
8002            <value>HIGH_QUALITY
8003              <notes>
8004              High-quality gamma mapping and color enhancement will be applied, at
8005              the cost of possibly reduced frame rate compared to raw sensor output.
8006              </notes>
8007            </value>
8008            <value>GAMMA_VALUE
8009              <notes>
              Use the gamma value specified in android.tonemap.gamma to perform
8011              tonemapping.
8012
8013              All color enhancement and tonemapping must be disabled, except
8014              for applying the tonemapping curve specified by android.tonemap.gamma.
8015
8016              Must not slow down frame rate relative to raw sensor output.
8017              </notes>
8018            </value>
8019            <value>PRESET_CURVE
8020              <notes>
8021              Use the preset tonemapping curve specified in
              android.tonemap.presetCurve to perform tonemapping.
8023
8024              All color enhancement and tonemapping must be disabled, except
8025              for applying the tonemapping curve specified by
8026              android.tonemap.presetCurve.
8027
8028              Must not slow down frame rate relative to raw sensor output.
8029              </notes>
8030            </value>
8031          </enum>
8032          <description>High-level global contrast/gamma/tonemapping control.
8033          </description>
8034          <range>android.tonemap.availableToneMapModes</range>
8035          <details>
8036          When switching to an application-defined contrast curve by setting
8037          android.tonemap.mode to CONTRAST_CURVE, the curve is defined
8038          per-channel with a set of `(in, out)` points that specify the
8039          mapping from input high-bit-depth pixel value to the output
8040          low-bit-depth value.  Since the actual pixel ranges of both input
8041          and output may change depending on the camera pipeline, the values
8042          are specified by normalized floating-point numbers.
8043
8044          More-complex color mapping operations such as 3D color look-up
8045          tables, selective chroma enhancement, or other non-linear color
8046          transforms will be disabled when android.tonemap.mode is
8047          CONTRAST_CURVE.
8048
8049          When using either FAST or HIGH_QUALITY, the camera device will
8050          emit its own tonemap curve in android.tonemap.curve.
8051          These values are always available, and as close as possible to the
8052          actually used nonlinear/nonglobal transforms.
8053
8054          If a request is sent with CONTRAST_CURVE with the camera device's
8055          provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
8056          roughly the same.</details>
8057        </entry>
8058      </controls>
8059      <static>
8060        <entry name="maxCurvePoints" type="int32" visibility="public"
8061               hwlevel="full">
8062          <description>Maximum number of supported points in the
8063            tonemap curve that can be used for android.tonemap.curve.
8064          </description>
8065          <details>
8066          If the actual number of points provided by the application (in android.tonemap.curve*) is
8067          less than this maximum, the camera device will resample the curve to its internal
8068          representation, using linear interpolation.
8069
8070          The output curves in the result metadata may have a different number
8071          of points than the input curves, and will represent the actual
8072          hardware curves used as closely as possible when linearly interpolated.
8073          </details>
8074          <hal_details>
          This value must be at least 64, and should be at least 128.
8076          </hal_details>
8077        </entry>
8078        <entry name="availableToneMapModes" type="byte" visibility="public"
8079        type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
8080          <array>
8081            <size>n</size>
8082          </array>
8083          <description>
8084          List of tonemapping modes for android.tonemap.mode that are supported by this camera
8085          device.
8086          </description>
8087          <range>Any value listed in android.tonemap.mode</range>
8088          <details>
          Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
          at least one of the following mode combinations:
8091
8092          * CONTRAST_CURVE, FAST and HIGH_QUALITY
8093          * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
8094
8095          This includes all FULL level devices.
8096          </details>
8097          <hal_details>
8098            HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
8099            on the camera device, but the underlying implementation can be the same for both modes.
8100            That is, if the highest quality implementation on the camera device does not slow down
8101            capture rate, then FAST and HIGH_QUALITY will generate the same output.
8102          </hal_details>
8103        </entry>
8104      </static>
8105      <dynamic>
8106        <clone entry="android.tonemap.curveBlue" kind="controls">
8107        </clone>
8108        <clone entry="android.tonemap.curveGreen" kind="controls">
8109        </clone>
8110        <clone entry="android.tonemap.curveRed" kind="controls">
8111        </clone>
8112        <clone entry="android.tonemap.curve" kind="controls">
8113        </clone>
8114        <clone entry="android.tonemap.mode" kind="controls">
8115        </clone>
8116      </dynamic>
8117      <controls>
8118        <entry name="gamma" type="float" visibility="public">
8119          <description> Tonemapping curve to use when android.tonemap.mode is
8120          GAMMA_VALUE
8121          </description>
8122          <details>
          The tonemap curve will be defined by the following formula:
          * OUT = pow(IN, 1.0 / gamma)
          where IN and OUT are the input and output pixel values scaled to the range
          [0.0, 1.0], pow is the power function, and gamma is the gamma value specified
          by this key.

          The same curve will be applied to all color channels. The camera device
          may clip the input gamma value to its supported range. The actually applied
          value will be returned in the capture result.

          The valid range of the gamma value varies across devices, but values
          within [1.0, 5.0] are guaranteed not to be clipped.
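
          As an illustration only, a minimal Java sketch (assuming a hypothetical
          `requestBuilder`) that requests gamma-value tonemapping:

              requestBuilder.set(CaptureRequest.TONEMAP_MODE,
                      CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
              requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
              // The device then applies OUT = pow(IN, 1.0 / 2.2) per channel; the
              // actually applied gamma is reported back in the capture result.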
8135          </details>
8136        </entry>
8137        <entry name="presetCurve" type="byte" visibility="public" enum="true">
8138          <enum>
8139            <value>SRGB
8140              <notes>Tonemapping curve is defined by sRGB</notes>
8141            </value>
8142            <value>REC709
8143              <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
8144            </value>
8145          </enum>
8146          <description> Tonemapping curve to use when android.tonemap.mode is
8147          PRESET_CURVE
8148          </description>
8149          <details>
          The tonemap curve will be defined by the specified standard.
8151
8152          sRGB (approximated by 16 control points):
8153
8154          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8155
8156          Rec. 709 (approximated by 16 control points):
8157
8158          ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
8159
          Note that the above figures show 16-control-point approximations of the preset
          curves. Camera devices may apply a different approximation to the curve.
8162          </details>
8163        </entry>
8164      </controls>
8165      <dynamic>
8166        <clone entry="android.tonemap.gamma" kind="controls">
8167        </clone>
8168        <clone entry="android.tonemap.presetCurve" kind="controls">
8169        </clone>
8170      </dynamic>
8171    </section>
8172    <section name="led">
8173      <controls>
8174        <entry name="transmit" type="byte" visibility="hidden" optional="true"
8175               enum="true" typedef="boolean">
8176          <enum>
8177            <value>OFF</value>
8178            <value>ON</value>
8179          </enum>
8180          <description>This LED is nominally used to indicate to the user
8181          that the camera is powered on and may be streaming images back to the
8182          Application Processor. In certain rare circumstances, the OS may
8183          disable this when video is processed locally and not transmitted to
8184          any untrusted applications.
8185
8186          In particular, the LED *must* always be on when the data could be
8187          transmitted off the device. The LED *should* always be on whenever
8188          data is stored locally on the device.
8189
8190          The LED *may* be off if a trusted application is using the data that
8191          doesn't violate the above rules.
8192          </description>
8193        </entry>
8194      </controls>
8195      <dynamic>
8196        <clone entry="android.led.transmit" kind="controls"></clone>
8197      </dynamic>
8198      <static>
8199        <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
8200               enum="true"
8201               container="array">
8202          <array>
8203            <size>n</size>
8204          </array>
8205          <enum>
8206            <value>TRANSMIT
8207              <notes>android.led.transmit control is used.</notes>
8208            </value>
8209          </enum>
8210          <description>A list of camera LEDs that are available on this system.
8211          </description>
8212        </entry>
8213      </static>
8214    </section>
8215    <section name="info">
8216      <static>
8217        <entry name="supportedHardwareLevel" type="byte" visibility="public"
8218               enum="true" hwlevel="legacy">
8219          <enum>
8220            <value>
8221              LIMITED
8222              <notes>
8223              This camera device has only limited capabilities.
8224              </notes>
8225            </value>
8226            <value>
8227              FULL
8228              <notes>
8229              This camera device is capable of supporting advanced imaging applications.
8230              </notes>
8231            </value>
8232            <value>
8233              LEGACY
8234              <notes>
8235              This camera device is running in backward compatibility mode.
8236              </notes>
8237            </value>
8238          </enum>
8239          <description>
8240          Generally classifies the overall set of the camera device functionality.
8241          </description>
8242          <details>
8243          Camera devices will come in three flavors: LEGACY, LIMITED and FULL.
8244
          A FULL device will support the following capabilities:
8246
8247          * BURST_CAPTURE capability (android.request.availableCapabilities contains BURST_CAPTURE)
8248          * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
8249          * Manual sensor control (android.request.availableCapabilities contains MANUAL_SENSOR)
8250          * Manual post-processing control (android.request.availableCapabilities contains
8251            MANUAL_POST_PROCESSING)
8252          * At least 3 processed (but not stalling) format output streams
8253            (android.request.maxNumOutputProc `&gt;=` 3)
8254          * The required stream configurations defined in android.scaler.availableStreamConfigurations
8255          * The required exposure time range defined in android.sensor.info.exposureTimeRange
8256          * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
8257
8258          A LIMITED device may have some or none of the above characteristics.
8259          To find out more refer to android.request.availableCapabilities.
8260
8261          Some features are not part of any particular hardware level or capability and must be
8262          queried separately. These include:
8263
8264          * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
8265          * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
8266          * Face detection (android.statistics.info.availableFaceDetectModes)
8267          * Optical or electrical image stabilization
8268            (android.lens.info.availableOpticalStabilization,
8269             android.control.availableVideoStabilizationModes)
8270
          A LEGACY device does not support per-frame control, manual sensor control, manual
          post-processing, or arbitrary cropping regions, and has relaxed performance constraints.
8273
8274          Each higher level supports everything the lower level supports
8275          in this order: FULL `&gt;` LIMITED `&gt;` LEGACY.
8276
          Note:
          Prior to API level 23, FULL devices also supported arbitrary cropping regions
          (android.scaler.croppingType `==` FREEFORM); this requirement was relaxed in API level 23,
          and FULL devices may only support CENTERED cropping.
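
          As an illustration only, a minimal Java sketch (assuming a hypothetical
          `characteristics` variable) that checks whether a device is at least FULL;
          note that the raw enum values are not ordered by capability (LEGACY has a
          larger value than FULL), so an explicit ordering is used:

              int[] ordering = {
                  CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
                  CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
                  CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
              };
              int deviceLevel = characteristics.get(
                      CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
              int rank = -1;
              for (int i = 0; i &lt; ordering.length; i++) {
                  if (ordering[i] == deviceLevel) {
                      rank = i;
                  }
              }
              boolean isAtLeastFull = (rank == 2);  // FULL &gt; LIMITED &gt; LEGACY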
8281          </details>
8282          <hal_details>
8283          The camera 3 HAL device can implement one of two possible
8284          operational modes; limited and full. Full support is
8285          expected from new higher-end devices. Limited mode has
8286          hardware requirements roughly in line with those for a
8287          camera HAL device v1 implementation, and is expected from
8288          older or inexpensive devices. Full is a strict superset of
8289          limited, and they share the same essential operational flow.
8290
8291          For full details refer to "S3. Operational Modes" in camera3.h
8292
8293          Camera HAL3+ must not implement LEGACY mode. It is there
8294          for backwards compatibility in the `android.hardware.camera2`
8295          user-facing API only.
8296          </hal_details>
8297        </entry>
8298      </static>
8299    </section>
8300    <section name="blackLevel">
8301      <controls>
8302        <entry name="lock" type="byte" visibility="public" enum="true"
8303               typedef="boolean" hwlevel="full">
8304          <enum>
8305            <value>OFF</value>
8306            <value>ON</value>
8307          </enum>
8308          <description> Whether black-level compensation is locked
8309          to its current values, or is free to vary.</description>
8310          <details>When set to `true` (ON), the values used for black-level
8311          compensation will not change until the lock is set to
8312          `false` (OFF).
8313
8314          Since changes to certain capture parameters (such as
8315          exposure time) may require resetting of black level
8316          compensation, the camera device must report whether setting
8317          the black level lock was successful in the output result
8318          metadata.
8319
8320          For example, if a sequence of requests is as follows:
8321
8322          * Request 1: Exposure = 10ms, Black level lock = OFF
8323          * Request 2: Exposure = 10ms, Black level lock = ON
8324          * Request 3: Exposure = 10ms, Black level lock = ON
8325          * Request 4: Exposure = 20ms, Black level lock = ON
8326          * Request 5: Exposure = 20ms, Black level lock = ON
8327          * Request 6: Exposure = 20ms, Black level lock = ON
8328
          If the exposure change in Request 4 requires the camera
          device to reset the black level offsets, then the output
          result metadata is expected to be:
8332
8333          * Result 1: Exposure = 10ms, Black level lock = OFF
8334          * Result 2: Exposure = 10ms, Black level lock = ON
8335          * Result 3: Exposure = 10ms, Black level lock = ON
8336          * Result 4: Exposure = 20ms, Black level lock = OFF
8337          * Result 5: Exposure = 20ms, Black level lock = ON
8338          * Result 6: Exposure = 20ms, Black level lock = ON
8339
8340          This indicates to the application that on frame 4, black
8341          levels were reset due to exposure value changes, and pixel
8342          values may not be consistent across captures.
8343
8344          The camera device will maintain the lock to the extent
8345          possible, only overriding the lock to OFF when changes to
8346          other request parameters require a black level recalculation
8347          or reset.
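
          As an illustration only, a minimal Java sketch (assuming hypothetical
          `requestBuilder` and `result` variables) that requests the lock and checks
          whether it actually held for a given frame:

              requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);

              // In each corresponding capture result:
              Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
              if (Boolean.FALSE.equals(locked)) {
                  // Black level compensation was reset for this frame; pixel values
                  // may not be consistent with earlier captures.
              }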
8348          </details>
8349          <hal_details>
8350          If for some reason black level locking is no longer possible
8351          (for example, the analog gain has changed, which forces
8352          black level offsets to be recalculated), then the HAL must
8353          override this request (and it must report 'OFF' when this
8354          does happen) until the next capture for which locking is
8355          possible again.</hal_details>
8356          <tag id="HAL2" />
8357        </entry>
8358      </controls>
8359      <dynamic>
8360        <clone entry="android.blackLevel.lock"
8361          kind="controls">
8362          <details>
8363            Whether the black level offset was locked for this frame.  Should be
8364            ON if android.blackLevel.lock was ON in the capture request, unless
8365            a change in other capture settings forced the camera device to
8366            perform a black level reset.
8367          </details>
8368        </clone>
8369      </dynamic>
8370    </section>
8371    <section name="sync">
8372      <dynamic>
8373        <entry name="frameNumber" type="int64" visibility="hidden" enum="true"
8374               hwlevel="legacy">
8375          <enum>
8376            <value id="-1">CONVERGING
8377              <notes>
8378              The current result is not yet fully synchronized to any request.
8379
8380              Synchronization is in progress, and reading metadata from this
8381              result may include a mix of data that have taken effect since the
8382              last synchronization time.
8383
              In some future result, within android.sync.maxLatency frames,
              this value will update to the actual frame number that
              the result is guaranteed to be synchronized to (as long as the
              request settings remain constant).
8388            </notes>
8389            </value>
8390            <value id="-2">UNKNOWN
8391              <notes>
8392              The current result's synchronization status is unknown.
8393
8394              The result may have already converged, or it may be in
8395              progress.  Reading from this result may include some mix
8396              of settings from past requests.
8397
8398              After a settings change, the new settings will eventually all
8399              take effect for the output buffers and results. However, this
8400              value will not change when that happens. Altering settings
8401              rapidly may provide outcomes using mixes of settings from recent
8402              requests.
8403
8404              This value is intended primarily for backwards compatibility with
8405              the older camera implementations (for android.hardware.Camera).
8406            </notes>
8407            </value>
8408          </enum>
8409          <description>The frame number corresponding to the last request
8410          with which the output result (metadata + buffers) has been fully
8411          synchronized.</description>
8412          <range>Either a non-negative value corresponding to a
8413          `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
8414          </range>
8415          <details>
8416          When a request is submitted to the camera device, there is usually a
8417          delay of several frames before the controls get applied. A camera
8418          device may either choose to account for this delay by implementing a
8419          pipeline and carefully submit well-timed atomic control updates, or
8420          it may start streaming control changes that span over several frame
8421          boundaries.
8422
8423          In the latter case, whenever a request's settings change relative to
8424          the previous submitted request, the full set of changes may take
8425          multiple frame durations to fully take effect. Some settings may
8426          take effect sooner (in less frame durations) than others.
8427
8428          While a set of control changes are being propagated, this value
8429          will be CONVERGING.
8430
8431          Once it is fully known that a set of control changes have been
8432          finished propagating, and the resulting updated control settings
8433          have been read back by the camera device, this value will be set
8434          to a non-negative frame number (corresponding to the request to
          which the results have synchronized).
8436
8437          Older camera device implementations may not have a way to detect
8438          when all camera controls have been applied, and will always set this
8439          value to UNKNOWN.
8440
8441          FULL capability devices will always have this value set to the
8442          frame number of the request corresponding to this result.
8443
8444          _Further details_:
8445
8446          * Whenever a request differs from the last request, any future
8447          results not yet returned may have this value set to CONVERGING (this
8448          could include any in-progress captures not yet returned by the camera
8449          device, for more details see pipeline considerations below).
8450          * Submitting a series of multiple requests that differ from the
8451          previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
8452          moves the new synchronization frame to the last non-repeating
8453          request (using the smallest frame number from the contiguous list of
8454          repeating requests).
8455          * Submitting the same request repeatedly will not change this value
8456          to CONVERGING, if it was already a non-negative value.
8457          * When this value changes to non-negative, that means that all of the
8458          metadata controls from the request have been applied, all of the
8459          metadata controls from the camera device have been read to the
8460          updated values (into the result), and all of the graphics buffers
8461          corresponding to this result are also synchronized to the request.
8462
8463          _Pipeline considerations_:
8464
8465          Submitting a request with updated controls relative to the previously
8466          submitted requests may also invalidate the synchronization state
8467          of all the results corresponding to currently in-flight requests.
8468
8469          In other words, results for this current request and up to
8470          android.request.pipelineMaxDepth prior requests may have their
8471          android.sync.frameNumber change to CONVERGING.
8472          </details>
8473          <hal_details>
8474          Using UNKNOWN here is illegal unless android.sync.maxLatency
8475          is also UNKNOWN.
8476
8477          FULL capability devices should simply set this value to the
8478          `frame_number` of the request this result corresponds to.
8479          </hal_details>
8480          <tag id="V1" />
8481        </entry>
8482      </dynamic>
8483      <static>
8484        <entry name="maxLatency" type="int32" visibility="public" enum="true"
8485               hwlevel="legacy">
8486          <enum>
8487            <value id="0">PER_FRAME_CONTROL
8488              <notes>
              Every frame has its request's settings immediately applied.
8490
8491              Changing controls over multiple requests one after another will
8492              produce results that have those controls applied atomically
8493              each frame.
8494
8495              All FULL capability devices will have this as their maxLatency.
8496              </notes>
8497            </value>
8498            <value id="-1">UNKNOWN
8499              <notes>
8500              Each new frame has some subset (potentially the entire set)
8501              of the past requests applied to the camera settings.
8502
              If the application submits a series of identical requests, the
              camera device will eventually have the camera settings applied,
              but it is unknown exactly when that point will be reached.
8506
8507              All LEGACY capability devices will have this as their maxLatency.
8508              </notes>
8509            </value>
8510          </enum>
8511          <description>
          The maximum number of frames that can occur after a request
          (different from the previous one) has been submitted, and before the
          result's state becomes synchronized.
8515          </description>
8516          <units>Frame counts</units>
8517          <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
8518          <details>
          This defines the maximum distance (in number of metadata results)
          between the frame number of the request that has new controls to apply
          and the frame number of the result that has all the controls applied.
8522
          In other words, this acts as an upper bound on how many frames
          must occur before the camera device knows for a fact that the newly
          submitted camera settings have been applied in outgoing frames.
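
          As a purely illustrative sketch (not part of the HAL contract; the
          `characteristics` variable is a hypothetical
          {@link android.hardware.camera2.CameraCharacteristics} instance), an
          application might interpret this value as follows:

              // Read the public SYNC_MAX_LATENCY characteristic.
              Integer syncLatency =
                  characteristics.get(CameraCharacteristics.SYNC_MAX_LATENCY);
              if (syncLatency == null
                  || syncLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
                // No guarantee on when new settings take effect; the app needs
                // its own heuristic (e.g. wait for results to stabilize).
              } else if (syncLatency
                  == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
                // Every result reflects exactly the settings of its own request.
              } else {
                // New settings are guaranteed to be applied within syncLatency
                // frames of the request that changed them.
              }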
8526          </details>
8527          <hal_details>
          For example, if maxLatency were 2,
8529
8530              initial request = X (repeating)
8531              request1 = X
8532              request2 = Y
8533              request3 = Y
8534              request4 = Y
8535
              where requestN has frameNumber N, and the first of the repeating
              initial requests has frameNumber F (and F &lt; 1).
8538
8539              initial result = X' + { android.sync.frameNumber == F }
8540              result1 = X' + { android.sync.frameNumber == F }
8541              result2 = X' + { android.sync.frameNumber == CONVERGING }
8542              result3 = X' + { android.sync.frameNumber == CONVERGING }
8543              result4 = X' + { android.sync.frameNumber == 2 }
8544
8545              where resultN has frameNumber N.
8546
8547          Since `result4` has a `frameNumber == 4` and
8548          `android.sync.frameNumber == 2`, the distance is clearly
8549          `4 - 2 = 2`.
8550
          Use `frame_count` from camera3_request_t instead of
          android.request.frameCount or
          {@link android.hardware.camera2.CaptureResult#getFrameNumber}.
8554
8555          LIMITED devices are strongly encouraged to use a non-negative
8556          value. If UNKNOWN is used here then app developers do not have a way
8557          to know when sensor settings have been applied.
8558          </hal_details>
8559          <tag id="V1" />
8560        </entry>
8561      </static>
8562    </section>
8563    <section name="reprocess">
8564      <controls>
8565        <entry name="effectiveExposureFactor" type="float" visibility="public" hwlevel="limited">
8566            <description>
            The exposure time increase factor applied to the original output
            frame by the application's processing before it is sent for reprocessing.
8569            </description>
8570            <units>Relative exposure time increase factor.</units>
8571            <range> &amp;gt;= 1.0</range>
8572            <details>
8573            This is optional, and will be supported if the camera device supports YUV_REPROCESSING
8574            capability (android.request.availableCapabilities contains YUV_REPROCESSING).
8575
8576            For some YUV reprocessing use cases, the application may choose to filter the original
8577            output frames to effectively reduce the noise to the same level as a frame that was
8578            captured with longer exposure time. To be more specific, assuming the original captured
8579            images were captured with a sensitivity of S and an exposure time of T, the model in
8580            the camera device is that the amount of noise in the image would be approximately what
8581            would be expected if the original capture parameters had been a sensitivity of
8582            S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
8583            than S and T respectively. If the captured images were processed by the application
8584            before being sent for reprocessing, then the application may have used image processing
8585            algorithms and/or multi-frame image fusion to reduce the noise in the
8586            application-processed images (input images). By using the effectiveExposureFactor
8587            control, the application can communicate to the camera device the actual noise level
8588            improvement in the application-processed image. With this information, the camera
8589            device can select appropriate noise reduction and edge enhancement parameters to avoid
8590            excessive noise reduction (android.noiseReduction.mode) and insufficient edge
8591            enhancement (android.edge.mode) being applied to the reprocessed frames.
8592
            For example, in a multi-frame image fusion use case, the application may fuse
            multiple output frames together into a final frame for reprocessing. When N images are
            fused into 1 image for reprocessing, the exposure time increase factor could be up to
            the square root of N (based on a simple photon shot noise model). The camera device
            will adjust the reprocessing noise reduction and edge enhancement parameters
            accordingly to produce the best quality images.
8599
            This is a relative factor; 1.0 indicates that the application hasn't processed the
            input buffer in a way that affects its effective exposure time.
8602
            This control is only effective for YUV reprocessing capture requests. For noise
            reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
8605            Similarly, for edge enhancement reprocessing, it is only effective when
8606            `android.edge.mode != OFF`.
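
            As an illustrative sketch only (the `cameraDevice`, `originalResult`, and
            `numFusedFrames` variables are hypothetical), an application that fused N
            output frames into one input frame might communicate the factor like this:

                // Build a reprocess request from one of the original capture results.
                CaptureRequest.Builder reprocessBuilder =
                    cameraDevice.createReprocessCaptureRequest(originalResult);
                // Up to sqrt(N) improvement under a simple photon shot noise model.
                float factor = (float) Math.sqrt(numFusedFrames);
                reprocessBuilder.set(
                    CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);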
8607            </details>
8608          <tag id="REPROC" />
8609        </entry>
8610      </controls>
8611      <dynamic>
8612      <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
8613      </clone>
8614      </dynamic>
8615      <static>
8616        <entry name="maxCaptureStall" type="int32" visibility="public" hwlevel="limited">
8617          <description>
          The maximal camera capture pipeline stall (in number of frames) introduced by a
          reprocess capture request.
8620          </description>
8621          <units>Number of frames.</units>
8622          <range> &amp;lt;= 4</range>
8623          <details>
          The key describes the maximal interference that one reprocess (input) request
          can introduce to the camera's simultaneous streaming of regular (output) capture
          requests, including repeating requests.
8627
          When a reprocessing capture request is submitted while a camera output repeating request
          (e.g. preview) is being served by the camera device, it may preempt the camera capture
          pipeline for at least one frame duration so that the camera device is unable to process
          the following capture request in time for the next sensor start-of-exposure boundary.
          When this happens, the application may observe a capture time gap (longer than one frame
          duration) between adjacent capture output frames, which usually appears as a preview
          glitch if the repeating request output targets include a preview surface. This key gives
          the worst-case number of frames of stall introduced by one reprocess request with any
          format/size combination.
8637
          If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
          ongoing camera repeating request outputs, as if the reprocess request had never been
          issued.
8640
          This key is supported if the camera device supports PRIVATE or YUV reprocessing
          (i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
          YUV_REPROCESSING).
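
          As a rough sketch (the `characteristics` variable is hypothetical), an
          application can use the public REPROCESS_MAX_CAPTURE_STALL key to decide
          when issuing a reprocess request is acceptable:

              // Worst-case number of stalled frames per reprocess request.
              Integer maxStall =
                  characteristics.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
              if (maxStall == null || maxStall == 0) {
                // Key absent (no reprocess support) or zero stall: no glitch expected.
              } else {
                // Expect up to maxStall extra frame durations of gap around each
                // reprocess capture; schedule reprocessing when a brief preview
                // glitch is acceptable.
              }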
8644          </details>
8645          <tag id="REPROC" />
8646        </entry>
8647      </static>
8648    </section>
8649    <section name="depth">
8650      <static>
8651        <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
8652          <description>Maximum number of points that a depth point cloud may contain.
8653          </description>
8654          <details>
8655            If a camera device supports outputting depth range data in the form of a depth point
8656            cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
8657            number of points an output buffer may contain.
8658
8659            Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
8660            If output in the depth point cloud format is not supported, this entry will
8661            not be defined.
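
            As a hedged illustration (assuming the documented DEPTH_POINT_CLOUD packing
            of four floats per point: X, Y, Z, confidence, in native byte order; the
            `image` variable is a hypothetical {@link android.media.Image} in that
            format), an application could count the points actually present in a buffer:

                // Interpret the single plane as native-order floats.
                ByteBuffer raw = image.getPlanes()[0].getBuffer();
                FloatBuffer points = raw.order(ByteOrder.nativeOrder()).asFloatBuffer();
                // Four floats per point; never more than maxDepthSamples points.
                int numPoints = points.remaining() / 4;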
8662          </details>
8663          <tag id="DEPTH" />
8664        </entry>
8665        <entry name="availableDepthStreamConfigurations" type="int32" visibility="hidden"
8666          enum="true" container="array"
8667          typedef="streamConfiguration" hwlevel="limited">
8668          <array>
8669            <size>n</size>
8670            <size>4</size>
8671          </array>
8672          <enum>
8673            <value>OUTPUT</value>
8674            <value>INPUT</value>
8675          </enum>
8676          <description>The available depth dataspace stream
8677          configurations that this camera device supports
8678          (i.e. format, width, height, output/input stream).
8679          </description>
8680          <details>
8681            These are output stream configurations for use with
8682            dataSpace HAL_DATASPACE_DEPTH. The configurations are
8683            listed as `(format, width, height, input?)` tuples.
8684
8685            Only devices that support depth output for at least
8686            the HAL_PIXEL_FORMAT_Y16 dense depth map may include
8687            this entry.
8688
8689            A device that also supports the HAL_PIXEL_FORMAT_BLOB
8690            sparse depth point cloud must report a single entry for
8691            the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
8692            android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
8693            the entries for HAL_PIXEL_FORMAT_Y16.
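
            At the Java API level, these depth configurations are surfaced to
            applications through the same
            {@link android.hardware.camera2.params.StreamConfigurationMap} as the
            color configurations; a rough sketch (the `characteristics` variable is
            hypothetical):

                // Query the supported depth output sizes.
                StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                Size[] depthSizes = map.getOutputSizes(ImageFormat.DEPTH16);
                Size[] cloudSizes = map.getOutputSizes(ImageFormat.DEPTH_POINT_CLOUD);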
8694          </details>
8695          <tag id="DEPTH" />
8696        </entry>
8697        <entry name="availableDepthMinFrameDurations" type="int64" visibility="hidden"
8698               container="array"
8699               typedef="streamConfigurationDuration" hwlevel="limited">
8700          <array>
8701            <size>4</size>
8702            <size>n</size>
8703          </array>
8704          <description>This lists the minimum frame duration for each
8705          format/size combination for depth output formats.
8706          </description>
8707          <units>(format, width, height, ns) x n</units>
8708          <details>
8709          This should correspond to the frame duration when only that
8710          stream is active, with all processing (typically in android.*.mode)
8711          set to either OFF or FAST.
8712
8713          When multiple streams are used in a request, the minimum frame
8714          duration will be max(individual stream min durations).
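
          For example (with purely illustrative numbers), if a request targets a
          DEPTH16 stream whose minimum frame duration is 50 ms and a YUV stream
          whose minimum frame duration is 33 ms, the minimum frame duration for
          that request is max(50 ms, 33 ms) = 50 ms.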
8715
8716          The minimum frame duration of a stream (of a particular format, size)
8717          is the same regardless of whether the stream is input or output.
8718
8719          See android.sensor.frameDuration and
8720          android.scaler.availableStallDurations for more details about
8721          calculating the max frame rate.
8722
8723          (Keep in sync with {@link
8724          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
8725          </details>
8726          <tag id="DEPTH" />
8727        </entry>
8728        <entry name="availableDepthStallDurations" type="int64" visibility="hidden"
8729               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
8730          <array>
8731            <size>4</size>
8732            <size>n</size>
8733          </array>
8734          <description>This lists the maximum stall duration for each
8735          output format/size combination for depth streams.
8736          </description>
8737          <units>(format, width, height, ns) x n</units>
8738          <details>
8739          A stall duration is how much extra time would get added
8740          to the normal minimum frame duration for a repeating request
8741          that has streams with non-zero stall.
8742
8743          This functions similarly to
8744          android.scaler.availableStallDurations for depth
8745          streams.
8746
8747          All depth output stream formats may have a nonzero stall
8748          duration.
8749          </details>
8750          <tag id="DEPTH" />
8751        </entry>
8752        <entry name="depthIsExclusive" type="byte" visibility="public"
8753               enum="true" typedef="boolean" hwlevel="limited">
8754          <enum>
8755            <value>FALSE</value>
8756            <value>TRUE</value>
8757          </enum>
8758          <description>Indicates whether a capture request may target both a
8759          DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
8760          YUV_420_888, JPEG, or RAW) simultaneously.
8761          </description>
8762          <details>
8763          If TRUE, including both depth and color outputs in a single
8764          capture request is not supported. An application must interleave color
8765          and depth requests.  If FALSE, a single request can target both types
8766          of output.
8767
8768          Typically, this restriction exists on camera devices that
8769          need to emit a specific pattern or wavelength of light to
8770          measure depth values, which causes the color image to be
8771          corrupted during depth measurement.
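
          A brief sketch (the `characteristics` variable is hypothetical) of how an
          application might branch on the public DEPTH_DEPTH_IS_EXCLUSIVE key:

              // Decide whether depth and color can share one capture request.
              Boolean exclusive =
                  characteristics.get(CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
              if (Boolean.TRUE.equals(exclusive)) {
                // Alternate between depth-only and color-only requests.
              } else {
                // A single request may target both depth and color outputs.
              }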
8772          </details>
8773        </entry>
8774      </static>
8775    </section>
8776  </namespace>
8777</metadata>
8778