metadata_properties.xml revision 84a51a47a6a39592c994209783255781a76186a5
1<?xml version="1.0" encoding="utf-8"?>
2<!-- Copyright (C) 2012 The Android Open Source Project
3
4     Licensed under the Apache License, Version 2.0 (the "License");
5     you may not use this file except in compliance with the License.
6     You may obtain a copy of the License at
7
8          http://www.apache.org/licenses/LICENSE-2.0
9
10     Unless required by applicable law or agreed to in writing, software
11     distributed under the License is distributed on an "AS IS" BASIS,
12     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13     See the License for the specific language governing permissions and
14     limitations under the License.
15-->
16<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
17xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
18xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
19
20  <tags>
21    <tag id="BC">
22        Needed for backwards compatibility with old Java API
23    </tag>
24    <tag id="V1">
25        New features for first camera 2 release (API1)
26    </tag>
27    <tag id="RAW">
28        Needed for useful RAW image processing and DNG file support
29    </tag>
30    <tag id="HAL2">
31        Entry is only used by camera device HAL 2.x
32    </tag>
33    <tag id="FULL">
34        Entry is required for full hardware level devices, and optional for other hardware levels
35    </tag>
36    <tag id="DEPTH">
37        Entry is required for the depth capability.
38    </tag>
39    <tag id="REPROC">
40        Entry is required for the YUV or PRIVATE reprocessing capability.
41    </tag>
42    <tag id="FUTURE">
43        Entry is under-specified and is not required for now. This is for book-keeping purposes;
44        do not implement or use it, as it may be revised in the future.
45    </tag>
46  </tags>
47
48  <types>
49    <typedef name="pairFloatFloat">
50      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
51    </typedef>
52    <typedef name="pairDoubleDouble">
53      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
54    </typedef>
55    <typedef name="rectangle">
56      <language name="java">android.graphics.Rect</language>
57    </typedef>
58    <typedef name="size">
59      <language name="java">android.util.Size</language>
60    </typedef>
61    <typedef name="string">
62      <language name="java">String</language>
63    </typedef>
64    <typedef name="boolean">
65      <language name="java">boolean</language>
66    </typedef>
67    <typedef name="imageFormat">
68      <language name="java">int</language>
69    </typedef>
70    <typedef name="streamConfigurationMap">
71      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
72    </typedef>
73    <typedef name="streamConfiguration">
74      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
75    </typedef>
76    <typedef name="streamConfigurationDuration">
77      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
78    </typedef>
79    <typedef name="face">
80      <language name="java">android.hardware.camera2.params.Face</language>
81    </typedef>
82    <typedef name="meteringRectangle">
83      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
84    </typedef>
85    <typedef name="rangeFloat">
86      <language name="java">android.util.Range&lt;Float&gt;</language>
87    </typedef>
88    <typedef name="rangeInt">
89      <language name="java">android.util.Range&lt;Integer&gt;</language>
90    </typedef>
91    <typedef name="rangeLong">
92      <language name="java">android.util.Range&lt;Long&gt;</language>
93    </typedef>
94    <typedef name="colorSpaceTransform">
95      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
96    </typedef>
97    <typedef name="rggbChannelVector">
98      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
99    </typedef>
100    <typedef name="blackLevelPattern">
101      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
102    </typedef>
103    <typedef name="enumList">
104      <language name="java">int</language>
105    </typedef>
106    <typedef name="sizeF">
107      <language name="java">android.util.SizeF</language>
108    </typedef>
109    <typedef name="point">
110      <language name="java">android.graphics.Point</language>
111    </typedef>
112    <typedef name="tonemapCurve">
113      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
114    </typedef>
115    <typedef name="lensShadingMap">
116      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
117    </typedef>
118    <typedef name="location">
119      <language name="java">android.location.Location</language>
120    </typedef>
121    <typedef name="highSpeedVideoConfiguration">
122      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
123    </typedef>
124    <typedef name="reprocessFormatsMap">
125      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
126    </typedef>
127  </types>
128
129  <namespace name="android">
130    <section name="colorCorrection">
131      <controls>
132        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
133          <enum>
134            <value>TRANSFORM_MATRIX
135              <notes>Use the android.colorCorrection.transform matrix
136                and android.colorCorrection.gains to do color conversion.
137
138                All advanced white balance adjustments (not specified
139                by our white balance pipeline) must be disabled.
140
141                If AWB is enabled with `android.control.awbMode != OFF`, then
142                TRANSFORM_MATRIX is ignored. The camera device will override
143                this value to either FAST or HIGH_QUALITY.
144              </notes>
145            </value>
146            <value>FAST
147              <notes>Color correction processing must not slow down
148              capture rate relative to sensor raw output.
149
150              Advanced white balance adjustments above and beyond
151              the specified white balance pipeline may be applied.
152
153              If AWB is enabled with `android.control.awbMode != OFF`, then
154              the camera device uses the last frame's AWB values
155              (or defaults if AWB has never been run).
156            </notes>
157            </value>
158            <value>HIGH_QUALITY
159              <notes>Color correction processing operates at improved
160              quality but the capture rate might be reduced (relative to sensor
161              raw output rate).
162
163              Advanced white balance adjustments above and beyond
164              the specified white balance pipeline may be applied.
165
166              If AWB is enabled with `android.control.awbMode != OFF`, then
167              the camera device uses the last frame's AWB values
168              (or defaults if AWB has never been run).
169            </notes>
170            </value>
171          </enum>
172
173          <description>
174          The mode control selects how the image data is converted from the
175          sensor's native color into linear sRGB color.
176          </description>
177          <details>
178          When auto-white balance (AWB) is enabled with android.control.awbMode, this
179          control is overridden by the AWB routine. When AWB is disabled, the
180          application controls how the color mapping is performed.
181
182          We define the expected processing pipeline below. For consistency
183          across devices, this is always the case with TRANSFORM_MATRIX.
184
185          When either FAST or HIGH_QUALITY is used, the camera device may
186          do additional processing but android.colorCorrection.gains and
187          android.colorCorrection.transform will still be provided by the
188          camera device (in the results) and be roughly correct.
189
190          Switching to TRANSFORM_MATRIX and using the data provided from
191          FAST or HIGH_QUALITY will yield a picture with the same white point
192          as what was produced by the camera device in the earlier frame.
193
194          The expected processing pipeline is as follows:
195
196          ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
197
198          The white balance is encoded by two values, a 4-channel white-balance
199          gain vector (applied in the Bayer domain), and a 3x3 color transform
200          matrix (applied after demosaic).
201
202          The 4-channel white-balance gains are defined as:
203
204              android.colorCorrection.gains = [ R G_even G_odd B ]
205
206          where `G_even` is the gain for green pixels on even rows of the
207          output, and `G_odd` is the gain for green pixels on the odd rows.
208          These may be identical for a given camera device implementation; if
209          the camera device does not support a separate gain for even/odd green
210          channels, it will use the `G_even` value, and write `G_odd` equal to
211          `G_even` in the output result metadata.
212
213          The 3x3 color transform matrix is defined as a 9-entry vector:
214
215              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
216
217          which defines a transform from input sensor colors, `P_in = [ r g b ]`,
218          to output linear sRGB, `P_out = [ r' g' b' ]`,
219
220          with colors as follows:
221
222              r' = I0r + I1g + I2b
223              g' = I3r + I4g + I5b
224              b' = I6r + I7g + I8b
225
226          Both the input and output value ranges must match. Overflow/underflow
227          values are clipped to fit within the range.
228          </details>
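          <!-- A minimal illustrative Java sketch of the per-pixel math described above, assuming
               normalized [0, 1.0] sensor RGB input and the row-major 9-entry transform; the method
               and variable names here are hypothetical, not part of any API.

                 // The gains [R, G_even, G_odd, B] are applied in the Bayer domain before demosaic;
                 // the 3x3 transform [I0 .. I8] is applied afterwards, in row-major order.
                 static float[] toLinearSrgb(float r, float g, float b, float[] t) {
                   float rOut = t[0] * r + t[1] * g + t[2] * b;
                   float gOut = t[3] * r + t[4] * g + t[5] * b;
                   float bOut = t[6] * r + t[7] * g + t[8] * b;
                   // Input and output ranges must match; overflow/underflow is clipped.
                   return new float[] {
                       Math.min(1.0f, Math.max(0.0f, rOut)),
                       Math.min(1.0f, Math.max(0.0f, gOut)),
                       Math.min(1.0f, Math.max(0.0f, bOut))};
                 }
          -->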
229          <hal_details>
230          HAL must support both FAST and HIGH_QUALITY if color correction control is available
231          on the camera device, but the underlying implementation can be the same for both modes.
232          That is, if the highest quality implementation on the camera device does not slow down
233          capture rate, then FAST and HIGH_QUALITY should generate the same output.
234          </hal_details>
235        </entry>
236        <entry name="transform" type="rational" visibility="public"
237               type_notes="3x3 rational matrix in row-major order"
238               container="array" typedef="colorSpaceTransform" hwlevel="full">
239          <array>
240            <size>3</size>
241            <size>3</size>
242          </array>
243          <description>A color transform matrix to use to transform
244          from sensor RGB color space to output linear sRGB color space.
245          </description>
246          <units>Unitless scale factors</units>
247          <details>This matrix is either set by the camera device when the request
248          android.colorCorrection.mode is not TRANSFORM_MATRIX, or
249          directly by the application in the request when the
250          android.colorCorrection.mode is TRANSFORM_MATRIX.
251
252          In the latter case, the camera device may round the matrix to account
253          for precision issues; the final rounded matrix should be reported back
254          in this matrix result metadata. The transform should keep the magnitude
255          of the output color values within `[0, 1.0]` (assuming input color
256          values are within the normalized range `[0, 1.0]`), or clipping may occur.
257
258          The valid range of each matrix element varies on different devices, but
259          values within [-1.5, 3.0] are guaranteed not to be clipped.
260          </details>
261        </entry>
262        <entry name="gains" type="float" visibility="public"
263               type_notes="A 1D array of floats for 4 color channel gains"
264               container="array" typedef="rggbChannelVector" hwlevel="full">
265          <array>
266            <size>4</size>
267          </array>
268          <description>Gains applying to Bayer raw color channels for
269          white-balance.</description>
270          <units>Unitless gain factors</units>
271          <details>
272          These per-channel gains are either set by the camera device
273          when the request android.colorCorrection.mode is not
274          TRANSFORM_MATRIX, or directly by the application in the
275          request when the android.colorCorrection.mode is
276          TRANSFORM_MATRIX.
277
278          The gains in the result metadata are the gains actually
279          applied by the camera device to the current frame.
280
281          The valid range of gains varies on different devices, but gains
282          between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
283          device allows gains below 1.0, this is usually not recommended because
284          it can create color artifacts.
285          </details>
286          <hal_details>
287          The 4-channel white-balance gains are defined in
288          the order of `[R G_even G_odd B]`, where `G_even` is the gain
289          for green pixels on even rows of the output, and `G_odd`
290          is the gain for green pixels on the odd rows.
291
292          If a HAL does not support a separate gain for even/odd green
293          channels, it must use the `G_even` value, and write
294          `G_odd` equal to `G_even` in the output result metadata.
295          </hal_details>
296        </entry>
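        <!-- An application-side Java sketch of supplying manual white balance as described above,
             assuming the android.hardware.camera2 classes listed in the typedefs and an existing
             CaptureRequest.Builder named `builder` (hypothetical). AWB must be OFF for
             TRANSFORM_MATRIX to take effect.

               // Identity 3x3 transform, expressed as 9 rationals (numerator/denominator pairs).
               ColorSpaceTransform identity = new ColorSpaceTransform(new int[] {
                   1, 1,  0, 1,  0, 1,   // 1 0 0
                   0, 1,  1, 1,  0, 1,   // 0 1 0
                   0, 1,  0, 1,  1, 1}); // 0 0 1
               builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
               builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                       CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
               builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, identity);
               // Gains in [R, G_even, G_odd, B] order; values in [1.0, 3.0] will not be clipped.
               builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                       new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
        -->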
297        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
298          <enum>
299            <value>OFF
300              <notes>
301                No aberration correction is applied.
302              </notes>
303            </value>
304            <value>FAST
305              <notes>
306                Aberration correction will not slow down capture rate
307                relative to sensor raw output.
308            </notes>
309            </value>
310            <value>HIGH_QUALITY
311              <notes>
312                Aberration correction operates at improved quality but the capture rate might be
313                reduced (relative to sensor raw output rate).
314            </notes>
315            </value>
316          </enum>
317          <description>
318            Mode of operation for the chromatic aberration correction algorithm.
319          </description>
320          <range>android.colorCorrection.availableAberrationModes</range>
321          <details>
322            Chromatic (color) aberration is caused by the fact that different wavelengths of light
323            cannot focus on the same point after exiting the lens. This metadata provides
324            high-level control of the chromatic aberration correction algorithm, which aims to
325            minimize the chromatic artifacts that may occur along object boundaries in an
326            image.
327
328            FAST/HIGH_QUALITY both mean that camera-device-determined aberration
329            correction will be applied. HIGH_QUALITY mode indicates that the camera device will
330            use the highest-quality aberration correction algorithms, even if it slows down
331            capture rate. FAST means the camera device will not slow down capture rate when
332            applying aberration correction.
333
334            LEGACY devices will always be in FAST mode.
335          </details>
336        </entry>
337      </controls>
338      <dynamic>
339        <clone entry="android.colorCorrection.mode" kind="controls">
340        </clone>
341        <clone entry="android.colorCorrection.transform" kind="controls">
342        </clone>
343        <clone entry="android.colorCorrection.gains" kind="controls">
344        </clone>
345        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
346        </clone>
347      </dynamic>
348      <static>
349        <entry name="availableAberrationModes" type="byte" visibility="public"
350        type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
351          <array>
352            <size>n</size>
353          </array>
354          <description>
355            List of aberration correction modes for android.colorCorrection.aberrationMode that are
356            supported by this camera device.
357          </description>
358          <range>Any value listed in android.colorCorrection.aberrationMode</range>
359          <details>
360            This key lists the valid modes for android.colorCorrection.aberrationMode.  If no
361            aberration correction modes are available for a device, this list will solely include
362            OFF mode. All camera devices will support either OFF or FAST mode.
363
364            Camera devices that support the MANUAL_POST_PROCESSING capability will always list
365            OFF mode. This includes all FULL level devices.
366
367            LEGACY devices will always only support FAST mode.
368          </details>
369          <hal_details>
370            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
371            on the camera device, but the underlying implementation can be the same for both modes.
372            That is, if the highest quality implementation on the camera device does not slow down
373            capture rate, then FAST and HIGH_QUALITY will generate the same output.
374          </hal_details>
375          <tag id="V1" />
376        </entry>
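        <!-- A Java sketch of selecting an aberration mode from this list, assuming an existing
             CameraCharacteristics `characteristics` and CaptureRequest.Builder `builder`
             (both hypothetical names):

               int[] modes = characteristics.get(
                       CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES);
               int chosen = modes[0];
               for (int mode : modes) {
                 if (mode == CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
                   chosen = mode; // prefer quality over capture rate, e.g. for still capture
                 }
               }
               builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, chosen);
        -->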
377      </static>
378    </section>
379    <section name="control">
380      <controls>
381        <entry name="aeAntibandingMode" type="byte" visibility="public"
382               enum="true" hwlevel="legacy">
383          <enum>
384            <value>OFF
385              <notes>
386                The camera device will not adjust exposure duration to
387                avoid banding problems.
388              </notes>
389            </value>
390            <value>50HZ
391              <notes>
392                The camera device will adjust exposure duration to
393                avoid banding problems with 50Hz illumination sources.
394              </notes>
395            </value>
396            <value>60HZ
397              <notes>
398                The camera device will adjust exposure duration to
399                avoid banding problems with 60Hz illumination
400                sources.
401              </notes>
402            </value>
403            <value>AUTO
404              <notes>
405                The camera device will automatically adapt its
406                antibanding routine to the current illumination
407                condition. This is the default mode if AUTO is
408                available on given camera device.
409              </notes>
410            </value>
411          </enum>
412          <description>
413            The desired setting for the camera device's auto-exposure
414            algorithm's antibanding compensation.
415          </description>
416          <range>
417            android.control.aeAvailableAntibandingModes
418          </range>
419          <details>
420            Some kinds of lighting fixtures, such as some fluorescent
421            lights, flicker at the rate of the power supply frequency
422            (60Hz or 50Hz, depending on country). While this is
423            typically not noticeable to a person, it can be visible to
424            a camera device. If a camera sets its exposure time to the
425            wrong value, the flicker may become visible in the
426            viewfinder, or in a final captured image, as a
427            set of variable-brightness bands across the image.
428
429            Therefore, the auto-exposure routines of camera devices
430            include antibanding routines that ensure that the chosen
431            exposure value will not cause such banding. The choice of
432            exposure time depends on the rate of flicker, which the
433            camera device can detect automatically, or the expected
434            rate can be selected by the application using this
435            control.
436
437            A given camera device may not support all of the possible
438            options for the antibanding mode. The
439            android.control.aeAvailableAntibandingModes key contains
440            the available modes for a given camera device.
441
442            AUTO mode is the default if it is available on a given
443            camera device. When AUTO mode is not available, the
444            default will be either 50HZ or 60HZ, and both 50HZ
445            and 60HZ will be available.
446
447            If manual exposure control is enabled (by setting
448            android.control.aeMode or android.control.mode to OFF),
449            then this setting has no effect, and the application must
450            ensure it selects exposure times that do not cause banding
451            issues. The android.statistics.sceneFlicker key can assist
452            the application in this.
453          </details>
454          <hal_details>
455            For all capture request templates, this field must be set
456            to AUTO if AUTO mode is available. If AUTO is not available,
457            the default must be either 50HZ or 60HZ, and both 50HZ and
458            60HZ must be available.
459
460            If manual exposure control is enabled (by setting
461            android.control.aeMode or android.control.mode to OFF),
462            then the exposure values provided by the application must not be
463            adjusted for antibanding.
464          </hal_details>
465          <tag id="BC" />
466        </entry>
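        <!-- A Java sketch of the mode selection described above (prefer AUTO, fall back to the
             first advertised mode), assuming hypothetical `characteristics` and `builder` objects:

               int[] antibanding = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES);
               int mode = antibanding[0]; // 50HZ and/or 60HZ are present when AUTO is not
               for (int m : antibanding) {
                 if (m == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) {
                   mode = m; // let the camera device detect the flicker rate
                 }
               }
               builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode);
        -->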
467        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
468          <description>Adjustment to auto-exposure (AE) target image
469          brightness.</description>
470          <units>Compensation steps</units>
471          <range>android.control.aeCompensationRange</range>
472          <details>
473          The adjustment is measured as a count of steps, with the
474          step size defined by android.control.aeCompensationStep and the
475          allowed range by android.control.aeCompensationRange.
476
477          For example, if the exposure value (EV) step is 0.333, '6'
478          will mean an exposure compensation of +2 EV; -3 will mean an
479          exposure compensation of -1 EV. One EV represents a doubling
480          of image brightness. Note that this control will only be
481          effective if android.control.aeMode `!=` OFF. This control
482          will take effect even when android.control.aeLock `== true`.
483
484          If the exposure compensation value is changed, the camera device
485          may take several frames to reach the newly requested exposure target.
486          During that time, the android.control.aeState field will be in the SEARCHING
487          state. Once the new exposure target is reached, android.control.aeState will
488          change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
489          FLASH_REQUIRED (if the scene is too dark for still capture).
490          </details>
491          <tag id="BC" />
492        </entry>
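        <!-- A worked Java example of the step arithmetic above, assuming hypothetical
             `characteristics` and `builder` objects; with a 1/3 EV step, +2 EV is 6 steps:

               Rational step =
                       characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
               Range<Integer> range =
                       characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
               int steps = (int) Math.round(2.0 / step.doubleValue()); // request +2 EV
               builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, range.clamp(steps));
        -->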
493        <entry name="aeLock" type="byte" visibility="public" enum="true"
494               typedef="boolean" hwlevel="legacy">
495          <enum>
496            <value>OFF
497            <notes>Auto-exposure lock is disabled; the AE algorithm
498            is free to update its parameters.</notes></value>
499            <value>ON
500            <notes>Auto-exposure lock is enabled; the AE algorithm
501            must not update the exposure and sensitivity parameters
502            while the lock is active.
503
504            android.control.aeExposureCompensation setting changes
505            will still take effect while auto-exposure is locked.
506
507            Some rare LEGACY devices may not support
508            this, in which case the value will always be overridden to OFF.
509            </notes></value>
510          </enum>
511          <description>Whether auto-exposure (AE) is currently locked to its latest
512          calculated values.</description>
513          <details>
514          When set to `true` (ON), the AE algorithm is locked to its latest parameters,
515          and will not change exposure settings until the lock is set to `false` (OFF).
516
517          Note that even when AE is locked, the flash may be fired if
518          the android.control.aeMode is ON_AUTO_FLASH /
519          ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
520
521          When android.control.aeExposureCompensation is changed, even if the AE lock
522          is ON, the camera device will still adjust its exposure value.
523
524          If AE precapture is triggered (see android.control.aePrecaptureTrigger)
525          when AE is already locked, the camera device will not change the exposure time
526          (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
527          parameters. The flash may be fired if the android.control.aeMode
528          is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
529          android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
530          Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
531
532          When an AE precapture sequence is triggered, AE unlock will not be able to unlock
533          the AE if AE is locked by the camera device internally during the precapture metering
534          sequence. In other words, submitting requests with AE unlock has no effect on an
535          ongoing precapture metering sequence. Otherwise, the precapture metering sequence
536          will never succeed in a sequence of preview requests where AE lock is always set
537          to `false`.
538
539          Since the camera device has a pipeline of in-flight requests, the settings that
540          get locked do not necessarily correspond to the settings that were present in the
541          latest capture result received from the camera device, since additional captures
542          and AE updates may have occurred even before the result was sent out. If an
543          application is switching between automatic and manual control and wishes to eliminate
544          any flicker during the switch, the following procedure is recommended:
545
546            1. Starting in auto-AE mode:
547            2. Lock AE
548            3. Wait for the first result to be output that has the AE locked
549            4. Copy exposure settings from that result into a request, set the request to manual AE
550            5. Submit the capture request, proceed to run manual AE as desired.
551
552          See android.control.aeState for AE lock related state transition details.
553          </details>
554          <tag id="BC" />
555        </entry>
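        <!-- A Java sketch of the lock-then-go-manual procedure listed above, assuming hypothetical
             `previewBuilder`, `session`, `callback` and `handler` objects, and that the application
             separately watches CaptureResult.CONTROL_AE_STATE for LOCKED:

               // Steps 1-2: lock AE while still in an auto-exposure mode.
               previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
               session.setRepeatingRequest(previewBuilder.build(), callback, handler);

               // Steps 3-5: once a result arrives with AE locked, copy its exposure values
               // into a manual request and resume.
               void onAeLocked(CaptureResult result) throws CameraAccessException {
                 previewBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                         CameraMetadata.CONTROL_AE_MODE_OFF);
                 previewBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
                         result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
                 previewBuilder.set(CaptureRequest.SENSOR_SENSITIVITY,
                         result.get(CaptureResult.SENSOR_SENSITIVITY));
                 previewBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION,
                         result.get(CaptureResult.SENSOR_FRAME_DURATION));
                 session.setRepeatingRequest(previewBuilder.build(), callback, handler);
               }
        -->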
556        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
557          <enum>
558            <value>OFF
559              <notes>
560                The camera device's autoexposure routine is disabled.
561
562                The application-selected android.sensor.exposureTime,
563                android.sensor.sensitivity and
564                android.sensor.frameDuration are used by the camera
565                device, along with android.flash.* fields, if there's
566                a flash unit for this camera device.
567
568                Note that auto-white balance (AWB) and auto-focus (AF)
569                behavior is device dependent when AE is in OFF mode.
570                To have consistent behavior across different devices,
571                it is recommended to either set AWB and AF to OFF mode
572                or lock AWB and AF before setting AE to OFF.
573                See android.control.awbMode, android.control.afMode,
574                android.control.awbLock, and android.control.afTrigger
575                for more details.
576
577                LEGACY devices do not support the OFF mode and will
578                override attempts to use this value to ON.
579              </notes>
580            </value>
581            <value>ON
582              <notes>
583                The camera device's autoexposure routine is active,
584                with no flash control.
585
586                The application's values for
587                android.sensor.exposureTime,
588                android.sensor.sensitivity, and
589                android.sensor.frameDuration are ignored. The
590                application has control over the various
591                android.flash.* fields.
592              </notes>
593            </value>
594            <value>ON_AUTO_FLASH
595              <notes>
596                Like ON, except that the camera device also controls
597                the camera's flash unit, firing it in low-light
598                conditions.
599
600                The flash may be fired during a precapture sequence
601                (triggered by android.control.aePrecaptureTrigger) and
602                may be fired for captures for which the
603                android.control.captureIntent field is set to
604                STILL_CAPTURE
605              </notes>
606            </value>
607            <value>ON_ALWAYS_FLASH
608              <notes>
609                Like ON, except that the camera device also controls
610                the camera's flash unit, always firing it for still
611                captures.
612
613                The flash may be fired during a precapture sequence
614                (triggered by android.control.aePrecaptureTrigger) and
615                will always be fired for captures for which the
616                android.control.captureIntent field is set to
617                STILL_CAPTURE
618              </notes>
619            </value>
620            <value>ON_AUTO_FLASH_REDEYE
621              <notes>
622                Like ON_AUTO_FLASH, but with automatic red eye
623                reduction.
624
625                If deemed necessary by the camera device, a red eye
626                reduction flash will fire during the precapture
627                sequence.
628              </notes>
629            </value>
630          </enum>
631          <description>The desired mode for the camera device's
632          auto-exposure routine.</description>
633          <range>android.control.aeAvailableModes</range>
634          <details>
635            This control is only effective if android.control.mode is
636            AUTO.
637
638            When set to any of the ON modes, the camera device's
639            auto-exposure routine is enabled, overriding the
640            application's selected exposure time, sensor sensitivity,
641            and frame duration (android.sensor.exposureTime,
642            android.sensor.sensitivity, and
643            android.sensor.frameDuration). If one of the FLASH modes
644            is selected, the camera device's flash unit controls are
645            also overridden.
646
647            The FLASH modes are only available if the camera device
648            has a flash unit (android.flash.info.available is `true`).
649
650            If flash TORCH mode is desired, this field must be set to
651            ON or OFF, and android.flash.mode set to TORCH.
652
653            When set to any of the ON modes, the values chosen by the
654            camera device auto-exposure routine for the overridden
655            fields for a given capture will be available in its
656            CaptureResult.
657          </details>
658          <tag id="BC" />
659        </entry>
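        <!-- A short Java sketch of the TORCH case mentioned above (AE mode must be ON or OFF while
             android.flash.mode is TORCH), assuming hypothetical `characteristics` and `builder`:

               Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
               if (Boolean.TRUE.equals(hasFlash)) {
                 builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
                 builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
               }
        -->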
660        <entry name="aeRegions" type="int32" visibility="public"
661            optional="true" container="array" typedef="meteringRectangle">
662          <array>
663            <size>5</size>
664            <size>area_count</size>
665          </array>
666          <description>List of metering areas to use for auto-exposure adjustment.</description>
667          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
668          <range>Coordinates must be between `[(0,0), (width, height))` of
669          android.sensor.info.activeArraySize</range>
670          <details>
671              Not available if android.control.maxRegionsAe is 0.
672              Otherwise will always be present.
673
674              The maximum number of regions supported by the device is determined by the value
675              of android.control.maxRegionsAe.
676
677              The coordinate system is based on the active pixel array,
678              with (0,0) being the top-left pixel in the active pixel array, and
679              (android.sensor.info.activeArraySize.width - 1,
680              android.sensor.info.activeArraySize.height - 1) being the
681              bottom-right pixel in the active pixel array.
682
683              The weight must be within `[0, 1000]`, and represents a weight
684              for every pixel in the area. This means that a large metering area
685              with the same weight as a smaller area will have more effect in
686              the metering result. Metering areas can partially overlap and the
687              camera device will add the weights in the overlap region.
688
689              The weights are relative to weights of other exposure metering regions, so if only one
690              region is used, all non-zero weights will have the same effect. A region with 0
691              weight is ignored.
692
693              If all regions have 0 weight, then no specific metering area needs to be used by the
694              camera device.
695
696              If the metering region is outside the used android.scaler.cropRegion returned in
697              capture result metadata, the camera device will ignore the sections outside the crop
698              region and output only the intersection rectangle as the metering region in the result
699              metadata.  If the region is entirely outside the crop region, it will be ignored and
700              not reported in the result metadata.
701          </details>
702          <hal_details>
703              The HAL level representation of MeteringRectangle[] is an
704              int[5 * area_count].
705              Every five elements represent a metering region of
706              (xmin, ymin, xmax, ymax, weight).
707              The rectangle is defined to be inclusive on xmin and ymin, but
708              exclusive on xmax and ymax.
709          </hal_details>
710          <tag id="BC" />
711        </entry>
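        <!-- A Java sketch of metering on the center quarter of the active array, assuming
             hypothetical `characteristics` and `builder` objects and that the active array
             origin is (0,0) as described above:

               if (characteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE) > 0) {
                 Rect active = characteristics.get(
                         CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                 MeteringRectangle region = new MeteringRectangle(
                         active.width() / 4, active.height() / 4,
                         active.width() / 2, active.height() / 2,
                         MeteringRectangle.METERING_WEIGHT_MAX);
                 builder.set(CaptureRequest.CONTROL_AE_REGIONS,
                         new MeteringRectangle[] { region });
               }
        -->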
712        <entry name="aeTargetFpsRange" type="int32" visibility="public"
713               container="array" typedef="rangeInt" hwlevel="legacy">
714          <array>
715            <size>2</size>
716          </array>
717          <description>Range over which the auto-exposure routine can
718          adjust the capture frame rate to maintain good
719          exposure.</description>
720          <units>Frames per second (FPS)</units>
721          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
722          <details>Only constrains auto-exposure (AE) algorithm, not
723          manual control of android.sensor.exposureTime and
724          android.sensor.frameDuration.</details>
725          <tag id="BC" />
726        </entry>
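        <!-- A Java sketch of requesting a fixed 30fps range when the device advertises one,
             assuming hypothetical `characteristics` and `builder` objects:

               Range<Integer>[] ranges = characteristics.get(
                       CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
               for (Range<Integer> r : ranges) {
                 if (r.getLower() == 30 && r.getUpper() == 30) {
                   builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r);
                 }
               }
        -->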
727        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
728               enum="true" hwlevel="limited">
729          <enum>
730            <value>IDLE
731              <notes>The trigger is idle.</notes>
732            </value>
733            <value>START
734              <notes>The precapture metering sequence will be started
735              by the camera device.
736
737              The exact effect of the precapture trigger depends on
738              the current AE mode and state.</notes>
739            </value>
740            <value>CANCEL
741              <notes>The camera device will cancel any currently active or completed
742              precapture metering sequence; the auto-exposure routine will return to its
743              initial state.</notes>
744            </value>
745          </enum>
746          <description>Whether the camera device will trigger a precapture
747          metering sequence when it processes this request.</description>
748          <details>This entry is normally set to IDLE, or is not
749          included at all in the request settings. When included and
750          set to START, the camera device will trigger the auto-exposure (AE)
751          precapture metering sequence.
752
753          When set to CANCEL, the camera device will cancel any active
754          precapture metering trigger, and return to its initial AE state.
755          If a precapture metering sequence is already completed, and the camera
756          device has implicitly locked the AE for subsequent still capture, the
757          CANCEL trigger will unlock the AE and return to its initial AE state.
758
759          The precapture sequence should be triggered before starting a
760          high-quality still capture for final metering decisions to
761          be made, and for firing pre-capture flash pulses to estimate
762          scene brightness and required final capture flash power, when
763          the flash is enabled.
764
765          Normally, this entry should be set to START for only a
766          single request, and the application should wait until the
767          sequence completes before starting a new one.
768
769          When a precapture metering sequence is finished, the camera device
770          may lock the auto-exposure routine internally to be able to accurately expose the
771          subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
772          For this case, the AE may not resume normal scan if no subsequent still capture is
773          submitted. To ensure that the AE routine restarts normal scan, the application should
774          submit a request with `android.control.aeLock == true`, followed by a request
775          with `android.control.aeLock == false`, if the application decides not to submit a
776          still capture request after the precapture sequence completes. Alternatively, for
777          API level 23 or newer devices, the CANCEL trigger can be used to unlock the AE that the
778          camera device has locked internally if the application doesn't submit a still capture
779          request after the AE precapture trigger. Note that the CANCEL trigger was added in API
780          level 23 and must not be used on devices with earlier API levels.
781
782          The exact effect of auto-exposure (AE) precapture trigger
783          depends on the current AE mode and state; see
784          android.control.aeState for AE precapture state transition
785          details.
786
787          On LEGACY-level devices, the precapture trigger is not supported;
788          capturing a high-resolution JPEG image will automatically trigger a
789          precapture sequence before the high-resolution capture, including
790          potentially firing a pre-capture flash.
791
792          Using the precapture trigger and the auto-focus trigger android.control.afTrigger
793          simultaneously is allowed. However, since these triggers often require cooperation between
794          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
795          focus sweep), the camera device may delay acting on a later trigger until the previous
796          trigger has been fully handled. This may lead to longer intervals between the trigger and
797          changes to android.control.aeState indicating the start of the precapture sequence, for
798          example.
799
800          If both the precapture and the auto-focus trigger are activated on the same request, then
801          the camera device will complete them in the optimal order for that device.
802          </details>
803          <hal_details>
804          The HAL must support triggering the AE precapture trigger while an AF trigger is active
805          (and vice versa), or at the same time as the AF trigger.  It is acceptable for the HAL to
806          treat these as two consecutive triggers, for example handling the AF trigger and then the
807          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
808          to minimize the latency for converging both focus and exposure/flash usage.
809          </hal_details>
810          <tag id="BC" />
811        </entry>
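        <!-- A Java sketch of issuing the one-shot precapture trigger described above, assuming
             hypothetical `previewBuilder`, `session`, `callback` and `handler` objects:

               previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
               session.capture(previewBuilder.build(), callback, handler); // single triggering frame
               previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);  // repeats stay idle
               // Wait for CaptureResult.CONTROL_AE_STATE to leave PRECAPTURE (CONVERGED or
               // FLASH_REQUIRED) before submitting the STILL_CAPTURE request.
        -->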
812        <entry name="afMode" type="byte" visibility="public" enum="true"
813               hwlevel="legacy">
814          <enum>
815            <value>OFF
816            <notes>The auto-focus routine does not control the lens;
817            android.lens.focusDistance is controlled by the
818            application.</notes></value>
819            <value>AUTO
820            <notes>Basic automatic focus mode.
821
822            In this mode, the lens does not move unless
823            the autofocus trigger action is called. When that trigger
824            is activated, AF will transition to ACTIVE_SCAN, then to
825            the outcome of the scan (FOCUSED or NOT_FOCUSED).
826
827            Always supported if lens is not fixed focus.
828
829            Use android.lens.info.minimumFocusDistance to determine if lens
830            is fixed-focus.
831
832            Triggering AF_CANCEL resets the lens position to default,
833            and sets the AF state to INACTIVE.</notes></value>
834            <value>MACRO
835            <notes>Close-up focusing mode.
836
837            In this mode, the lens does not move unless the
838            autofocus trigger action is called. When that trigger is
839            activated, AF will transition to ACTIVE_SCAN, then to
840            the outcome of the scan (FOCUSED or NOT_FOCUSED). This
841            mode is optimized for focusing on objects very close to
842            the camera.
843
844            Triggering cancel AF resets the lens position to
845            default, and sets the AF state to INACTIVE.</notes></value>
849            <value>CONTINUOUS_VIDEO
850            <notes>In this mode, the AF algorithm modifies the lens
851            position continually to attempt to provide a
852            constantly-in-focus image stream.
853
854            The focusing behavior should be suitable for good quality
855            video recording; typically this means slower focus
856            movement and no overshoots. When the AF trigger is not
857            involved, the AF algorithm should start in INACTIVE state,
858            and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
859            states as appropriate. When the AF trigger is activated,
860            the algorithm should immediately transition into
861            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
862            lens position until a cancel AF trigger is received.
863
864            Once cancel is received, the algorithm should transition
865            back to INACTIVE and resume passive scan. Note that this
866            behavior is not identical to CONTINUOUS_PICTURE, since an
867            ongoing PASSIVE_SCAN must immediately be
868            canceled.</notes></value>
869            <value>CONTINUOUS_PICTURE
870            <notes>In this mode, the AF algorithm modifies the lens
871            position continually to attempt to provide a
872            constantly-in-focus image stream.
873
874            The focusing behavior should be suitable for still image
875            capture; typically this means focusing as fast as
876            possible. When the AF trigger is not involved, the AF
877            algorithm should start in INACTIVE state, and then
878            transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
879            appropriate as it attempts to maintain focus. When the AF
880            trigger is activated, the algorithm should finish its
881            PASSIVE_SCAN if active, and then transition into
882            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
883            lens position until a cancel AF trigger is received.
884
885            When the AF cancel trigger is activated, the algorithm
886            should transition back to INACTIVE and then act as if it
887            has just been started.</notes></value>
888            <value>EDOF
889            <notes>Extended depth of field (digital focus) mode.
890
891            The camera device will produce images with an extended
892            depth of field automatically; no special focusing
893            operations need to be done before taking a picture.
894
895            AF triggers are ignored, and the AF state will always be
896            INACTIVE.</notes></value>
897          </enum>
898          <description>Whether auto-focus (AF) is currently enabled, and what
899          mode it is set to.</description>
900          <range>android.control.afAvailableModes</range>
901          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
902          (i.e. `android.lens.info.minimumFocusDistance &gt; 0`). Also note that
903          when android.control.aeMode is OFF, the behavior of AF is device
904          dependent. It is recommended to lock AF by using android.control.afTrigger before
905          setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
906
907          If the lens is controlled by the camera device auto-focus algorithm,
908          the camera device will report the current AF status in android.control.afState
909          in result metadata.</details>
910          <hal_details>
911          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
912          request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
913          up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
914          android.control.afState for detailed state transitions), which indicates that the lens is
915          locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
916          after the lens is locked, the HAL must compensate this movement appropriately such that
917          the same focal plane remains in focus.
918
919          When afMode is one of the continuous auto-focus modes, the HAL is free to start an AF
920          scan whenever it's not locked. When the lens is locked after an AF trigger
921          (see android.control.afState for detailed state transitions), the HAL should maintain the
922          same lock behavior as above.
923
924          When afMode is OFF, the application controls focus manually. The accuracy of the
925          focus distance control depends on the android.lens.info.focusDistanceCalibration.
926          However, the lens must not move regardless of camera movement for any manual focus
927          distance control.
928
929          To put this in concrete terms, if the camera has lens elements which may move based on
930          camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
931          remain in a fixed position invariant to the camera's orientation or motion, for example,
932          by using accelerometer measurements in the lens control logic. This is a typical issue
933          that will arise on camera modules with open-loop VCMs.
934          </hal_details>
935          <tag id="BC" />
936        </entry>
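        <!-- A Java sketch of picking an AF mode based on the fixed-focus check described above,
             assuming hypothetical `characteristics` and `builder` objects and that
             CONTINUOUS_PICTURE is listed in android.control.afAvailableModes:

               Float minFocus = characteristics.get(
                       CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
               boolean fixedFocus = (minFocus == null || minFocus == 0.0f);
               builder.set(CaptureRequest.CONTROL_AF_MODE, fixedFocus
                       ? CameraMetadata.CONTROL_AF_MODE_OFF
                       : CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        -->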
937        <entry name="afRegions" type="int32" visibility="public"
938               optional="true" container="array" typedef="meteringRectangle">
939          <array>
940            <size>5</size>
941            <size>area_count</size>
942          </array>
943          <description>List of metering areas to use for auto-focus.</description>
944          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
945          <range>Coordinates must be between `[(0,0), (width, height))` of
946          android.sensor.info.activeArraySize</range>
947          <details>
948              Not available if android.control.maxRegionsAf is 0.
949              Otherwise will always be present.
950
951              The maximum number of focus areas supported by the device is determined by the value
952              of android.control.maxRegionsAf.
953
954              The coordinate system is based on the active pixel array,
955              with (0,0) being the top-left pixel in the active pixel array, and
956              (android.sensor.info.activeArraySize.width - 1,
957              android.sensor.info.activeArraySize.height - 1) being the
958              bottom-right pixel in the active pixel array.
959
960              The weight must be within `[0, 1000]`, and represents a weight
961              for every pixel in the area. This means that a large metering area
962              with the same weight as a smaller area will have more effect in
963              the metering result. Metering areas can partially overlap and the
964              camera device will add the weights in the overlap region.
965
966              The weights are relative to weights of other metering regions, so if only one region
967              is used, all non-zero weights will have the same effect. A region with 0 weight is
968              ignored.
969
970              If all regions have 0 weight, then no specific metering area needs to be used by the
971              camera device.
972
973              If the metering region is outside the used android.scaler.cropRegion returned in
974              capture result metadata, the camera device will ignore the sections outside the crop
975              region and output only the intersection rectangle as the metering region in the result
976              metadata. If the region is entirely outside the crop region, it will be ignored and
977              not reported in the result metadata.
978          </details>
979          <hal_details>
980              The HAL level representation of MeteringRectangle[] is an
981              int[5 * area_count].
982              Every five elements represent a metering region of
983              (xmin, ymin, xmax, ymax, weight).
984              The rectangle is defined to be inclusive on xmin and ymin, but
985              exclusive on xmax and ymax.
986          </hal_details>
987          <tag id="BC" />
988        </entry>
989        <entry name="afTrigger" type="byte" visibility="public" enum="true"
990               hwlevel="legacy">
991          <enum>
992            <value>IDLE
993              <notes>The trigger is idle.</notes>
994            </value>
995            <value>START
996              <notes>Autofocus will trigger now.</notes>
997            </value>
998            <value>CANCEL
999              <notes>Autofocus will return to its initial
1000              state, and cancel any currently active trigger.</notes>
1001            </value>
1002          </enum>
1003          <description>
1004          Whether the camera device will trigger autofocus for this request.
1005          </description>
1006          <details>This entry is normally set to IDLE, or is not
1007          included at all in the request settings.
1008
1009          When included and set to START, the camera device will trigger the
1010          autofocus algorithm. If autofocus is disabled, this trigger has no effect.
1011
1012          When set to CANCEL, the camera device will cancel any active trigger,
1013          and return to its initial AF state.
1014
1015          Generally, applications should set this entry to START or CANCEL for only a
1016          single capture, and then return it to IDLE (or not set at all). Specifying
1017          START for multiple captures in a row means restarting the AF operation over
1018          and over again.
1019
1020          See android.control.afState for what the trigger means for each AF mode.
1021
1022          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
1023          simultaneously is allowed. However, since these triggers often require cooperation between
1024          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
1025          focus sweep), the camera device may delay acting on a later trigger until the previous
1026          trigger has been fully handled. This may lead to longer intervals between the trigger and
1027          changes to android.control.afState, for example.
1028          </details>
1029          <hal_details>
1030          The HAL must support triggering the AF trigger while an AE precapture trigger is active
1031          (and vice versa), or at the same time as the AE trigger.  It is acceptable for the HAL to
1032          treat these as two consecutive triggers, for example handling the AF trigger and then the
1033          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
1034          to minimize the latency for converging both focus and exposure/flash usage.
1035          </hal_details>
1036          <tag id="BC" />
1037        </entry>
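        <!-- A Java sketch of the single-capture trigger pattern described above, assuming
             hypothetical `previewBuilder`, `session`, `callback` and `handler` objects:

               previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                       CameraMetadata.CONTROL_AF_TRIGGER_START);
               session.capture(previewBuilder.build(), callback, handler); // trigger once
               previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                       CameraMetadata.CONTROL_AF_TRIGGER_IDLE); // do not re-trigger on repeats
               // Track CaptureResult.CONTROL_AF_STATE for FOCUSED_LOCKED or NOT_FOCUSED_LOCKED.
        -->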
1038        <entry name="awbLock" type="byte" visibility="public" enum="true"
1039               typedef="boolean" hwlevel="legacy">
1040          <enum>
1041            <value>OFF
1042            <notes>Auto-white balance lock is disabled; the AWB
1043            algorithm is free to update its parameters if in AUTO
1044            mode.</notes></value>
1045            <value>ON
1046            <notes>Auto-white balance lock is enabled; the AWB
1047            algorithm will not update its parameters while the lock
1048            is active.</notes></value>
1049          </enum>
1050          <description>Whether auto-white balance (AWB) is currently locked to its
1051          latest calculated values.</description>
1052          <details>
1053          When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
1054          and will not change color balance settings until the lock is set to `false` (OFF).
1055
1056          Since the camera device has a pipeline of in-flight requests, the settings that
1057          get locked do not necessarily correspond to the settings that were present in the
1058          latest capture result received from the camera device, since additional captures
1059          and AWB updates may have occurred even before the result was sent out. If an
1060          application is switching between automatic and manual control and wishes to eliminate
1061          any flicker during the switch, the following procedure is recommended:
1062
1063            1. Starting in auto-AWB mode:
1064            2. Lock AWB
1065            3. Wait for the first result to be output that has the AWB locked
1066            4. Copy AWB settings from that result into a request, set the request to manual AWB
1067            5. Submit the capture request, proceed to run manual AWB as desired.
1068
1069          Note that AWB lock is only meaningful when
1070          android.control.awbMode is in the AUTO mode; in other modes,
1071          AWB is already fixed to a specific setting.
1072
1073          Some LEGACY devices may not support ON; the value is then overridden to OFF.
1074          </details>
1075          <tag id="BC" />
1076        </entry>
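        <!-- A Java sketch of the AWB lock-then-go-manual procedure listed above, assuming
             hypothetical `previewBuilder`, `session`, `callback` and `handler` objects, and that
             the application watches CaptureResult.CONTROL_AWB_STATE for LOCKED:

               previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
               session.setRepeatingRequest(previewBuilder.build(), callback, handler);

               // Once a result reports AWB locked, copy its color data into a manual request.
               void onAwbLocked(CaptureResult result) throws CameraAccessException {
                 previewBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                         CameraMetadata.CONTROL_AWB_MODE_OFF);
                 previewBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
                 previewBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                         result.get(CaptureResult.COLOR_CORRECTION_GAINS));
                 previewBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                         result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
                 session.setRepeatingRequest(previewBuilder.build(), callback, handler);
               }
        -->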
1077        <entry name="awbMode" type="byte" visibility="public" enum="true"
1078               hwlevel="legacy">
1079          <enum>
1080            <value>OFF
1081            <notes>
1082            The camera device's auto-white balance routine is disabled.
1083
1084            The application-selected color transform matrix
1085            (android.colorCorrection.transform) and gains
1086            (android.colorCorrection.gains) are used by the camera
1087            device for manual white balance control.
1088            </notes>
1089            </value>
1090            <value>AUTO
1091            <notes>
1092            The camera device's auto-white balance routine is active.
1093
1094            The application's values for android.colorCorrection.transform
1095            and android.colorCorrection.gains are ignored.
1096            For devices that support the MANUAL_POST_PROCESSING capability, the
1097            values used by the camera device for the transform and gains
1098            will be available in the capture result for this request.
1099            </notes>
1100            </value>
1101            <value>INCANDESCENT
1102            <notes>
1103            The camera device's auto-white balance routine is disabled;
1104            the camera device uses incandescent light as the assumed scene
1105            illumination for white balance.
1106
1107            While the exact white balance transforms are up to the
1108            camera device, they will approximately match the CIE
1109            standard illuminant A.
1110
1111            The application's values for android.colorCorrection.transform
1112            and android.colorCorrection.gains are ignored.
1113            For devices that support the MANUAL_POST_PROCESSING capability, the
1114            values used by the camera device for the transform and gains
1115            will be available in the capture result for this request.
1116            </notes>
1117            </value>
1118            <value>FLUORESCENT
1119            <notes>
1120            The camera device's auto-white balance routine is disabled;
1121            the camera device uses fluorescent light as the assumed scene
1122            illumination for white balance.
1123
1124            While the exact white balance transforms are up to the
1125            camera device, they will approximately match the CIE
1126            standard illuminant F2.
1127
1128            The application's values for android.colorCorrection.transform
1129            and android.colorCorrection.gains are ignored.
1130            For devices that support the MANUAL_POST_PROCESSING capability, the
1131            values used by the camera device for the transform and gains
1132            will be available in the capture result for this request.
1133            </notes>
1134            </value>
1135            <value>WARM_FLUORESCENT
1136            <notes>
1137            The camera device's auto-white balance routine is disabled;
1138            the camera device uses warm fluorescent light as the assumed scene
1139            illumination for white balance.
1140
1141            While the exact white balance transforms are up to the
1142            camera device, they will approximately match the CIE
1143            standard illuminant F4.
1144
1145            The application's values for android.colorCorrection.transform
1146            and android.colorCorrection.gains are ignored.
1147            For devices that support the MANUAL_POST_PROCESSING capability, the
1148            values used by the camera device for the transform and gains
1149            will be available in the capture result for this request.
1150            </notes>
1151            </value>
1152            <value>DAYLIGHT
1153            <notes>
1154            The camera device's auto-white balance routine is disabled;
1155            the camera device uses daylight light as the assumed scene
1156            illumination for white balance.
1157
1158            While the exact white balance transforms are up to the
1159            camera device, they will approximately match the CIE
1160            standard illuminant D65.
1161
1162            The application's values for android.colorCorrection.transform
1163            and android.colorCorrection.gains are ignored.
1164            For devices that support the MANUAL_POST_PROCESSING capability, the
1165            values used by the camera device for the transform and gains
1166            will be available in the capture result for this request.
1167            </notes>
1168            </value>
1169            <value>CLOUDY_DAYLIGHT
1170            <notes>
1171            The camera device's auto-white balance routine is disabled;
1172            the camera device uses cloudy daylight light as the assumed scene
1173            illumination for white balance.
1174
1175            The application's values for android.colorCorrection.transform
1176            and android.colorCorrection.gains are ignored.
1177            For devices that support the MANUAL_POST_PROCESSING capability, the
1178            values used by the camera device for the transform and gains
1179            will be available in the capture result for this request.
1180            </notes>
1181            </value>
1182            <value>TWILIGHT
1183            <notes>
1184            The camera device's auto-white balance routine is disabled;
1185            the camera device uses twilight light as the assumed scene
1186            illumination for white balance.
1187
1188            The application's values for android.colorCorrection.transform
1189            and android.colorCorrection.gains are ignored.
1190            For devices that support the MANUAL_POST_PROCESSING capability, the
1191            values used by the camera device for the transform and gains
1192            will be available in the capture result for this request.
1193            </notes>
1194            </value>
1195            <value>SHADE
1196            <notes>
1197            The camera device's auto-white balance routine is disabled;
1198            the camera device uses shade light as the assumed scene
1199            illumination for white balance.
1200
1201            The application's values for android.colorCorrection.transform
1202            and android.colorCorrection.gains are ignored.
1203            For devices that support the MANUAL_POST_PROCESSING capability, the
1204            values used by the camera device for the transform and gains
1205            will be available in the capture result for this request.
1206            </notes>
1207            </value>
1208          </enum>
1209          <description>Whether auto-white balance (AWB) is currently setting the color
1210          transform fields, and what its illumination target
1211          is.</description>
1212          <range>android.control.awbAvailableModes</range>
1213          <details>
1214          This control is only effective if android.control.mode is AUTO.
1215
1216          When set to the AUTO mode, the camera device's auto-white balance
1217          routine is enabled, overriding the application's selected
1218          android.colorCorrection.transform, android.colorCorrection.gains and
1219          android.colorCorrection.mode. Note that when android.control.aeMode
1220          is OFF, the behavior of AWB is device dependent. It is recommended to
1221          also set AWB mode to OFF or lock AWB by using android.control.awbLock before
1222          setting AE mode to OFF.
1223
1224          When set to the OFF mode, the camera device's auto-white balance
1225          routine is disabled. The application manually controls the white
1226          balance by android.colorCorrection.transform, android.colorCorrection.gains
1227          and android.colorCorrection.mode.
1228
1229          When set to any of the other modes, the camera device's auto-white
1230          balance routine is disabled. The camera device uses each
1231          particular illumination target for white balance
1232          adjustment. The application's values for
1233          android.colorCorrection.transform,
1234          android.colorCorrection.gains and
1235          android.colorCorrection.mode are ignored.
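
          As an illustrative sketch (not part of this definition), an application on a device that
          lists DAYLIGHT in android.control.awbAvailableModes could request the fixed illuminant and
          then read back the color correction values the device used; the `builder` and `session`
          variables are assumed placeholders:

              builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
              builder.set(CaptureRequest.CONTROL_AWB_MODE,
                      CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT);
              try {
                  session.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
                      @Override
                      public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
                              TotalCaptureResult result) {
                          // On MANUAL_POST_PROCESSING devices, the result reports the transform
                          // and gains the device actually used for the D65-like illuminant.
                          RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
                          ColorSpaceTransform ccm =
                                  result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
                          // These could later be replayed with CONTROL_AWB_MODE_OFF and
                          // COLOR_CORRECTION_MODE_TRANSFORM_MATRIX for repeatable white balance.
                      }
                  }, null);
              } catch (CameraAccessException e) {
                  // Handle camera errors as appropriate.
              }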
1236          </details>
1237          <tag id="BC" />
1238        </entry>
1239        <entry name="awbRegions" type="int32" visibility="public"
1240               optional="true" container="array" typedef="meteringRectangle">
1241          <array>
1242            <size>5</size>
1243            <size>area_count</size>
1244          </array>
1245          <description>List of metering areas to use for auto-white-balance illuminant
1246          estimation.</description>
1247          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
1248          <range>Coordinates must be between `[(0,0), (width, height))` of
1249          android.sensor.info.activeArraySize</range>
1250          <details>
1251              Not available if android.control.maxRegionsAwb is 0.
1252              Otherwise will always be present.
1253
1254              The maximum number of regions supported by the device is determined by the value
1255              of android.control.maxRegionsAwb.
1256
1257              The coordinate system is based on the active pixel array,
1258              with (0,0) being the top-left pixel in the active pixel array, and
1259              (android.sensor.info.activeArraySize.width - 1,
1260              android.sensor.info.activeArraySize.height - 1) being the
1261              bottom-right pixel in the active pixel array.
1262
1263              The weight must range from 0 to 1000, and represents a weight
1264              for every pixel in the area. This means that a large metering area
1265              with the same weight as a smaller area will have more effect in
1266              the metering result. Metering areas can partially overlap and the
1267              camera device will add the weights in the overlap region.
1268
1269              The weights are relative to weights of other white balance metering regions, so if
1270              only one region is used, all non-zero weights will have the same effect. A region with
1271              0 weight is ignored.
1272
1273              If all regions have 0 weight, then no specific metering area needs to be used by the
1274              camera device.
1275
1276              If the metering region is outside the used android.scaler.cropRegion returned in
1277              capture result metadata, the camera device will ignore the sections outside the crop
1278              region and output only the intersection rectangle as the metering region in the result
1279              metadata.  If the region is entirely outside the crop region, it will be ignored and
1280              not reported in the result metadata.
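
              For illustration, a minimal sketch that meters AWB on the center quarter of the active
              array; the `characteristics` and `builder` variables are assumed placeholders, and
              android.control.maxRegionsAwb is assumed to be at least 1:

                  Rect active =
                          characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                  MeteringRectangle centerRegion = new MeteringRectangle(
                          active.width() / 4,   // x of the region's top-left, in active array coordinates
                          active.height() / 4,  // y of the region's top-left
                          active.width() / 2,   // region width
                          active.height() / 2,  // region height
                          MeteringRectangle.METERING_WEIGHT_MAX);  // weight 1000
                  builder.set(CaptureRequest.CONTROL_AWB_REGIONS,
                          new MeteringRectangle[] { centerRegion });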
1281          </details>
1282          <hal_details>
1283              The HAL level representation of MeteringRectangle[] is a
1284              int[5 * area_count].
1285              Every five elements represent a metering region of
1286              (xmin, ymin, xmax, ymax, weight).
1287              The rectangle is defined to be inclusive on xmin and ymin, but
1288              exclusive on xmax and ymax.
1289          </hal_details>
1290          <tag id="BC" />
1291        </entry>
1292        <entry name="captureIntent" type="byte" visibility="public" enum="true"
1293               hwlevel="legacy">
1294          <enum>
1295            <value>CUSTOM
1296            <notes>The goal of this request doesn't fall into the other
1297            categories. The camera device will default to preview-like
1298            behavior.</notes></value>
1299            <value>PREVIEW
1300            <notes>This request is for a preview-like use case.
1301
1302            The precapture trigger may be used to start off a metering
1303            w/flash sequence.
1304            </notes></value>
1305            <value>STILL_CAPTURE
1306            <notes>This request is for a still capture-type
1307            use case.
1308
1309            If the flash unit is under automatic control, it may fire as needed.
1310            </notes></value>
1311            <value>VIDEO_RECORD
1312            <notes>This request is for a video recording
1313            use case.</notes></value>
1314            <value>VIDEO_SNAPSHOT
1315            <notes>This request is for a video snapshot (still
1316            image while recording video) use case.
1317
1318            The camera device should take the highest-quality image
1319            possible (given the other settings) without disrupting the
1320            frame rate of video recording.  </notes></value>
1321            <value>ZERO_SHUTTER_LAG
1322            <notes>This request is for a ZSL use case; the
1323            application will stream full-resolution images and
1324            reprocess one or several later for a final
1325            capture.
1326            </notes></value>
1327            <value>MANUAL
1328            <notes>This request is for a manual capture use case where
1329            the application wants to directly control the capture parameters.
1330
1331            For example, the application may wish to manually control
1332            android.sensor.exposureTime, android.sensor.sensitivity, etc.
1333            </notes></value>
1334          </enum>
1335          <description>Information to the camera device 3A (auto-exposure,
1336          auto-focus, auto-white balance) routines about the purpose
1337          of this capture, to help the camera device to decide optimal 3A
1338          strategy.</description>
1339          <details>This control (except for MANUAL) is only effective if
1340          `android.control.mode != OFF` and any 3A routine is active.
1341
1342          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
1343          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
1344          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
1345          always supported.
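
          As a hedged illustration, an application might check the advertised capabilities before
          choosing an intent; the `characteristics` and `builder` variables are assumed placeholders:

              int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
              boolean hasManualSensor = false;
              for (int c : caps) {
                  if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                      hasManualSensor = true;
                  }
              }
              // Fall back to STILL_CAPTURE when the MANUAL intent cannot be supported.
              int intent = hasManualSensor
                      ? CameraMetadata.CONTROL_CAPTURE_INTENT_MANUAL
                      : CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
              builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, intent);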
1346          </details>
1347          <tag id="BC" />
1348        </entry>
1349        <entry name="effectMode" type="byte" visibility="public" enum="true"
1350               hwlevel="legacy">
1351          <enum>
1352            <value>OFF
1353              <notes>
1354              No color effect will be applied.
1355              </notes>
1356            </value>
1357            <value optional="true">MONO
1358              <notes>
1359              A "monocolor" effect where the image is mapped into
1360              a single color.
1361
1362              This will typically be grayscale.
1363              </notes>
1364            </value>
1365            <value optional="true">NEGATIVE
1366              <notes>
1367              A "photo-negative" effect where the image's colors
1368              are inverted.
1369              </notes>
1370            </value>
1371            <value optional="true">SOLARIZE
1372              <notes>
1373              A "solarisation" effect (Sabattier effect) where the
1374              image is wholly or partially reversed in
1375              tone.
1376              </notes>
1377            </value>
1378            <value optional="true">SEPIA
1379              <notes>
1380              A "sepia" effect where the image is mapped into warm
1381              gray, red, and brown tones.
1382              </notes>
1383            </value>
1384            <value optional="true">POSTERIZE
1385              <notes>
1386              A "posterization" effect where the image uses
1387              discrete regions of tone rather than a continuous
1388              gradient of tones.
1389              </notes>
1390            </value>
1391            <value optional="true">WHITEBOARD
1392              <notes>
1393              A "whiteboard" effect where the image is typically displayed
1394              as regions of white, with black or grey details.
1395              </notes>
1396            </value>
1397            <value optional="true">BLACKBOARD
1398              <notes>
1399              A "blackboard" effect where the image is typically displayed
1400              as regions of black, with white or grey details.
1401              </notes>
1402            </value>
1403            <value optional="true">AQUA
1404              <notes>
1405              An "aqua" effect where a blue hue is added to the image.
1406              </notes>
1407            </value>
1408          </enum>
1409          <description>A special color effect to apply.</description>
1410          <range>android.control.availableEffects</range>
1411          <details>
1412          When this mode is set, a color effect will be applied
1413          to images produced by the camera device. The interpretation
1414          and implementation of these color effects is left to the
1415          implementor of the camera device, and should not be
1416          depended on to be consistent (or present) across all
1417          devices.
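
          For illustration, a sketch that applies SEPIA only when the device lists it in
          android.control.availableEffects; the `characteristics` and `builder` variables are
          assumed placeholders:

              int[] effects = characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS);
              for (int effect : effects) {
                  if (effect == CameraMetadata.CONTROL_EFFECT_MODE_SEPIA) {
                      builder.set(CaptureRequest.CONTROL_EFFECT_MODE,
                              CameraMetadata.CONTROL_EFFECT_MODE_SEPIA);
                      break;
                  }
              }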
1418          </details>
1419          <tag id="BC" />
1420        </entry>
1421        <entry name="mode" type="byte" visibility="public" enum="true"
1422               hwlevel="legacy">
1423          <enum>
1424            <value>OFF
1425            <notes>Full application control of pipeline.
1426
1427            All control by the device's metering and focusing (3A)
1428            routines is disabled, and no other settings in
1429            android.control.* have any effect, except that
1430            android.control.captureIntent may be used by the camera
1431            device to select post-processing values for processing
1432            blocks that do not allow for manual control, or are not
1433            exposed by the camera API.
1434
1435            However, the camera device's 3A routines may continue to
1436            collect statistics and update their internal state so that
1437            when control is switched to AUTO mode, good control values
1438            can be immediately applied.
1439            </notes></value>
1440            <value>AUTO
1441            <notes>Use settings for each individual 3A routine.
1442
1443            Manual control of capture parameters is disabled. All
1444            controls in android.control.* besides sceneMode take
1445            effect.</notes></value>
1446            <value optional="true">USE_SCENE_MODE
1447            <notes>Use a specific scene mode.
1448
1449            Enabling this disables control.aeMode, control.awbMode and
1450            control.afMode controls; the camera device will ignore
1451            those settings while USE_SCENE_MODE is active (except for
1452            FACE_PRIORITY scene mode). Other control entries are still active.
1453            This setting can only be used if scene mode is supported (i.e.
1454            android.control.availableSceneModes
1455            contains at least one mode other than DISABLED).</notes></value>
1456            <value optional="true">OFF_KEEP_STATE
1457            <notes>Same as OFF mode, except that this capture will not be
1458            used by camera device background auto-exposure, auto-white balance and
1459            auto-focus algorithms (3A) to update their statistics.
1460
1461            Specifically, the 3A routines are locked to the last
1462            values set from a request with AUTO, OFF, or
1463            USE_SCENE_MODE, and any statistics or state updates
1464            collected from manual captures with OFF_KEEP_STATE will be
1465            discarded by the camera device.
1466            </notes></value>
1467          </enum>
1468          <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
1469          routines.</description>
1470          <range>android.control.availableModes</range>
1471          <details>
1472          This is a top-level 3A control switch. When set to OFF, all 3A control
1473          by the camera device is disabled. The application must set the fields for
1474          capture parameters itself.
1475
1476          When set to AUTO, the individual algorithm controls in
1477          android.control.* are in effect, such as android.control.afMode.
1478
1479          When set to USE_SCENE_MODE, the individual controls in
1480          android.control.* are mostly disabled, and the camera device implements
1481          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
1482          as it wishes. The camera device scene mode 3A settings are provided by
1483          {@link android.hardware.camera2.CaptureResult capture results}.
1484
1485          When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference
1486          is that this frame will not be used by the camera device's background 3A statistics
1487          update, as if this frame were never captured. This mode can be used in scenarios
1488          where the application doesn't want a manually controlled 3A capture to affect
1489          the subsequent auto 3A capture results.
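
          For illustration, a sketch of a single OFF_KEEP_STATE capture that does not disturb the
          ongoing auto 3A state; it assumes a device with the MANUAL_SENSOR capability, and the
          `builder` and `session` variables are placeholders:

              builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF_KEEP_STATE);
              builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);  // 10 ms, in nanoseconds
              builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);            // ISO 400
              try {
                  // A single manual frame; the 3A routines will not use it to update their state.
                  session.capture(builder.build(), null, null);
              } catch (CameraAccessException e) {
                  // Handle camera errors as appropriate.
              }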
1490          </details>
1491          <tag id="BC" />
1492        </entry>
1493        <entry name="sceneMode" type="byte" visibility="public" enum="true"
1494               hwlevel="legacy">
1495          <enum>
1496            <value id="0">DISABLED
1497              <notes>
1498              Indicates that no scene modes are set for a given capture request.
1499              </notes>
1500            </value>
1501            <value>FACE_PRIORITY
1502              <notes>If face detection support exists, use face
1503              detection data for auto-focus, auto-white balance, and
1504              auto-exposure routines.
1505
1506              If face detection statistics are disabled
1507              (i.e. android.statistics.faceDetectMode is set to OFF),
1508              this should still operate correctly (but will not return
1509              face detection statistics to the framework).
1510
1511              Unlike the other scene modes, android.control.aeMode,
1512              android.control.awbMode, and android.control.afMode
1513              remain active when FACE_PRIORITY is set.
1514              </notes>
1515            </value>
1516            <value optional="true">ACTION
1517              <notes>
1518              Optimized for photos of quickly moving objects.
1519
1520              Similar to SPORTS.
1521              </notes>
1522            </value>
1523            <value optional="true">PORTRAIT
1524              <notes>
1525              Optimized for still photos of people.
1526              </notes>
1527            </value>
1528            <value optional="true">LANDSCAPE
1529              <notes>
1530              Optimized for photos of distant macroscopic objects.
1531              </notes>
1532            </value>
1533            <value optional="true">NIGHT
1534              <notes>
1535              Optimized for low-light settings.
1536              </notes>
1537            </value>
1538            <value optional="true">NIGHT_PORTRAIT
1539              <notes>
1540              Optimized for still photos of people in low-light
1541              settings.
1542              </notes>
1543            </value>
1544            <value optional="true">THEATRE
1545              <notes>
1546              Optimized for dim, indoor settings where flash must
1547              remain off.
1548              </notes>
1549            </value>
1550            <value optional="true">BEACH
1551              <notes>
1552              Optimized for bright, outdoor beach settings.
1553              </notes>
1554            </value>
1555            <value optional="true">SNOW
1556              <notes>
1557              Optimized for bright, outdoor settings containing snow.
1558              </notes>
1559            </value>
1560            <value optional="true">SUNSET
1561              <notes>
1562              Optimized for scenes of the setting sun.
1563              </notes>
1564            </value>
1565            <value optional="true">STEADYPHOTO
1566              <notes>
1567              Optimized to avoid blurry photos due to small amounts of
1568              device motion (for example: due to hand shake).
1569              </notes>
1570            </value>
1571            <value optional="true">FIREWORKS
1572              <notes>
1573              Optimized for nighttime photos of fireworks.
1574              </notes>
1575            </value>
1576            <value optional="true">SPORTS
1577              <notes>
1578              Optimized for photos of quickly moving people.
1579
1580              Similar to ACTION.
1581              </notes>
1582            </value>
1583            <value optional="true">PARTY
1584              <notes>
1585              Optimized for dim, indoor settings with multiple moving
1586              people.
1587              </notes>
1588            </value>
1589            <value optional="true">CANDLELIGHT
1590              <notes>
1591              Optimized for dim settings where the main light source
1592              is a flame.
1593              </notes>
1594            </value>
1595            <value optional="true">BARCODE
1596              <notes>
1597              Optimized for accurately capturing a photo of a barcode
1598              for use by camera applications that wish to read the
1599              barcode value.
1600              </notes>
1601            </value>
1602            <value deprecated="true" optional="true">HIGH_SPEED_VIDEO
1603              <notes>
1604              This is deprecated, please use {@link
1605              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
1606              and {@link
1607              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
1608              for high speed video recording.
1609
1610              Optimized for high speed video recording (frame rate >=60fps) use case.
1611
1612              The supported high speed video sizes and fps ranges are specified in
1613              android.control.availableHighSpeedVideoConfigurations. To get desired
1614              output frame rates, the application is only allowed to select video size
1615              and fps range combinations listed in this static metadata. The fps range
1616              can be controlled via android.control.aeTargetFpsRange.
1617
1618              In this mode, the camera device will override aeMode, awbMode, and afMode to
1619              ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
1620              controls will be overridden to be FAST. Therefore, no manual control of capture
1621              and post-processing parameters is possible. All other controls operate the
1622              same as when android.control.mode == AUTO. This means that all other
1623              android.control.* fields continue to work, such as
1624
1625              * android.control.aeTargetFpsRange
1626              * android.control.aeExposureCompensation
1627              * android.control.aeLock
1628              * android.control.awbLock
1629              * android.control.effectMode
1630              * android.control.aeRegions
1631              * android.control.afRegions
1632              * android.control.awbRegions
1633              * android.control.afTrigger
1634              * android.control.aePrecaptureTrigger
1635
1636              Outside of android.control.*, the following controls will work:
1637
1638              * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
1639              * android.lens.opticalStabilizationMode (if it is supported)
1640              * android.scaler.cropRegion
1641              * android.statistics.faceDetectMode
1642
1643              For the high speed recording use case, the actual maximum supported frame rate may
1644              be lower than what the camera can output, depending on the destination Surfaces for
1645              the image data. For example, if the destination surface is from a video encoder,
1646              the application needs to check whether the video encoder is capable of supporting the
1647              high frame rate for a given video size, or it will end up with a lower recording
1648              frame rate. If the destination surface is from a preview window, the preview frame
1649              rate will be bounded by the screen refresh rate.
1650
1651              The camera device will only support up to 2 output high speed streams
1652              (processed non-stalling format defined in android.request.maxNumOutputStreams)
1653              in this mode. This control will be effective only if all of the below conditions are true:
1654
1655              * The application created no more than maxNumHighSpeedStreams processed non-stalling
1656              format output streams, where maxNumHighSpeedStreams is calculated as
1657              min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
1658              * The stream sizes are selected from the sizes reported by
1659              android.control.availableHighSpeedVideoConfigurations.
1660              * No processed non-stalling or raw streams are configured.
1661
1662              When the above conditions are NOT satisfied, the controls of this mode and
1663              android.control.aeTargetFpsRange will be ignored by the camera device,
1664              the camera device will fall back to android.control.mode `==` AUTO,
1665              and the returned capture result metadata will give the fps range chosen
1666              by the camera device.
1667
1668              Switching into or out of this mode may trigger some camera ISP/sensor
1669              reconfigurations, which may introduce extra latency. It is recommended that
1670              the application avoid unnecessary scene mode switches as much as possible.
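
              For illustration, a sketch of the recommended replacement APIs named above;
              `cameraDevice`, `surface` (configured with a size/fps combination from
              android.control.availableHighSpeedVideoConfigurations), and `handler` are assumed
              placeholders, and the 120fps range is only an example:

                  try {
                      cameraDevice.createConstrainedHighSpeedCaptureSession(Arrays.asList(surface),
                              new CameraCaptureSession.StateCallback() {
                                  @Override
                                  public void onConfigured(CameraCaptureSession session) {
                                      try {
                                          CaptureRequest.Builder builder =
                                                  cameraDevice.createCaptureRequest(
                                                          CameraDevice.TEMPLATE_RECORD);
                                          builder.addTarget(surface);
                                          // A size/fps combination listed in
                                          // android.control.availableHighSpeedVideoConfigurations.
                                          builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                                                  Range.create(120, 120));
                                          CameraConstrainedHighSpeedCaptureSession hsSession =
                                                  (CameraConstrainedHighSpeedCaptureSession) session;
                                          hsSession.setRepeatingBurst(
                                                  hsSession.createHighSpeedRequestList(builder.build()),
                                                  null, handler);
                                      } catch (CameraAccessException e) {
                                          // Handle camera errors as appropriate.
                                      }
                                  }
                                  @Override
                                  public void onConfigureFailed(CameraCaptureSession session) { }
                              }, handler);
                  } catch (CameraAccessException e) {
                      // Handle camera errors as appropriate.
                  }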
1671              </notes>
1672            </value>
1673            <value optional="true">HDR
1674              <notes>
1675              Turn on a device-specific high dynamic range (HDR) mode.
1676
1677              In this scene mode, the camera device captures images
1678              that keep a larger range of scene illumination levels
1679              visible in the final image. For example, when taking a
1680              picture of an object in front of a bright window, both
1681              the object and the scene through the window may be
1682              visible when using HDR mode, while in normal AUTO mode,
1683              one or the other may be poorly exposed. As a tradeoff,
1684              HDR mode generally takes much longer to capture a single
1685              image, has no user control, and may have other artifacts
1686              depending on the HDR method used.
1687
1688              Therefore, HDR captures operate at a much slower rate
1689              than regular captures.
1690
1691              In this mode, on LIMITED or FULL devices, when a request
1692              is made with a android.control.captureIntent of
1693              STILL_CAPTURE, the camera device will capture an image
1694              using a high dynamic range capture technique.  On LEGACY
1695              devices, captures that target a JPEG-format output will
1696              be captured with HDR, and the capture intent is not
1697              relevant.
1698
1699              The HDR capture may involve the device capturing a burst
1700              of images internally and combining them into one, or it
1701              may involve the device using specialized high dynamic
1702              range capture hardware. In all cases, a single image is
1703              produced in response to a capture request submitted
1704              while in HDR mode.
1705
1706              Since substantial post-processing is generally needed to
1707              produce an HDR image, only YUV, PRIVATE, and JPEG
1708              outputs are supported for LIMITED/FULL device HDR
1709              captures, and only JPEG outputs are supported for LEGACY
1710              HDR captures. Using a RAW output for HDR capture is not
1711              supported.
1712
1713              Some devices may also support always-on HDR, which
1714              applies HDR processing at full frame rate.  For these
1715              devices, intents other than STILL_CAPTURE will also
1716              produce an HDR output with no frame rate impact compared
1717              to normal operation, though the quality may be lower
1718              than for STILL_CAPTURE intents.
1719
1720              If SCENE_MODE_HDR is used with unsupported output types
1721              or capture intents, the images captured will be as if
1722              the SCENE_MODE was not enabled at all.
1723              </notes>
1724            </value>
1725            <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
1726              <notes>Same as FACE_PRIORITY scene mode, except that the camera
1727              device will choose higher sensitivity values (android.sensor.sensitivity)
1728              under low light conditions.
1729
1730              The camera device may be tuned to expose the images in a reduced
1731              sensitivity range to produce the best quality images. For example,
1732              if the android.sensor.info.sensitivityRange gives range of [100, 1600],
1733              the camera device auto-exposure routine tuning process may limit the actual
1734              exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
1735              excessive in order to preserve the image quality. In this situation, the image under
1736              low light may be under-exposed when the sensor max exposure time (bounded by the
1737              android.control.aeTargetFpsRange when android.control.aeMode is one of the
1738              ON_* modes) and effective max sensitivity are reached. This scene mode allows the
1739              camera device auto-exposure routine to increase the sensitivity up to the max
1740              sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
1741              dark and the max exposure time is reached. The captured images may be noisier
1742              compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
1743              recommended that the application only use this scene mode when it is capable of
1744              reducing the noise level of the captured images.
1745
1746              Unlike the other scene modes, android.control.aeMode,
1747              android.control.awbMode, and android.control.afMode
1748              remain active when FACE_PRIORITY_LOW_LIGHT is set.
1749              </notes>
1750            </value>
1751            <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
1752              <notes>
1753                Scene mode values within the range of
1754                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1755                customized scene modes.
1756              </notes>
1757            </value>
1758            <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
1759              <notes>
1760                Scene mode values within the range of
1761                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
1762                customized scene modes.
1763              </notes>
1764            </value>
1765          </enum>
1766          <description>
1767          Control for which scene mode is currently active.
1768          </description>
1769          <range>android.control.availableSceneModes</range>
1770          <details>
1771          Scene modes are custom camera modes optimized for a certain set of conditions and
1772          capture settings.
1773
1774          This is the mode that is active when
1775          `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
1776          disable android.control.aeMode, android.control.awbMode, and android.control.afMode
1777          while in use.
1778
1779          The interpretation and implementation of these scene modes is left
1780          to the implementor of the camera device. Their behavior will not be
1781          consistent across all devices, and any given device may only implement
1782          a subset of these modes.
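
          For illustration, a sketch that enables the HDR scene mode only when it is advertised;
          the `characteristics` and `builder` variables are assumed placeholders:

              int[] sceneModes =
                      characteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
              for (int mode : sceneModes) {
                  if (mode == CameraMetadata.CONTROL_SCENE_MODE_HDR) {
                      builder.set(CaptureRequest.CONTROL_MODE,
                              CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
                      builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                              CameraMetadata.CONTROL_SCENE_MODE_HDR);
                      builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                              CameraMetadata.CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
                      break;
                  }
              }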
1783          </details>
1784          <hal_details>
1785          HAL implementations that include scene modes are expected to provide
1786          the per-scene settings to use for android.control.aeMode,
1787          android.control.awbMode, and android.control.afMode in
1788          android.control.sceneModeOverrides.
1789
1790          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
1791          the HAL must list supported video size and fps range in
1792          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
1793          1280x720, if the HAL has two different sensor configurations for normal streaming
1794          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
1795          requests, the HAL may have to switch between different sensor modes.
1796          This mode is deprecated in HAL3.3. To support high speed video recording, please implement
1797          android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
1798          capability defined in android.request.availableCapabilities.
1799          </hal_details>
1800          <tag id="BC" />
1801        </entry>
1802        <entry name="videoStabilizationMode" type="byte" visibility="public"
1803               enum="true" hwlevel="legacy">
1804          <enum>
1805            <value>OFF
1806            <notes>
1807              Video stabilization is disabled.
1808            </notes></value>
1809            <value>ON
1810            <notes>
1811              Video stabilization is enabled.
1812            </notes></value>
1813          </enum>
1814          <description>Whether video stabilization is
1815          active.</description>
1816          <details>
1817          Video stabilization automatically warps images from
1818          the camera in order to stabilize motion between consecutive frames.
1819
1820          If enabled, video stabilization can modify the
1821          android.scaler.cropRegion to keep the video stream stabilized.
1822
1823          Switching between different video stabilization modes may take several
1824          frames to initialize; the camera device will report the current mode
1825          in capture result metadata. For example, when "ON" mode is requested,
1826          the video stabilization mode in the first several capture results may
1827          still be "OFF", and it will become "ON" when the initialization is
1828          done.
1829
1830          In addition, not all recording sizes or frame rates may be supported for
1831          stabilization by a device that reports stabilization support. It is guaranteed
1832          that an output targeting a MediaRecorder or MediaCodec will be stabilized if
1833          the recording resolution is less than or equal to 1920 x 1080 (width less than
1834          or equal to 1920, height less than or equal to 1080), and the recording
1835          frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
1836          android.control.videoStabilizationMode field will return
1837          OFF if the recording output is not stabilized, or if there are no output
1838          Surface types that can be stabilized.
1839
1840          If a camera device supports both this mode and OIS
1841          (android.lens.opticalStabilizationMode), turning both modes on may
1842          produce undesirable interaction, so it is recommended not to enable
1843          both at the same time.
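
          For illustration, a sketch that requests video stabilization and watches the capture
          results until the mode reports ON; the `builder` and `session` variables are assumed
          placeholders, and OIS is assumed to be left off to avoid the interaction noted above:

              builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                      CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
              try {
                  session.setRepeatingRequest(builder.build(),
                          new CameraCaptureSession.CaptureCallback() {
                      @Override
                      public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
                              TotalCaptureResult result) {
                          Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
                          // The first several results may still report OFF while the device
                          // initializes; treat the stream as stabilized only once ON is reported.
                      }
                  }, null);
              } catch (CameraAccessException e) {
                  // Handle camera errors as appropriate.
              }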
1844          </details>
1845          <tag id="BC" />
1846        </entry>
1847      </controls>
1848      <static>
1849        <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
1850               type_notes="list of enums" container="array" typedef="enumList"
1851               hwlevel="legacy">
1852          <array>
1853            <size>n</size>
1854          </array>
1855          <description>
1856            List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
1857            supported by this camera device.
1858          </description>
1859          <range>Any value listed in android.control.aeAntibandingMode</range>
1860          <details>
1861            Not all of the auto-exposure anti-banding modes may be
1862            supported by a given camera device. This field lists the
1863            valid anti-banding modes that the application may request
1864            for this camera device with the
1865            android.control.aeAntibandingMode control.
1866          </details>
1867          <tag id="BC" />
1868        </entry>
1869        <entry name="aeAvailableModes" type="byte" visibility="public"
1870               type_notes="list of enums" container="array" typedef="enumList"
1871               hwlevel="legacy">
1872          <array>
1873            <size>n</size>
1874          </array>
1875          <description>
1876            List of auto-exposure modes for android.control.aeMode that are supported by this camera
1877            device.
1878          </description>
1879          <range>Any value listed in android.control.aeMode</range>
1880          <details>
1881            Not all the auto-exposure modes may be supported by a
1882            given camera device, especially if no flash unit is
1883            available. This entry lists the valid modes for
1884            android.control.aeMode for this camera device.
1885
1886            All camera devices support ON, and all camera devices with flash
1887            units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
1888
1889            FULL mode camera devices always support OFF mode,
1890            which enables application control of camera exposure time,
1891            sensitivity, and frame duration.
1892
1893            LEGACY mode camera devices never support OFF mode.
1894            LIMITED mode devices support OFF if they support the MANUAL_SENSOR
1895            capability.
1896          </details>
1897          <tag id="BC" />
1898        </entry>
1899        <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
1900               type_notes="list of pairs of frame rates"
1901               container="array" typedef="rangeInt"
1902               hwlevel="legacy">
1903          <array>
1904            <size>2</size>
1905            <size>n</size>
1906          </array>
1907          <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
1908          this camera device.</description>
1909          <units>Frames per second (FPS)</units>
1910          <details>
1911          For devices at the LEGACY level or above:
1912
1913          * For constant-framerate recording, for each normal
1914          {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
1915          {@link android.media.CamcorderProfile CamcorderProfile} that has
1916          {@link android.media.CamcorderProfile#quality quality} in
1917          the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
1918          {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
1919          supported by the device and has
1920          {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will
1921          always include (`x`,`x`).
1922
1923          * Also, a camera device must either not support any
1924          {@link android.media.CamcorderProfile CamcorderProfile},
1925          or support at least one
1926          normal {@link android.media.CamcorderProfile CamcorderProfile} that has
1927          {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` &gt;= 24.
1928
1929          For devices at the LIMITED level or above:
1930
1931          * For the YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
1932          and (`max`, `max`) where `min` &lt;= 15 and `max` = the maximum output frame rate of the
1933          maximum YUV_420_888 output size.
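
          For illustration, a sketch that prefers a fixed (30, 30) range for constant-framerate
          recording when the device lists one; the `characteristics` and `builder` variables are
          assumed placeholders:

              Range&lt;Integer&gt;[] fpsRanges =
                      characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
              Range&lt;Integer&gt; chosen = fpsRanges[0];
              for (Range&lt;Integer&gt; r : fpsRanges) {
                  if (r.getLower() == 30 &amp;&amp; r.getUpper() == 30) {
                      chosen = r;  // a fixed range avoids frame rate fluctuation while recording
                  }
              }
              builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, chosen);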
1934          </details>
1935          <tag id="BC" />
1936        </entry>
1937        <entry name="aeCompensationRange" type="int32" visibility="public"
1938               container="array" typedef="rangeInt"
1939               hwlevel="legacy">
1940          <array>
1941            <size>2</size>
1942          </array>
1943          <description>Maximum and minimum exposure compensation values for
1944          android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
1945          that are supported by this camera device.</description>
1946          <range>
1947            Range [0,0] indicates that exposure compensation is not supported.
1948
1949            For LIMITED and FULL devices, the range must follow the below requirements if exposure
1950            compensation is supported (`range != [0, 0]`):
1951
1952            `Min.exposure compensation * android.control.aeCompensationStep &lt;= -2 EV`
1953
1954            `Max.exposure compensation * android.control.aeCompensationStep &gt;= 2 EV`
1955
1956            LEGACY devices may support a smaller range than this.
1957          </range>
1958          <tag id="BC" />
1959        </entry>
1960        <entry name="aeCompensationStep" type="rational" visibility="public"
1961               hwlevel="legacy">
1962          <description>Smallest step by which the exposure compensation
1963          can be changed.</description>
1964          <units>Exposure Value (EV)</units>
1965          <details>
1966          This is the unit for android.control.aeExposureCompensation. For example, if this key has
1967          a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
1968          that the target EV offset for the auto-exposure routine is -1 EV.
1969
1970          One unit of EV compensation changes the brightness of the captured image by a factor
1971          of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
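
          For illustration, a sketch that converts a desired EV offset into an
          android.control.aeExposureCompensation index and clamps it to
          android.control.aeCompensationRange; the `characteristics` and `builder` variables are
          assumed placeholders:

              Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
              Range&lt;Integer&gt; range =
                      characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
              double desiredEv = 1.0;  // brighten by one stop (a factor of two)
              int index = (int) Math.round(desiredEv / step.doubleValue());  // index 2 when the step is 1/2
              index = Math.max(range.getLower(), Math.min(range.getUpper(), index));
              builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, index);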
1972          </details>
1973          <hal_details>
1974            This must be less than or equal to 1/2.
1975          </hal_details>
1976          <tag id="BC" />
1977        </entry>
1978        <entry name="afAvailableModes" type="byte" visibility="public"
1979               type_notes="List of enums" container="array" typedef="enumList"
1980               hwlevel="legacy">
1981          <array>
1982            <size>n</size>
1983          </array>
1984          <description>
1985          List of auto-focus (AF) modes for android.control.afMode that are
1986          supported by this camera device.
1987          </description>
1988          <range>Any value listed in android.control.afMode</range>
1989          <details>
1990          Not all the auto-focus modes may be supported by a
1991          given camera device. This entry lists the valid modes for
1992          android.control.afMode for this camera device.
1993
1994          All LIMITED and FULL mode camera devices will support OFF mode, and all
1995          camera devices with adjustable focuser units
1996          (`android.lens.info.minimumFocusDistance &gt; 0`) will support AUTO mode.
1997
1998          LEGACY devices will support OFF mode only if they support
1999          focusing to infinity (by also setting android.lens.focusDistance to
2000          `0.0f`).
2001          </details>
2002          <tag id="BC" />
2003        </entry>
2004        <entry name="availableEffects" type="byte" visibility="public"
2005               type_notes="List of enums (android.control.effectMode)." container="array"
2006               typedef="enumList" hwlevel="legacy">
2007          <array>
2008            <size>n</size>
2009          </array>
2010          <description>
2011          List of color effects for android.control.effectMode that are supported by this camera
2012          device.
2013          </description>
2014          <range>Any value listed in android.control.effectMode</range>
2015          <details>
2016          This list contains the color effect modes that can be applied to
2017          images produced by the camera device.
2018          Implementations are not expected to be consistent across all devices.
2019          If no color effect modes are available for a device, this will only list
2020          OFF.
2021
2022          A color effect will only be applied if
2023          android.control.mode != OFF.  OFF is always included in this list.
2024
2025          This control has no effect on the operation of other control routines such
2026          as auto-exposure, white balance, or focus.
2027          </details>
2028          <tag id="BC" />
2029        </entry>
2030        <entry name="availableSceneModes" type="byte" visibility="public"
2031               type_notes="List of enums (android.control.sceneMode)."
2032               container="array" typedef="enumList" hwlevel="legacy">
2033          <array>
2034            <size>n</size>
2035          </array>
2036          <description>
2037          List of scene modes for android.control.sceneMode that are supported by this camera
2038          device.
2039          </description>
2040          <range>Any value listed in android.control.sceneMode</range>
2041          <details>
2042          This list contains scene modes that can be set for the camera device.
2043          Only scene modes that have been fully implemented for the
2044          camera device may be included here. Implementations are not expected
2045          to be consistent across all devices.
2046
2047          If no scene modes are supported by the camera device, this
2048          will be set to DISABLED. Otherwise DISABLED will not be listed.
2049
2050          FACE_PRIORITY is always listed if face detection is
2051          supported (i.e. `android.statistics.info.maxFaceCount &gt;
2052          0`).
2053          </details>
2054          <tag id="BC" />
2055        </entry>
2056        <entry name="availableVideoStabilizationModes" type="byte"
2057               visibility="public" type_notes="List of enums." container="array"
2058               typedef="enumList" hwlevel="legacy">
2059          <array>
2060            <size>n</size>
2061          </array>
2062          <description>
2063          List of video stabilization modes for android.control.videoStabilizationMode
2064          that are supported by this camera device.
2065          </description>
2066          <range>Any value listed in android.control.videoStabilizationMode</range>
2067          <details>
2068          OFF will always be listed.
2069          </details>
2070          <tag id="BC" />
2071        </entry>
2072        <entry name="awbAvailableModes" type="byte" visibility="public"
2073               type_notes="List of enums"
2074               container="array" typedef="enumList" hwlevel="legacy">
2075          <array>
2076            <size>n</size>
2077          </array>
2078          <description>
2079          List of auto-white-balance modes for android.control.awbMode that are supported by this
2080          camera device.
2081          </description>
2082          <range>Any value listed in android.control.awbMode</range>
2083          <details>
2084          Not all the auto-white-balance modes may be supported by a
2085          given camera device. This entry lists the valid modes for
2086          android.control.awbMode for this camera device.
2087
2088          All camera devices will support ON mode.
2089
2090          Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
2091          mode, which enables application control of white balance, by using
2092          android.colorCorrection.transform and android.colorCorrection.gains
2093          (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
2094          mode camera devices.
2095          </details>
2096          <tag id="BC" />
2097        </entry>
2098        <entry name="maxRegions" type="int32" visibility="ndk_public"
2099               container="array" hwlevel="legacy">
2100          <array>
2101            <size>3</size>
2102          </array>
2103          <description>
2104          List of the maximum number of regions that can be used for metering in
2105          auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
2106          this corresponds to the maximum number of elements in
2107          android.control.aeRegions, android.control.awbRegions,
2108          and android.control.afRegions.
2109          </description>
2110          <range>
2111          Value must be &gt;= 0 for each element. For full-capability devices
2112          this value must be &gt;= 1 for AE and AF. The order of the elements is:
2113          `(AE, AWB, AF)`.</range>
2114          <tag id="BC" />
2115        </entry>
2116        <entry name="maxRegionsAe" type="int32" visibility="java_public"
2117               synthetic="true" hwlevel="legacy">
2118          <description>
2119          The maximum number of metering regions that can be used by the auto-exposure (AE)
2120          routine.
2121          </description>
2122          <range>Value will be &gt;= 0. For FULL-capability devices, this
2123          value will be &gt;= 1.
2124          </range>
2125          <details>
2126          This corresponds to the maximum allowed number of elements in
2127          android.control.aeRegions.
2128          </details>
2129          <hal_details>This entry is private to the framework. Fill in
2130          maxRegions to have this entry be automatically populated.
2131          </hal_details>
2132        </entry>
2133        <entry name="maxRegionsAwb" type="int32" visibility="java_public"
2134               synthetic="true" hwlevel="legacy">
2135          <description>
2136          The maximum number of metering regions that can be used by the auto-white balance (AWB)
2137          routine.
2138          </description>
2139          <range>Value will be &gt;= 0.
2140          </range>
2141          <details>
2142          This corresponds to the maximum allowed number of elements in
2143          android.control.awbRegions.
2144          </details>
2145          <hal_details>This entry is private to the framework. Fill in
2146          maxRegions to have this entry be automatically populated.
2147          </hal_details>
2148        </entry>
2149        <entry name="maxRegionsAf" type="int32" visibility="java_public"
2150               synthetic="true" hwlevel="legacy">
2151          <description>
2152          The maximum number of metering regions that can be used by the auto-focus (AF) routine.
2153          </description>
2154          <range>Value will be &gt;= 0. For FULL-capability devices, this
2155          value will be &gt;= 1.
2156          </range>
2157          <details>
2158          This corresponds to the maximum allowed number of elements in
2159          android.control.afRegions.
2160          </details>
2161          <hal_details>This entry is private to the framework. Fill in
2162          maxRegions to have this entry be automatically populated.
2163          </hal_details>
2164        </entry>
2165        <entry name="sceneModeOverrides" type="byte" visibility="system"
2166               container="array" hwlevel="limited">
2167          <array>
2168            <size>3</size>
2169            <size>length(availableSceneModes)</size>
2170          </array>
2171          <description>
2172          Ordered list of auto-exposure, auto-white balance, and auto-focus
2173          settings to use with each available scene mode.
2174          </description>
2175          <range>
2176          For each available scene mode, the list must contain three
2177          entries containing the android.control.aeMode,
2178          android.control.awbMode, and android.control.afMode values used
2179          by the camera device. The entry order is `(aeMode, awbMode, afMode)`
2180          where aeMode has the lowest index position.
2181          </range>
2182          <details>
2183          When a scene mode is enabled, the camera device is expected
2184          to override android.control.aeMode, android.control.awbMode,
2185          and android.control.afMode with its preferred settings for
2186          that scene mode.
2187
2188          The order of this list matches that of availableSceneModes,
2189          with 3 entries for each mode.  The overrides listed
2190          for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
2191          since for that mode the application-set android.control.aeMode,
2192          android.control.awbMode, and android.control.afMode values are
2193          used instead, matching the behavior when android.control.mode
2194          is set to AUTO. It is recommended that the FACE_PRIORITY and
          FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.
2196
2197          For example, if availableSceneModes contains
2198          `(FACE_PRIORITY, ACTION, NIGHT)`,  then the camera framework
2199          expects sceneModeOverrides to have 9 entries formatted like:
2200          `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
2201          ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
2202          </details>
2203          <hal_details>
2204          To maintain backward compatibility, this list will be made available
2205          in the static metadata of the camera service.  The camera service will
2206          use these values to set android.control.aeMode,
2207          android.control.awbMode, and android.control.afMode when using a scene
2208          mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
2209          </hal_details>
2210          <tag id="BC" />
2211        </entry>
2212      </static>
2213      <dynamic>
2214        <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
2215          <description>The ID sent with the latest
2216          CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
2217          <details>Must be 0 if no
2218          CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
2219          by HAL. Always updated even if AE algorithm ignores the
2220          trigger</details>
2221        </entry>
2222        <clone entry="android.control.aeAntibandingMode" kind="controls">
2223        </clone>
2224        <clone entry="android.control.aeExposureCompensation" kind="controls">
2225        </clone>
2226        <clone entry="android.control.aeLock" kind="controls">
2227        </clone>
2228        <clone entry="android.control.aeMode" kind="controls">
2229        </clone>
2230        <clone entry="android.control.aeRegions" kind="controls">
2231        </clone>
2232        <clone entry="android.control.aeTargetFpsRange" kind="controls">
2233        </clone>
2234        <clone entry="android.control.aePrecaptureTrigger" kind="controls">
2235        </clone>
2236        <entry name="aeState" type="byte" visibility="public" enum="true"
2237               hwlevel="limited">
2238          <enum>
2239            <value>INACTIVE
2240            <notes>AE is off or recently reset.
2241
2242            When a camera device is opened, it starts in
2243            this state. This is a transient state, the camera device may skip reporting
2244            this state in capture result.</notes></value>
2245            <value>SEARCHING
2246            <notes>AE doesn't yet have a good set of control values
2247            for the current scene.
2248
2249            This is a transient state, the camera device may skip
2250            reporting this state in capture result.</notes></value>
2251            <value>CONVERGED
2252            <notes>AE has a good set of control values for the
2253            current scene.</notes></value>
2254            <value>LOCKED
2255            <notes>AE has been locked.</notes></value>
2256            <value>FLASH_REQUIRED
2257            <notes>AE has a good set of control values, but flash
2258            needs to be fired for good quality still
2259            capture.</notes></value>
2260            <value>PRECAPTURE
2261            <notes>AE has been asked to do a precapture sequence
2262            and is currently executing it.
2263
            Precapture can be triggered by setting
            android.control.aePrecaptureTrigger to START. A currently
            active precapture metering sequence, or a completed one (if it caused a
            camera device internal AE lock), can be canceled by setting
            android.control.aePrecaptureTrigger to CANCEL.
2269
2270            Once PRECAPTURE completes, AE will transition to CONVERGED
2271            or FLASH_REQUIRED as appropriate. This is a transient
2272            state, the camera device may skip reporting this state in
2273            capture result.</notes></value>
2274          </enum>
2275          <description>Current state of the auto-exposure (AE) algorithm.</description>
2276          <details>Switching between or enabling AE modes (android.control.aeMode) always
2277          resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
2278          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2279          the algorithm states to INACTIVE.
2280
2281          The camera device can do several state transitions between two results, if it is
2282          allowed by the state transition table. For example: INACTIVE may never actually be
2283          seen in a result.
2284
2285          The state in the result is the state for this image (in sync with this image): if
2286          AE state becomes CONVERGED, then the image data associated with this result should
2287          be good to use.
2288
2289          Below are state transition tables for different AE modes.
2290
2291            State       | Transition Cause | New State | Notes
2292          :------------:|:----------------:|:---------:|:-----------------------:
2293          INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
2294
2295          When android.control.aeMode is AE_MODE_ON_*:
2296
2297            State        | Transition Cause                             | New State      | Notes
2298          :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
2299          INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
2300          INACTIVE       | android.control.aeLock is ON                 | LOCKED         | Values locked
2301          SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
2302          SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
2303          SEARCHING      | android.control.aeLock is ON                 | LOCKED         | Values locked
2304          CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
2305          CONVERGED      | android.control.aeLock is ON                 | LOCKED         | Values locked
2306          FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
2307          FLASH_REQUIRED | android.control.aeLock is ON                 | LOCKED         | Values locked
2308          LOCKED         | android.control.aeLock is OFF                | SEARCHING      | Values not good after unlock
2309          LOCKED         | android.control.aeLock is OFF                | CONVERGED      | Values good after unlock
2310          LOCKED         | android.control.aeLock is OFF                | FLASH_REQUIRED | Exposure good, but too dark
2311          PRECAPTURE     | Sequence done. android.control.aeLock is OFF | CONVERGED      | Ready for high-quality capture
2312          PRECAPTURE     | Sequence done. android.control.aeLock is ON  | LOCKED         | Ready for high-quality capture
2313          LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
2314          LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
2315          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
2316          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
2317
2318          For the above table, the camera device may skip reporting any state changes that happen
2319          without application intervention (i.e. mode switch, trigger, locking). Any state that
2320          can be skipped in that manner is called a transient state.
2321
          For example, for the above AE modes (AE_MODE_ON_*), in addition to the state transitions
          listed in the above table, it is also legal for the camera device to skip one or more
          transient states between two results. See the table below for examples:
2325
2326            State        | Transition Cause                                            | New State      | Notes
2327          :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
2328          INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
2329          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
2330          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
2331          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
2333          CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
2334          FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
2335          </details>
2336        </entry>
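        <!-- Illustrative sketch only: one way an application might drive the AE state machine
             documented above for a metered still capture. Assumes android.hardware.camera2
             imports, an open CameraCaptureSession `session`, a preview CaptureRequest.Builder
             `previewBuilder`, and a background Handler `handler`; exception handling omitted.

               CameraCaptureSession.CaptureCallback aeListener =
                       new CameraCaptureSession.CaptureCallback() {
                   @Override
                   public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest req,
                           TotalCaptureResult result) {
                       Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                       if (aeState == null
                               || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                               || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                           // AE has settled (or the device does not report AE state);
                           // the application could issue its still capture here.
                       }
                   }
               };
               session.setRepeatingRequest(previewBuilder.build(), aeListener, handler);

               // Kick off the precapture metering sequence with a single capture.
               previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
               session.capture(previewBuilder.build(), aeListener, handler);
               previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                       CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
        -->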
2337        <clone entry="android.control.afMode" kind="controls">
2338        </clone>
2339        <clone entry="android.control.afRegions" kind="controls">
2340        </clone>
2341        <clone entry="android.control.afTrigger" kind="controls">
2342        </clone>
2343        <entry name="afState" type="byte" visibility="public" enum="true"
2344               hwlevel="legacy">
2345          <enum>
2346            <value>INACTIVE
2347            <notes>AF is off or has not yet tried to scan/been asked
2348            to scan.
2349
2350            When a camera device is opened, it starts in this
2351            state. This is a transient state, the camera device may
2352            skip reporting this state in capture
2353            result.</notes></value>
2354            <value>PASSIVE_SCAN
            <notes>AF is currently performing an AF scan initiated by the
            camera device in a continuous autofocus mode.
2357
2358            Only used by CONTINUOUS_* AF modes. This is a transient
2359            state, the camera device may skip reporting this state in
2360            capture result.</notes></value>
2361            <value>PASSIVE_FOCUSED
2362            <notes>AF currently believes it is in focus, but may
2363            restart scanning at any time.
2364
2365            Only used by CONTINUOUS_* AF modes. This is a transient
2366            state, the camera device may skip reporting this state in
2367            capture result.</notes></value>
2368            <value>ACTIVE_SCAN
2369            <notes>AF is performing an AF scan because it was
2370            triggered by AF trigger.
2371
2372            Only used by AUTO or MACRO AF modes. This is a transient
2373            state, the camera device may skip reporting this state in
2374            capture result.</notes></value>
2375            <value>FOCUSED_LOCKED
2376            <notes>AF believes it is focused correctly and has locked
2377            focus.
2378
2379            This state is reached only after an explicit START AF trigger has been
2380            sent (android.control.afTrigger), when good focus has been obtained.
2381
2382            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2383            a new AF trigger is sent to the camera device (android.control.afTrigger).
2384            </notes></value>
2385            <value>NOT_FOCUSED_LOCKED
2386            <notes>AF has failed to focus successfully and has locked
2387            focus.
2388
2389            This state is reached only after an explicit START AF trigger has been
2390            sent (android.control.afTrigger), when good focus cannot be obtained.
2391
2392            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
2393            a new AF trigger is sent to the camera device (android.control.afTrigger).
2394            </notes></value>
2395            <value>PASSIVE_UNFOCUSED
2396            <notes>AF finished a passive scan without finding focus,
2397            and may restart scanning at any time.
2398
2399            Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
2400            device may skip reporting this state in capture result.
2401
2402            LEGACY camera devices do not support this state. When a passive
2403            scan has finished, it will always go to PASSIVE_FOCUSED.
2404            </notes></value>
2405          </enum>
2406          <description>Current state of auto-focus (AF) algorithm.</description>
2407          <details>
2408          Switching between or enabling AF modes (android.control.afMode) always
2409          resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
2410          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2411          the algorithm states to INACTIVE.
2412
2413          The camera device can do several state transitions between two results, if it is
2414          allowed by the state transition table. For example: INACTIVE may never actually be
2415          seen in a result.
2416
2417          The state in the result is the state for this image (in sync with this image): if
2418          AF state becomes FOCUSED, then the image data associated with this result should
2419          be sharp.
2420
2421          Below are state transition tables for different AF modes.
2422
2423          When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
2424
2425            State       | Transition Cause | New State | Notes
2426          :------------:|:----------------:|:---------:|:-----------:
2427          INACTIVE      |                  | INACTIVE  | Never changes
2428
2429          When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
2430
2431            State            | Transition Cause | New State          | Notes
2432          :-----------------:|:----------------:|:------------------:|:--------------:
2433          INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
2434          ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
2435          ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
2436          ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
2437          FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2438          FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2439          NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
2440          NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
2441          Any state          | Mode change      | INACTIVE           |
2442
2443          For the above table, the camera device may skip reporting any state changes that happen
2444          without application intervention (i.e. mode switch, trigger, locking). Any state that
2445          can be skipped in that manner is called a transient state.
2446
          For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
          state transitions listed in the above table, it is also legal for the camera device to
          skip one or more transient states between two results. See the table below for examples:
2450
2451            State            | Transition Cause | New State          | Notes
2452          :-----------------:|:----------------:|:------------------:|:--------------:
2453          INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
2454          INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
2455          FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
          NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
2457
2458
2459          When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
2460
2461            State            | Transition Cause                    | New State          | Notes
2462          :-----------------:|:-----------------------------------:|:------------------:|:--------------:
2463          INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2464          INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2465          PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
2466          PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2467          PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
2468          PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
2469          PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
2470          PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2471          PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
2472          PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
2473          PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
2474          FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
2475          FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
2476          NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
2477          NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan
2478
2479          When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
2480
2481            State            | Transition Cause                     | New State          | Notes
2482          :-----------------:|:------------------------------------:|:------------------:|:--------------:
2483          INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2484          INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
2485          PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
2486          PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
2487          PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
2488          PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
2489          PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
2490          PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
2491          PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
          PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate transition, lens now locked
          PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
2494          FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
2495          FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
2496          NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
2497          NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan
2498
          When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
          (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
          camera device. When a trigger is included in a mode switch request, the trigger
          will be evaluated in the context of the new mode in the request.
          See the table below for examples:
2504
2505            State      | Transition Cause                       | New State                                | Notes
2506          :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
2507          any state    | CAF-->AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
2508          any state    | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
2509          any state    | AUTO-->CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
2510          </details>
2511        </entry>
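        <!-- Illustrative sketch only: triggering an active AF scan and watching the AF state
             machine documented above. Assumes `session`, `previewBuilder` (with CONTROL_AF_MODE
             set to AF_MODE_AUTO) and `handler` as in the AE example; exception handling omitted.

               CameraCaptureSession.CaptureCallback afListener =
                       new CameraCaptureSession.CaptureCallback() {
                   @Override
                   public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest req,
                           TotalCaptureResult result) {
                       Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                       if (afState != null
                               && (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                                   || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED)) {
                           // Lens is locked; capture, then send AF_TRIGGER_CANCEL to
                           // return to INACTIVE (or to a passive state in CONTINUOUS_* modes).
                       }
                   }
               };
               session.setRepeatingRequest(previewBuilder.build(), afListener, handler);

               previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                       CameraMetadata.CONTROL_AF_TRIGGER_START);
               session.capture(previewBuilder.build(), afListener, handler);
               previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                       CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
        -->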
2512        <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
2513          <description>The ID sent with the latest
2514          CAMERA2_TRIGGER_AUTOFOCUS call</description>
2515          <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
2516          received yet by HAL. Always updated even if AF algorithm
2517          ignores the trigger</details>
2518        </entry>
2519        <clone entry="android.control.awbLock" kind="controls">
2520        </clone>
2521        <clone entry="android.control.awbMode" kind="controls">
2522        </clone>
2523        <clone entry="android.control.awbRegions" kind="controls">
2524        </clone>
2525        <clone entry="android.control.captureIntent" kind="controls">
2526        </clone>
2527        <entry name="awbState" type="byte" visibility="public" enum="true"
2528               hwlevel="limited">
2529          <enum>
2530            <value>INACTIVE
2531            <notes>AWB is not in auto mode, or has not yet started metering.
2532
2533            When a camera device is opened, it starts in this
2534            state. This is a transient state, the camera device may
2535            skip reporting this state in capture
2536            result.</notes></value>
2537            <value>SEARCHING
2538            <notes>AWB doesn't yet have a good set of control
2539            values for the current scene.
2540
2541            This is a transient state, the camera device
2542            may skip reporting this state in capture result.</notes></value>
2543            <value>CONVERGED
2544            <notes>AWB has a good set of control values for the
2545            current scene.</notes></value>
2546            <value>LOCKED
2547            <notes>AWB has been locked.
2548            </notes></value>
2549          </enum>
          <description>Current state of the auto-white balance (AWB) algorithm.</description>
2551          <details>Switching between or enabling AWB modes (android.control.awbMode) always
2552          resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
2553          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
2554          the algorithm states to INACTIVE.
2555
2556          The camera device can do several state transitions between two results, if it is
2557          allowed by the state transition table. So INACTIVE may never actually be seen in
2558          a result.
2559
2560          The state in the result is the state for this image (in sync with this image): if
2561          AWB state becomes CONVERGED, then the image data associated with this result should
2562          be good to use.
2563
2564          Below are state transition tables for different AWB modes.
2565
2566          When `android.control.awbMode != AWB_MODE_AUTO`:
2567
2568            State       | Transition Cause | New State | Notes
2569          :------------:|:----------------:|:---------:|:-----------------------:
2570          INACTIVE      |                  |INACTIVE   |Camera device auto white balance algorithm is disabled
2571
2572          When android.control.awbMode is AWB_MODE_AUTO:
2573
2574            State        | Transition Cause                 | New State     | Notes
2575          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2576          INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
2577          INACTIVE       | android.control.awbLock is ON    | LOCKED        | Values locked
2578          SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
2579          SEARCHING      | android.control.awbLock is ON    | LOCKED        | Values locked
2580          CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
2581          CONVERGED      | android.control.awbLock is ON    | LOCKED        | Values locked
2582          LOCKED         | android.control.awbLock is OFF   | SEARCHING     | Values not good after unlock
2583
2584          For the above table, the camera device may skip reporting any state changes that happen
2585          without application intervention (i.e. mode switch, trigger, locking). Any state that
2586          can be skipped in that manner is called a transient state.
2587
          For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
          listed in the above table, it is also legal for the camera device to skip one or more
          transient states between two results. See the table below for examples:
2591
2592            State        | Transition Cause                 | New State     | Notes
2593          :-------------:|:--------------------------------:|:-------------:|:-----------------:
2594          INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
2595          LOCKED         | android.control.awbLock is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.
2596          </details>
2597        </entry>
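        <!-- Illustrative sketch only: locking AWB and confirming the LOCKED state documented
             above. Assumes `session`, `previewBuilder` and `handler` as in the AE example, and
             that android.control.awbLockAvailable is true; exception handling omitted.

               previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
               session.setRepeatingRequest(previewBuilder.build(),
                       new CameraCaptureSession.CaptureCallback() {
                           @Override
                           public void onCaptureCompleted(CameraCaptureSession s,
                                   CaptureRequest req, TotalCaptureResult result) {
                               Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                               if (awbState != null
                                       && awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED) {
                                   // White balance values are now frozen for subsequent frames.
                               }
                           }
                       }, handler);
        -->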
2598        <clone entry="android.control.effectMode" kind="controls">
2599        </clone>
2600        <clone entry="android.control.mode" kind="controls">
2601        </clone>
2602        <clone entry="android.control.sceneMode" kind="controls">
2603        </clone>
2604        <clone entry="android.control.videoStabilizationMode" kind="controls">
2605        </clone>
2606      </dynamic>
2607      <static>
2608        <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
2609               container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
2610          <array>
2611            <size>5</size>
2612            <size>n</size>
2613          </array>
2614          <description>
2615          List of available high speed video size, fps range and max batch size configurations
2616          supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
2617          </description>
2618          <range>
2619          For each configuration, the fps_max &amp;gt;= 120fps.
2620          </range>
2621          <details>
2622          When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
2623          this metadata will list the supported high speed video size, fps range and max batch size
2624          configurations. All the sizes listed in this configuration will be a subset of the sizes
2625          reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
2626          for processed non-stalling formats.
2627
          For the high speed video use case, the application must
          select the video size and fps range from this metadata to configure the recording and
          preview streams and set up the recording requests. For example, if the application intends
          to do high speed recording, it can select the maximum size reported by this metadata to
          configure the output streams. Once the size is selected, the application can filter this
          metadata by the selected size to get the supported fps ranges, and use these fps ranges to
          set up the recording requests. Note that when multiple output streams are used, the
          application must select one unique size from this metadata (e.g., the preview and recording
          streams must have the same size). Otherwise, creation of the high speed capture session
          will fail.
2637
          The min and max fps will be multiples of 30fps.
2639
          High speed video streaming puts significant performance pressure on the camera hardware.
          To achieve efficient high speed streaming, the camera device may have to aggregate
          multiple frames together and process them as a batch, where the request
          controls are the same for all the frames in the batch. Max batch size indicates
          the maximum possible number of frames the camera device will group together for this high
          speed stream configuration. This max batch size will be used to generate a high speed
          recording request list by
          {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
          The max batch size for each configuration will satisfy the conditions below:
2649
2650          * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
2651          if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
          * The camera device may choose a smaller internal batch size for each configuration, but
          the actual batch size will be a divisor of the max batch size. For example, if the max batch
2654          size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
2655          * The max batch size in each configuration entry must be no larger than 32.
2656
          The camera device doesn't have to support batch mode to achieve high speed video recording;
          in that case, batch_size_max will be reported as 1 in each configuration entry.
2659
          The fps ranges in this configuration list can only be used to create requests
          that are submitted to a high speed camera capture session created by
          {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
          The fps ranges reported in this metadata must not be used to set up capture requests for a
          normal capture session, or they will cause a request error.
2665          </details>
2666          <hal_details>
2667          All the sizes listed in this configuration will be a subset of the sizes reported by
2668          android.scaler.availableStreamConfigurations for processed non-stalling output formats.
2669          Note that for all high speed video configurations, HAL must be able to support a minimum
2670          of two streams, though the application might choose to configure just one stream.
2671
          The HAL may support multiple sensor modes for high speed outputs, for example, a 120fps
          sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording. The
          application usually starts preview first, then starts recording. To minimize the stutter
          caused by a sensor mode switch when recording starts, the application may want to ensure
          that the same sensor mode is used for preview and recording. Therefore, the HAL must
          advertise the variable fps range [30, fps_max] for each fixed fps range in this
          configuration list. For example, if the HAL advertises [120, 120] and [240, 240], the HAL
          must also advertise [30, 120] and [30, 240] for each configuration. In doing so, if the
          application intends to do 120fps recording, it can select [30, 120] to start preview, and
          [120, 120] to start recording. For these variable fps ranges, it's up to the HAL to decide
          the actual fps values that are suitable for smooth preview streaming. If the HAL sees
          different max_fps values that fall into different sensor modes in a sequence of requests,
          the HAL must switch the sensor mode as quickly as possible to minimize the stutter caused
          by the mode switch.
2685          </hal_details>
2686          <tag id="V1" />
2687        </entry>
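        <!-- Illustrative sketch only: how an application might consume this list through the
             public API. Assumes `chars` (CameraCharacteristics), an open CameraDevice `camera`,
             preview/recording Surfaces `previewSurface` and `recordSurface` configured with the
             same supported high speed size, a CameraConstrainedHighSpeedCaptureSession
             `hsSession`, and a Handler `handler`; exception handling omitted.

               StreamConfigurationMap map =
                       chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
               Size videoSize = map.getHighSpeedVideoSizes()[0];
               Range<Integer>[] fpsRanges = map.getHighSpeedVideoFpsRangesFor(videoSize);

               CaptureRequest.Builder builder =
                       camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
               builder.addTarget(previewSurface);
               builder.addTarget(recordSurface);
               builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRanges[0]);

               // The session expands one request into a batch of up to batch_size_max requests.
               List<CaptureRequest> burst = hsSession.createHighSpeedRequestList(builder.build());
               hsSession.setRepeatingBurst(burst, null, handler);
        -->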
2688        <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
2689               typedef="boolean" hwlevel="legacy">
2690          <enum>
2691            <value>FALSE</value>
2692            <value>TRUE</value>
2693          </enum>
2694          <description>Whether the camera device supports android.control.aeLock</description>
2695          <details>
2696              Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
2697              list `true`. This includes FULL devices.
2698          </details>
2699          <tag id="BC"/>
2700        </entry>
2701        <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
2702               typedef="boolean" hwlevel="legacy">
2703          <enum>
2704            <value>FALSE</value>
2705            <value>TRUE</value>
2706          </enum>
2707          <description>Whether the camera device supports android.control.awbLock</description>
2708          <details>
2709              Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
2710              always list `true`. This includes FULL devices.
2711          </details>
2712          <tag id="BC"/>
2713        </entry>
2714        <entry name="availableModes" type="byte" visibility="public"
2715            type_notes="List of enums (android.control.mode)." container="array"
2716            typedef="enumList" hwlevel="legacy">
2717          <array>
2718            <size>n</size>
2719          </array>
2720          <description>
2721          List of control modes for android.control.mode that are supported by this camera
2722          device.
2723          </description>
2724          <range>Any value listed in android.control.mode</range>
2725          <details>
2726              This list contains control modes that can be set for the camera device.
2727              LEGACY mode devices will always support AUTO mode. LIMITED and FULL
              devices will always support OFF and AUTO modes.
2729          </details>
2730        </entry>
2731        <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
2732            type_notes="Range of supported post RAW sensitivitiy boosts"
2733            container="array" typedef="rangeInt">
2734          <array>
2735            <size>2</size>
2736          </array>
2737          <description>Range of boosts for android.control.postRawSensitivityBoost supported
2738            by this camera device.
2739          </description>
2740          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2741          <details>
            Devices that support post RAW sensitivity boost will advertise the
            android.control.postRawSensitivityBoost key for controlling
            post RAW sensitivity boost.

            This key will be `null` for devices that do not support any RAW format
            outputs. For devices that do support RAW format outputs, this key will always
            be present, and if a device does not support post RAW sensitivity boost, it will
2749            list `(100, 100)` in this key.
2750          </details>
2751          <hal_details>
             This key was added in HAL3.4. For HAL3.3 or earlier devices, the camera framework will
             generate this key as `(100, 100)` if the device supports any RAW output formats.
             All HAL3.4 and above devices should list this key if the device supports any RAW
             output formats.
2756          </hal_details>
2757        </entry>
2758      </static>
2759      <controls>
2760        <entry name="postRawSensitivityBoost" type="int32" visibility="public">
2761          <description>The amount of additional sensitivity boost applied to output images
2762             after RAW sensor data is captured.
2763          </description>
2764          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
2765          <range>android.control.postRawSensitivityBoostRange</range>
2766          <details>
2767          Some camera devices support additional digital sensitivity boosting in the
2768          camera processing pipeline after sensor RAW image is captured.
          Such a boost will be applied to YUV/JPEG format output images but will have
          no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.

          This key will be `null` for devices that do not support any RAW format
          outputs. For devices that do support RAW format outputs, this key will always
          be present, and if a device does not support post RAW sensitivity boost, it will
2775          list `100` in this key.
2776
2777          If the camera device cannot apply the exact boost requested, it will reduce the
2778          boost to the nearest supported value.
2779          The final boost value used will be available in the output capture result.
2780
          For devices that support post RAW sensitivity boost, the YUV/JPEG output images
          of such a device will have a total sensitivity of
          `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`.
          The sensitivity of RAW format images will always be `android.sensor.sensitivity`.
2785
2786          This control is only effective if android.control.aeMode or android.control.mode is set to
2787          OFF; otherwise the auto-exposure algorithm will override this value.
2788          </details>
2789        </entry>
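        <!-- Illustrative sketch only: clamping a requested boost to the advertised range.
             Assumes `chars` (CameraCharacteristics) and a CaptureRequest.Builder `builder`;
             the desired boost value is hypothetical.

               Range<Integer> boostRange =
                       chars.get(CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
               if (boostRange != null) {
                   int desiredBoost = 200;  // 2x boost, in ISO arithmetic units
                   builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST,
                           boostRange.clamp(desiredBoost));
               }
        -->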
2790      </controls>
2791      <dynamic>
2792        <clone entry="android.control.postRawSensitivityBoost" kind="controls">
2793        </clone>
2794      </dynamic>
2795    </section>
2796    <section name="demosaic">
2797      <controls>
2798        <entry name="mode" type="byte" enum="true">
2799          <enum>
2800            <value>FAST
2801            <notes>Minimal or no slowdown of frame rate compared to
2802            Bayer RAW output.</notes></value>
2803            <value>HIGH_QUALITY
2804            <notes>Improved processing quality but the frame rate might be slowed down
2805            relative to raw output.</notes></value>
2806          </enum>
2807          <description>Controls the quality of the demosaicing
2808          processing.</description>
2809          <tag id="FUTURE" />
2810        </entry>
2811      </controls>
2812    </section>
2813    <section name="edge">
2814      <controls>
2815        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
2816          <enum>
2817            <value>OFF
2818            <notes>No edge enhancement is applied.</notes></value>
2819            <value>FAST
2820            <notes>Apply edge enhancement at a quality level that does not slow down frame rate
2821            relative to sensor output. It may be the same as OFF if edge enhancement will
2822            slow down frame rate relative to sensor.</notes></value>
2823            <value>HIGH_QUALITY
2824            <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
2825            </notes></value>
2826            <value optional="true">ZERO_SHUTTER_LAG
2827            <notes>Edge enhancement is applied at different levels for different output streams,
2828            based on resolution. Streams at maximum recording resolution (see {@link
2829            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
2830            edge enhancement applied, while higher-resolution streams have no edge enhancement
2831            applied. The level of edge enhancement for low-resolution streams is tuned so that
2832            frame rate is not impacted, and the quality is equal to or better than FAST (since it
2833            is only applied to lower-resolution outputs, quality may improve from FAST).
2834
2835            This mode is intended to be used by applications operating in a zero-shutter-lag mode
2836            with YUV or PRIVATE reprocessing, where the application continuously captures
2837            high-resolution intermediate buffers into a circular buffer, from which a final image is
2838            produced via reprocessing when a user takes a picture.  For such a use case, the
2839            high-resolution buffers must not have edge enhancement applied to maximize efficiency of
2840            preview and to avoid double-applying enhancement when reprocessed, while low-resolution
2841            buffers (used for recording or preview, generally) need edge enhancement applied for
2842            reasonable preview quality.
2843
2844            This mode is guaranteed to be supported by devices that support either the
2845            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
2846            (android.request.availableCapabilities lists either of those capabilities) and it will
2847            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
2848            </notes></value>
2849          </enum>
2850          <description>Operation mode for edge
2851          enhancement.</description>
2852          <range>android.edge.availableEdgeModes</range>
2853          <details>Edge enhancement improves sharpness and details in the captured image. OFF means
2854          no enhancement will be applied by the camera device.
2855
2856          FAST/HIGH_QUALITY both mean camera device determined enhancement
2857          will be applied. HIGH_QUALITY mode indicates that the
2858          camera device will use the highest-quality enhancement algorithms,
2859          even if it slows down capture rate. FAST means the camera device will
2860          not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
2861          edge enhancement will slow down capture rate. Every output stream will have a similar
2862          amount of enhancement applied.
2863
2864          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
2865          buffer of high-resolution images during preview and reprocess image(s) from that buffer
2866          into a final capture when triggered by the user. In this mode, the camera device applies
2867          edge enhancement to low-resolution streams (below maximum recording resolution) to
2868          maximize preview quality, but does not apply edge enhancement to high-resolution streams,
2869          since those will be reprocessed later if necessary.
2870
2871          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
2872          device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
2873          The camera device may adjust its internal edge enhancement parameters for best
2874          image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
2875          </details>
2876          <hal_details>
          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
          adjust the internal edge enhancement parameters appropriately to get the best
2879          quality images.
2880          </hal_details>
2881          <tag id="V1" />
2882          <tag id="REPROC" />
2883        </entry>
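        <!-- Illustrative sketch only: preferring ZERO_SHUTTER_LAG for a reprocessing use case
             when the device lists it. Assumes `chars` and a CaptureRequest.Builder `builder`.

               int[] edgeModes = chars.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
               int chosenMode = CameraMetadata.EDGE_MODE_FAST;  // listed by all devices
               if (edgeModes != null) {
                   for (int mode : edgeModes) {
                       if (mode == CameraMetadata.EDGE_MODE_ZERO_SHUTTER_LAG) {
                           // Keep high-resolution reprocessing buffers free of enhancement.
                           chosenMode = mode;
                       }
                   }
               }
               builder.set(CaptureRequest.EDGE_MODE, chosenMode);
        -->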
2884        <entry name="strength" type="byte">
2885          <description>Control the amount of edge enhancement
2886          applied to the images</description>
2887          <units>1-10; 10 is maximum sharpening</units>
2888          <tag id="FUTURE" />
2889        </entry>
2890      </controls>
2891      <static>
2892        <entry name="availableEdgeModes" type="byte" visibility="public"
2893               type_notes="list of enums" container="array" typedef="enumList"
2894               hwlevel="full">
2895          <array>
2896            <size>n</size>
2897          </array>
2898          <description>
2899          List of edge enhancement modes for android.edge.mode that are supported by this camera
2900          device.
2901          </description>
2902          <range>Any value listed in android.edge.mode</range>
2903          <details>
2904          Full-capability camera devices must always support OFF; camera devices that support
2905          YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
2906          list FAST.
2907          </details>
2908          <hal_details>
2909          HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
2910          on the camera device, but the underlying implementation can be the same for both modes.
2911          That is, if the highest quality implementation on the camera device does not slow down
2912          capture rate, then FAST and HIGH_QUALITY will generate the same output.
2913          </hal_details>
2914          <tag id="V1" />
2915          <tag id="REPROC" />
2916        </entry>
2917      </static>
2918      <dynamic>
2919        <clone entry="android.edge.mode" kind="controls">
2920          <tag id="V1" />
2921          <tag id="REPROC" />
2922        </clone>
2923      </dynamic>
2924    </section>
2925    <section name="flash">
2926      <controls>
2927        <entry name="firingPower" type="byte">
2928          <description>Power for flash firing/torch</description>
2929          <units>10 is max power; 0 is no flash. Linear</units>
2930          <range>0 - 10</range>
2931          <details>Power for snapshot may use a different scale than
2932          for torch mode. Only one entry for torch mode will be
2933          used</details>
2934          <tag id="FUTURE" />
2935        </entry>
2936        <entry name="firingTime" type="int64">
2937          <description>Firing time of flash relative to start of
2938          exposure</description>
2939          <units>nanoseconds</units>
2940          <range>0-(exposure time-flash duration)</range>
2941          <details>Clamped to (0, exposure time - flash
2942          duration).</details>
2943          <tag id="FUTURE" />
2944        </entry>
2945        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
2946          <enum>
2947            <value>OFF
2948              <notes>
2949              Do not fire the flash for this capture.
2950              </notes>
2951            </value>
2952            <value>SINGLE
2953              <notes>
2954              If the flash is available and charged, fire flash
2955              for this capture.
2956              </notes>
2957            </value>
2958            <value>TORCH
2959              <notes>
2960              Transition flash to continuously on.
2961              </notes>
2962            </value>
2963          </enum>
          <description>The desired mode for the camera device's flash control.</description>
          <details>
          This control is only effective when the flash unit is available
2967          (`android.flash.info.available == true`).
2968
2969          When this control is used, the android.control.aeMode must be set to ON or OFF.
2970          Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
2971          ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
2972
2973          When set to OFF, the camera device will not fire flash for this capture.
2974
2975          When set to SINGLE, the camera device will fire flash regardless of the camera
          device's auto-exposure routine's result. In the still capture case, this
          control should be used along with the auto-exposure (AE) precapture metering sequence
          (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed.
2979
2980          When set to TORCH, the flash will be on continuously. This mode can be used
2981          for use cases such as preview, auto-focus assist, still capture, or video recording.
2982
2983          The flash status will be reported by android.flash.state in the capture result metadata.
2984          </details>
2985          <tag id="BC" />
2986        </entry>
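        <!-- Illustrative sketch only: enabling a continuous torch for preview, which requires
             android.control.aeMode to be ON or OFF as described above. Assumes `chars`,
             `previewBuilder`, `session` and `handler`; exception handling omitted.

               Boolean hasFlash = chars.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
               if (Boolean.TRUE.equals(hasFlash)) {
                   previewBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                           CameraMetadata.CONTROL_AE_MODE_ON);
                   previewBuilder.set(CaptureRequest.FLASH_MODE,
                           CameraMetadata.FLASH_MODE_TORCH);
                   session.setRepeatingRequest(previewBuilder.build(), null, handler);
               }
        -->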
2987      </controls>
2988      <static>
2989        <namespace name="info">
2990          <entry name="available" type="byte" visibility="public" enum="true"
2991                 typedef="boolean" hwlevel="legacy">
2992            <enum>
2993              <value>FALSE</value>
2994              <value>TRUE</value>
2995            </enum>
2996            <description>Whether this camera device has a
2997            flash unit.</description>
2998            <details>
2999            Will be `false` if no flash is available.
3000
3001            If there is no flash unit, none of the flash controls do
3002            anything.</details>
3003            <tag id="BC" />
3004          </entry>
3005          <entry name="chargeDuration" type="int64">
3006            <description>Time taken before flash can fire
3007            again</description>
3008            <units>nanoseconds</units>
3009            <range>0-1e9</range>
3010            <details>1 second too long/too short for recharge? Should
3011            this be power-dependent?</details>
3012            <tag id="FUTURE" />
3013          </entry>
3014        </namespace>
3015        <entry name="colorTemperature" type="byte">
3016          <description>The x,y whitepoint of the
3017          flash</description>
3018          <units>pair of floats</units>
3019          <range>0-1 for both</range>
3020          <tag id="FUTURE" />
3021        </entry>
3022        <entry name="maxEnergy" type="byte">
3023          <description>Max energy output of the flash for a full
3024          power single flash</description>
3025          <units>lumen-seconds</units>
3026          <range>&amp;gt;= 0</range>
3027          <tag id="FUTURE" />
3028        </entry>
3029      </static>
3030      <dynamic>
3031        <clone entry="android.flash.firingPower" kind="controls">
3032        </clone>
3033        <clone entry="android.flash.firingTime" kind="controls">
3034        </clone>
3035        <clone entry="android.flash.mode" kind="controls"></clone>
3036        <entry name="state" type="byte" visibility="public" enum="true"
3037               hwlevel="limited">
3038          <enum>
3039            <value>UNAVAILABLE
3040            <notes>No flash on camera.</notes></value>
3041            <value>CHARGING
3042            <notes>Flash is charging and cannot be fired.</notes></value>
3043            <value>READY
3044            <notes>Flash is ready to fire.</notes></value>
3045            <value>FIRED
3046            <notes>Flash fired for this capture.</notes></value>
3047            <value>PARTIAL
3048            <notes>Flash partially illuminated this frame.
3049
3050            This is usually due to the next or previous frame having
3051            the flash fire, and the flash spilling into this capture
3052            due to hardware limitations.</notes></value>
3053          </enum>
3054          <description>Current state of the flash
3055          unit.</description>
3056          <details>
          When the camera device doesn't have a flash unit
3058          (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
3059          Other states indicate the current flash status.
3060
3061          In certain conditions, this will be available on LEGACY devices:
3062
3063           * Flash-less cameras always return UNAVAILABLE.
3064           * Using android.control.aeMode `==` ON_ALWAYS_FLASH
3065             will always return FIRED.
3066           * Using android.flash.mode `==` TORCH
3067             will always return FIRED.
3068
3069          In all other conditions the state will not be available on
3070          LEGACY devices (i.e. it will be `null`).
3071          </details>
3072        </entry>
3073      </dynamic>
3074    </section>
3075    <section name="hotPixel">
3076      <controls>
3077        <entry name="mode" type="byte" visibility="public" enum="true">
3078          <enum>
3079            <value>OFF
3080              <notes>
3081              No hot pixel correction is applied.
3082
3083              The frame rate must not be reduced relative to sensor raw output
3084              for this option.
3085
3086              The hotpixel map may be returned in android.statistics.hotPixelMap.
3087              </notes>
3088            </value>
3089            <value>FAST
3090              <notes>
3091              Hot pixel correction is applied, without reducing frame
3092              rate relative to sensor raw output.
3093
3094              The hotpixel map may be returned in android.statistics.hotPixelMap.
3095              </notes>
3096            </value>
3097            <value>HIGH_QUALITY
3098              <notes>
3099              High-quality hot pixel correction is applied, at a cost
3100              of possibly reduced frame rate relative to sensor raw output.
3101
3102              The hotpixel map may be returned in android.statistics.hotPixelMap.
3103              </notes>
3104            </value>
3105          </enum>
3106          <description>
3107          Operational mode for hot pixel correction.
3108          </description>
3109          <range>android.hotPixel.availableHotPixelModes</range>
3110          <details>
3111          Hotpixel correction interpolates out, or otherwise removes, pixels
3112          that do not accurately measure the incoming light (i.e. pixels that
3113          are stuck at an arbitrary value or are oversensitive).
3114          </details>
3115          <tag id="V1" />
3116          <tag id="RAW" />
3117        </entry>
3118      </controls>
3119      <static>
3120        <entry name="availableHotPixelModes" type="byte" visibility="public"
3121          type_notes="list of enums" container="array" typedef="enumList">
3122          <array>
3123            <size>n</size>
3124          </array>
3125          <description>
3126          List of hot pixel correction modes for android.hotPixel.mode that are supported by this
3127          camera device.
3128          </description>
3129          <range>Any value listed in android.hotPixel.mode</range>
3130          <details>
3131          FULL mode camera devices will always support FAST.
3132          </details>
3133          <hal_details>
3134          To avoid performance issues, there will be significantly fewer hot
3135          pixels than actual pixels on the camera sensor.
3136          HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
3137          on the camera device, but the underlying implementation can be the same for both modes.
3138          That is, if the highest quality implementation on the camera device does not slow down
3139          capture rate, then FAST and HIGH_QUALITY will generate the same output.
3140          </hal_details>
3141          <tag id="V1" />
3142          <tag id="RAW" />
3143        </entry>
3144      </static>
3145      <dynamic>
3146        <clone entry="android.hotPixel.mode" kind="controls">
3147          <tag id="V1" />
3148          <tag id="RAW" />
3149        </clone>
3150      </dynamic>
3151    </section>
3152    <section name="jpeg">
3153      <controls>
3154        <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true"
3155        typedef="location" hwlevel="legacy">
3156          <description>
3157          A location object to use when generating image GPS metadata.
3158          </description>
3159          <details>
3160          Setting a location object in a request will include the GPS coordinates of the location
3161          into any JPEG images captured based on the request. These coordinates can then be
3162          viewed by anyone who receives the JPEG image.
3163          </details>
3164        </entry>
3165        <entry name="gpsCoordinates" type="double" visibility="ndk_public"
3166        type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
3167        container="array" hwlevel="legacy">
3168          <array>
3169            <size>3</size>
3170          </array>
3171          <description>GPS coordinates to include in output JPEG
3172          EXIF.</description>
3173          <range>(-180, 180], [-90, 90], [-inf, inf]</range>
3174          <tag id="BC" />
3175        </entry>
3176        <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public"
3177               typedef="string" hwlevel="legacy">
3178          <description>32 characters describing GPS algorithm to
3179          include in EXIF.</description>
3180          <units>UTF-8 null-terminated string</units>
3181          <tag id="BC" />
3182        </entry>
3183        <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy">
3184          <description>Time GPS fix was made to include in
3185          EXIF.</description>
3186          <units>UTC in seconds since January 1, 1970</units>
3187          <tag id="BC" />
3188        </entry>
3189        <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
3190          <description>The orientation for a JPEG image.</description>
3191          <units>Degrees in multiples of 90</units>
3192          <range>0, 90, 180, 270</range>
3193          <details>
3194          The clockwise rotation angle in degrees, relative to the orientation
3195          of the camera, that the JPEG picture needs to be rotated by, to be viewed
3196          upright.
3197
3198          Camera devices may either encode this value into the JPEG EXIF header, or
3199          rotate the image data to match this orientation. When the image data is rotated,
3200          the thumbnail data will also be rotated.
3201
3202          Note that this orientation is relative to the orientation of the camera sensor, given
3203          by android.sensor.orientation.
3204
3205          To translate from the device orientation given by the Android sensor APIs, the following
3206          sample code may be used:
3207
3208              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
3209                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
3210                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
3211
3212                  // Round device orientation to a multiple of 90
3213                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
3214
3215                  // Reverse device orientation for front-facing cameras
3216                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
3217                  if (facingFront) deviceOrientation = -deviceOrientation;
3218
3219                  // Calculate desired JPEG orientation relative to camera orientation to make
3220                  // the image upright relative to the device orientation
3221                  int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
3222
3223                  return jpegOrientation;
3224              }
3225          </details>
3226          <tag id="BC" />
3227        </entry>
3228        <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
3229          <description>Compression quality of the final JPEG
3230          image.</description>
3231          <range>1-100; larger is higher quality</range>
3232          <details>85-95 is typical usage range.</details>
3233          <tag id="BC" />
3234        </entry>
3235        <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
3236          <description>Compression quality of JPEG
3237          thumbnail.</description>
3238          <range>1-100; larger is higher quality</range>
3239          <tag id="BC" />
3240        </entry>
3241        <entry name="thumbnailSize" type="int32" visibility="public"
3242        container="array" typedef="size" hwlevel="legacy">
3243          <array>
3244            <size>2</size>
3245          </array>
3246          <description>Resolution of embedded JPEG thumbnail.</description>
3247          <range>android.jpeg.availableThumbnailSizes</range>
3248          <details>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
3249          but the captured JPEG will still be a valid image.
3250
3251          For best results, when issuing a request for a JPEG image, the thumbnail size selected
3252          should have the same aspect ratio as the main JPEG output.
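
          As an illustration only (this helper and its name are hypothetical, not part of the API),
          one way an application might pick a supported thumbnail size whose aspect ratio matches
          the chosen JPEG output size is sketched below:

              // Hypothetical helper: pick a listed thumbnail size matching the JPEG aspect ratio.
              private static android.util.Size chooseThumbnailSize(CameraCharacteristics c,
                      android.util.Size jpegSize) {
                  android.util.Size[] sizes =
                          c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
                  for (android.util.Size s : sizes) {
                      if (s.getWidth() == 0) continue; // skip the (0, 0) "no thumbnail" entry
                      // Same aspect ratio, compared without floating point
                      if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
                          return s;
                      }
                  }
                  return sizes[sizes.length - 1]; // list is sorted by area; fall back to the largest
              }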
3253
3254          If the thumbnail image aspect ratio differs from the JPEG primary image aspect
3255          ratio, the camera device creates the thumbnail by cropping it from the primary image.
3256          For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
3257          a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
3258          generate the thumbnail image. The thumbnail image will always have a smaller Field
3259          Of View (FOV) than the primary image when aspect ratios differ.
3260
3261          When an android.jpeg.orientation of non-zero degree is requested,
3262          the camera device will handle thumbnail rotation in one of the following ways:
3263
3264          * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
3265            and keep jpeg and thumbnail image data unrotated.
3266          * Rotate the jpeg and thumbnail image data and not set
3267            {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
3268            case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in
3269            the capture result, so the width and height will be interchanged if 90 or 270 degree
3270            orientation is requested. LEGACY devices will always report the unrotated thumbnail
3271            size.
3272          </details>
3273          <hal_details>
3274          The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.
3275          The cropping must be done on the primary jpeg image rather than the sensor active array.
3276          The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
3277          thumbnail image cropping.
3278          </hal_details>
3279          <tag id="BC" />
3280        </entry>
3281      </controls>
3282      <static>
3283        <entry name="availableThumbnailSizes" type="int32" visibility="public"
3284        container="array" typedef="size" hwlevel="legacy">
3285          <array>
3286            <size>2</size>
3287            <size>n</size>
3288          </array>
3289          <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
3290          camera device.</description>
3291          <details>
3292          This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
3293          thumbnail should be generated.
3294
3295          The following conditions will be satisfied for this size list:
3296
3297          * The sizes will be sorted by increasing pixel area (width x height).
3298          If several resolutions have the same area, they will be sorted by increasing width.
3299          * The aspect ratio of the largest thumbnail size will be the same as the
3300          aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations.
3301          The largest size is defined as the size that has the largest pixel area
3302          in a given size list.
3303          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
3304          one corresponding size that has the same aspect ratio in availableThumbnailSizes,
3305          and vice versa.
3306          * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
3307          <tag id="BC" />
3308        </entry>
3309        <entry name="maxSize" type="int32" visibility="system">
3310          <description>Maximum size in bytes for the compressed
3311          JPEG buffer</description>
3312          <range>Must be large enough to fit any JPEG produced by
3313          the camera</range>
3314          <details>This is used for sizing the gralloc buffers for
3315          JPEG</details>
3316        </entry>
3317      </static>
3318      <dynamic>
3319        <clone entry="android.jpeg.gpsLocation" kind="controls">
3320        </clone>
3321        <clone entry="android.jpeg.gpsCoordinates" kind="controls">
3322        </clone>
3323        <clone entry="android.jpeg.gpsProcessingMethod"
3324        kind="controls"></clone>
3325        <clone entry="android.jpeg.gpsTimestamp" kind="controls">
3326        </clone>
3327        <clone entry="android.jpeg.orientation" kind="controls">
3328        </clone>
3329        <clone entry="android.jpeg.quality" kind="controls">
3330        </clone>
3331        <entry name="size" type="int32">
3332          <description>The size of the compressed JPEG image, in
3333          bytes</description>
3334          <range>&amp;gt;= 0</range>
3335          <details>If no JPEG output is produced for the request,
3336          this must be 0.
3337
3338          Otherwise, this describes the real size of the compressed
3339          JPEG image placed in the output stream.  More specifically,
3340          if android.jpeg.maxSize = 1000000, and a specific capture
3341          has android.jpeg.size = 500000, then the output buffer from
3342          the JPEG stream will be 1000000 bytes, of which the first
3343          500000 make up the real data.</details>
3344          <tag id="FUTURE" />
3345        </entry>
3346        <clone entry="android.jpeg.thumbnailQuality"
3347        kind="controls"></clone>
3348        <clone entry="android.jpeg.thumbnailSize" kind="controls">
3349        </clone>
3350      </dynamic>
3351    </section>
3352    <section name="lens">
3353      <controls>
3354        <entry name="aperture" type="float" visibility="public" hwlevel="full">
3355          <description>The desired lens aperture size, as a ratio of lens focal length to the
3356          effective aperture diameter.</description>
3357          <units>The f-number (f/N)</units>
3358          <range>android.lens.info.availableApertures</range>
3359          <details>Setting this value is only supported on the camera devices that have a variable
3360          aperture lens.
3361
3362          When this is supported and android.control.aeMode is OFF,
3363          this can be set along with android.sensor.exposureTime,
3364          android.sensor.sensitivity, and android.sensor.frameDuration
3365          to achieve manual exposure control.
3366
3367          The requested aperture value may take several frames to reach the
3368          requested value; the camera device will report the current (intermediate)
3369          aperture size in capture result metadata while the aperture is changing.
3370          While the aperture is still changing, android.lens.state will be set to MOVING.
3371
3372          When this is supported and android.control.aeMode is one of
3373          the ON modes, this will be overridden by the camera device
3374          auto-exposure algorithm, the overridden values are then provided
3375          back to the user in the corresponding result.</details>
3376          <tag id="V1" />
3377        </entry>
3378        <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
3379          <description>
3380          The desired setting for the lens neutral density filter(s).
3381          </description>
3382          <units>Exposure Value (EV)</units>
3383          <range>android.lens.info.availableFilterDensities</range>
3384          <details>
3385          This control will not be supported on most camera devices.
3386
3387          Lens filters are typically used to lower the amount of light the
3388          sensor is exposed to (measured in steps of EV). As used here, an EV
3389          step is the standard logarithmic representation, which is
3390          non-negative and inversely proportional to the amount of light
3391          hitting the sensor.  For example, setting this to 0 would result
3392          in no reduction of the incoming light, and setting this to 2 would
3393          mean that the filter is set to reduce incoming light by two stops
3394          (allowing 1/4 of the prior amount of light to the sensor).
3395
3396          It may take several frames before the lens filter density changes
3397          to the requested value. While the filter density is still changing,
3398          android.lens.state will be set to MOVING.
3399          </details>
3400          <tag id="V1" />
3401        </entry>
3402        <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
3403          <description>
3404          The desired lens focal length; used for optical zoom.
3405          </description>
3406          <units>Millimeters</units>
3407          <range>android.lens.info.availableFocalLengths</range>
3408          <details>
3409          This setting controls the physical focal length of the camera
3410          device's lens. Changing the focal length changes the field of
3411          view of the camera device, and is usually used for optical zoom.
3412
3413          Like android.lens.focusDistance and android.lens.aperture, this
3414          setting won't be applied instantaneously, and it may take several
3415          frames before the lens can change to the requested focal length.
3416          While the focal length is still changing, android.lens.state will
3417          be set to MOVING.
3418
3419          Optical zoom will not be supported on most devices.
3420          </details>
3421          <tag id="V1" />
3422        </entry>
3423        <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
3424          <description>Desired distance to plane of sharpest focus,
3425          measured from frontmost surface of the lens.</description>
3426          <units>See android.lens.info.focusDistanceCalibration for details</units>
3427          <range>&amp;gt;= 0</range>
3428          <details>
3429          This control can be used for setting manual focus, on devices that support
3430          the MANUAL_SENSOR capability and have a variable-focus lens (see
3431          android.lens.info.minimumFocusDistance).
3432
3433          A value of `0.0f` means infinity focus. The value set will be clamped to
3434          `[0.0f, android.lens.info.minimumFocusDistance]`.
3435
3436          Like android.lens.focalLength, this setting won't be applied
3437          instantaneously, and it may take several frames before the lens
3438          can move to the requested focus distance. While the lens is still moving,
3439          android.lens.state will be set to MOVING.
3440
3441          LEGACY devices support at most setting this to `0.0f`
3442          for infinity focus.
3443          </details>
3444          <tag id="BC" />
3445          <tag id="V1" />
3446        </entry>
3447        <entry name="opticalStabilizationMode" type="byte" visibility="public"
3448        enum="true" hwlevel="limited">
3449          <enum>
3450            <value>OFF
3451              <notes>Optical stabilization is unavailable.</notes>
3452            </value>
3453            <value optional="true">ON
3454              <notes>Optical stabilization is enabled.</notes>
3455            </value>
3456          </enum>
3457          <description>
3458          Sets whether the camera device uses optical image stabilization (OIS)
3459          when capturing images.
3460          </description>
3461          <range>android.lens.info.availableOpticalStabilization</range>
3462          <details>
3463          OIS is used to compensate for motion blur due to small
3464          movements of the camera during capture. Unlike digital image
3465          stabilization (android.control.videoStabilizationMode), OIS
3466          makes use of mechanical elements to stabilize the camera
3467          sensor, and thus allows for longer exposure times before
3468          camera shake becomes apparent.
3469
3470          Switching between different optical stabilization modes may take several
3471          frames to initialize; the camera device will report the current mode in
3472          capture result metadata. For example, when "ON" mode is requested, the
3473          optical stabilization mode in the first several capture results may still
3474          be "OFF", and it will become "ON" when the initialization is done.
3475
3476          If a camera device supports both OIS and digital image stabilization
3477          (android.control.videoStabilizationMode), turning both modes on may produce undesirable
3478          interaction, so it is recommended not to enable both at the same time.
3479
3480          Not all devices will support OIS; see
3481          android.lens.info.availableOpticalStabilization for
3482          available controls.
3483          </details>
3484          <tag id="V1" />
3485        </entry>
3486      </controls>
3487      <static>
3488        <namespace name="info">
3489          <entry name="availableApertures" type="float" visibility="public"
3490          container="array" hwlevel="full">
3491            <array>
3492              <size>n</size>
3493            </array>
3494            <description>List of aperture size values for android.lens.aperture that are
3495            supported by this camera device.</description>
3496            <units>The aperture f-number</units>
3497            <details>If the camera device doesn't support a variable lens aperture,
3498            this list will contain only one value, which is the fixed aperture size.
3499
3500            If the camera device supports a variable aperture, the aperture values
3501            in this list will be sorted in ascending order.</details>
3502            <tag id="V1" />
3503          </entry>
3504          <entry name="availableFilterDensities" type="float" visibility="public"
3505          container="array" hwlevel="full">
3506            <array>
3507              <size>n</size>
3508            </array>
3509            <description>
3510            List of neutral density filter values for
3511            android.lens.filterDensity that are supported by this camera device.
3512            </description>
3513            <units>Exposure value (EV)</units>
3514            <range>
3515            Values are &amp;gt;= 0
3516            </range>
3517            <details>
3518            If a neutral density filter is not supported by this camera device,
3519            this list will contain only 0. Otherwise, this list will include every
3520            filter density supported by the camera device, in ascending order.
3521            </details>
3522            <tag id="V1" />
3523          </entry>
3524          <entry name="availableFocalLengths" type="float" visibility="public"
3525          type_notes="The list of available focal lengths"
3526          container="array" hwlevel="legacy">
3527            <array>
3528              <size>n</size>
3529            </array>
3530            <description>
3531            List of focal lengths for android.lens.focalLength that are supported by this camera
3532            device.
3533            </description>
3534            <units>Millimeters</units>
3535            <range>
3536            Values are &amp;gt; 0
3537            </range>
3538            <details>
3539            If optical zoom is not supported, this list will only contain
3540            a single value corresponding to the fixed focal length of the
3541            device. Otherwise, this list will include every focal length supported
3542            by the camera device, in ascending order.
3543            </details>
3544            <tag id="BC" />
3545            <tag id="V1" />
3546          </entry>
3547          <entry name="availableOpticalStabilization" type="byte"
3548          visibility="public" type_notes="list of enums" container="array"
3549          typedef="enumList" hwlevel="limited">
3550            <array>
3551              <size>n</size>
3552            </array>
3553            <description>
3554            List of optical image stabilization (OIS) modes for
3555            android.lens.opticalStabilizationMode that are supported by this camera device.
3556            </description>
3557            <range>Any value listed in android.lens.opticalStabilizationMode</range>
3558            <details>
3559            If OIS is not supported by a given camera device, this list will
3560            contain only OFF.
3561            </details>
3562            <tag id="V1" />
3563          </entry>
3564          <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
3565                 hwlevel="limited">
3566            <description>Hyperfocal distance for this lens.</description>
3567            <units>See android.lens.info.focusDistanceCalibration for details</units>
3568            <range>If lens is fixed focus, &amp;gt;= 0. If lens has focuser unit, the value is
3569            within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
3570            <details>
3571            If the lens is not fixed focus, the camera device will report this
3572            field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
3573            </details>
3574          </entry>
3575          <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
3576                 hwlevel="limited">
3577            <description>Shortest distance from frontmost surface
3578            of the lens that can be brought into sharp focus.</description>
3579            <units>See android.lens.info.focusDistanceCalibration for details</units>
3580            <range>&amp;gt;= 0</range>
3581            <details>If the lens is fixed-focus, this will be
3582            0.</details>
3583            <hal_details>Mandatory for FULL devices; LIMITED devices
3584            must always set this value to 0 for fixed-focus lenses, and may omit
3585            the minimum focus distance otherwise.
3586
3587            This field is also mandatory for all devices advertising
3588            the MANUAL_SENSOR capability.</hal_details>
3589            <tag id="V1" />
3590          </entry>
3591          <entry name="shadingMapSize" type="int32" visibility="ndk_public"
3592                 type_notes="width and height (N, M) of lens shading map provided by the camera device."
3593                 container="array" typedef="size" hwlevel="full">
3594            <array>
3595              <size>2</size>
3596            </array>
3597            <description>Dimensions of lens shading map.</description>
3598            <range>Both values &amp;gt;= 1</range>
3599            <details>
3600            The map should be on the order of 30-40 rows and columns, and
3601            must be smaller than 64x64.
3602            </details>
3603            <tag id="V1" />
3604          </entry>
3605          <entry name="focusDistanceCalibration" type="byte" visibility="public"
3606                 enum="true" hwlevel="limited">
3607            <enum>
3608              <value>UNCALIBRATED
3609                <notes>
3610                The lens focus distance is not accurate, and the units used for
3611                android.lens.focusDistance do not correspond to any physical units.
3612
3613                Setting the lens to the same focus distance on separate occasions may
3614                result in a different real focus distance, depending on factors such
3615                as the orientation of the device, the age of the focusing mechanism,
3616                and the device temperature. The focus distance value will still be
3617                in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
3618                represents the farthest focus.
3619                </notes>
3620              </value>
3621              <value>APPROXIMATE
3622                <notes>
3623                The lens focus distance is measured in diopters.
3624
3625                However, setting the lens to the same focus distance
3626                on separate occasions may result in a different real
3627                focus distance, depending on factors such as the
3628                orientation of the device, the age of the focusing
3629                mechanism, and the device temperature.
3630                </notes>
3631              </value>
3632              <value>CALIBRATED
3633                <notes>
3634                The lens focus distance is measured in diopters, and
3635                is calibrated.
3636
3637                The lens mechanism is calibrated so that setting the
3638                same focus distance is repeatable on multiple
3639                occasions with good accuracy, and the focus distance
3640                corresponds to the real physical distance to the plane
3641                of best focus.
3642                </notes>
3643              </value>
3644            </enum>
3645            <description>The lens focus distance calibration quality.</description>
3646            <details>
3647            The lens focus distance calibration quality determines the reliability of
3648            focus related metadata entries, i.e. android.lens.focusDistance,
3649            android.lens.focusRange, android.lens.info.hyperfocalDistance, and
3650            android.lens.info.minimumFocusDistance.
3651
3652            APPROXIMATE and CALIBRATED devices report the focus metadata in
3653            units of diopters (1/meter), so `0.0f` represents focusing at infinity,
3654            and increasing positive numbers represent focusing closer and closer
3655            to the camera device. The focus distance control also uses diopters
3656            on these devices.
3657
3658            UNCALIBRATED devices do not use units that are directly comparable
3659            to any real physical measurement, but `0.0f` still represents farthest
3660            focus, and android.lens.info.minimumFocusDistance represents the
3661            nearest focus the device can achieve.
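
            As a minimal illustration (the helper below is hypothetical, not part of the API), on
            APPROXIMATE or CALIBRATED devices a non-zero focus value in diopters corresponds to a
            physical distance of `1 / diopters` meters:

                // Hypothetical helper, valid only for APPROXIMATE/CALIBRATED devices:
                // 0 diopters means infinity focus; otherwise the distance in meters is 1/diopters.
                private static float focusDistanceMeters(float diopters) {
                    return (diopters == 0.0f) ? Float.POSITIVE_INFINITY : 1.0f / diopters;
                }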
3662            </details>
3663            <hal_details>
3664            For devices that advertise APPROXIMATE quality or higher, 0 diopters (infinity
3665            focus) must work. When autofocus is disabled (android.control.afMode == OFF)
3666            and the lens focus distance is set to 0 diopters
3667            (android.lens.focusDistance == 0), the lens will move to focus at infinity
3668            and remain stably focused at infinity even if the device tilts. It may take the
3669            lens some time to move; during the move the lens state should be MOVING and
3670            the output diopter value should be changing toward 0.
3671            </hal_details>
3672          <tag id="V1" />
3673        </entry>
3674        </namespace>
3675        <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
3676          <enum>
3677            <value>FRONT
3678            <notes>
3679              The camera device faces the same direction as the device's screen.
3680            </notes></value>
3681            <value>BACK
3682            <notes>
3683              The camera device faces the opposite direction as the device's screen.
3684            </notes></value>
3685            <value>EXTERNAL
3686            <notes>
3687              The camera device is an external camera, and has no fixed facing relative to the
3688              device's screen.
3689            </notes></value>
3690          </enum>
3691          <description>Direction the camera faces relative to
3692          device screen.</description>
3693        </entry>
3694        <entry name="poseRotation" type="float" visibility="public"
3695               container="array">
3696          <array>
3697            <size>4</size>
3698          </array>
3699          <description>
3700            The orientation of the camera relative to the sensor
3701            coordinate system.
3702          </description>
3703          <units>
3704            Quaternion coefficients
3705          </units>
3706          <details>
3707            The four coefficients that describe the quaternion
3708            rotation from the Android sensor coordinate system to a
3709            camera-aligned coordinate system where the X-axis is
3710            aligned with the long side of the image sensor, the Y-axis
3711            is aligned with the short side of the image sensor, and
3712            the Z-axis is aligned with the optical axis of the sensor.
3713
3714            To convert from the quaternion coefficients `(x,y,z,w)`
3715            to the axis of rotation `(a_x, a_y, a_z)` and rotation
3716            amount `theta`, the following formulas can be used:
3717
3718                 theta = 2 * acos(w)
3719                a_x = x / sin(theta/2)
3720                a_y = y / sin(theta/2)
3721                a_z = z / sin(theta/2)
3722
3723            To create a 3x3 rotation matrix that applies the rotation
3724            defined by this quaternion, the following matrix can be
3725            used:
3726
3727                R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
3728                           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
3729                           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
3730
3731             This matrix can then be used to apply the rotation to a
3732             column vector point with
3733
3734               `p' = Rp`
3735
3736             where `p` is in the device sensor coordinate system, and
3737             `p'` is in the camera-oriented coordinate system.
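
            For illustration only (the helper name is hypothetical), the rotation matrix above can
            be built from the reported coefficients `(x,y,z,w)` and applied to a point as follows:

                // Hypothetical helper: build R from the quaternion (x,y,z,w) and rotate a point p
                // from the Android sensor coordinate system into the camera-aligned frame.
                private static float[] rotateSensorToCamera(float[] q, float[] p) {
                    float x = q[0], y = q[1], z = q[2], w = q[3];
                    float[][] r = {
                        { 1 - 2*y*y - 2*z*z,     2*x*y - 2*z*w,     2*x*z + 2*y*w },
                        {     2*x*y + 2*z*w, 1 - 2*x*x - 2*z*z,     2*y*z - 2*x*w },
                        {     2*x*z - 2*y*w,     2*y*z + 2*x*w, 1 - 2*x*x - 2*y*y },
                    };
                    return new float[] {
                        r[0][0] * p[0] + r[0][1] * p[1] + r[0][2] * p[2],
                        r[1][0] * p[0] + r[1][1] * p[1] + r[1][2] * p[2],
                        r[2][0] * p[0] + r[2][1] * p[1] + r[2][2] * p[2],
                    };
                }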
3738          </details>
3739          <tag id="DEPTH" />
3740        </entry>
3741        <entry name="poseTranslation" type="float" visibility="public"
3742               container="array">
3743          <array>
3744            <size>3</size>
3745          </array>
3746          <description>Position of the camera optical center.</description>
3747          <units>Meters</units>
3748          <details>
3749            The position of the camera device's lens optical center,
3750            as a three-dimensional vector `(x,y,z)`, relative to the
3751            optical center of the largest camera device facing in the
3752            same direction as this camera, in the {@link
3753            android.hardware.SensorEvent Android sensor coordinate
3754            axes}. Note that only the axis definitions are shared with
3755            the sensor coordinate system, but not the origin.
3756
3757            If this device is the largest or only camera device with a
3758            given facing, then this position will be `(0, 0, 0)`; a
3759            camera device with a lens optical center located 3 cm from
3760            the main sensor along the +X axis (to the right from the
3761            user's perspective) will report `(0.03, 0, 0)`.
3762
3763            To transform pixel coordinates between two cameras
3764            facing the same direction, first the source camera
3765            android.lens.radialDistortion must be corrected for.  Then
3766            the source camera android.lens.intrinsicCalibration needs
3767            to be applied, followed by the android.lens.poseRotation
3768            of the source camera, the translation of the source camera
3769            relative to the destination camera, the
3770            android.lens.poseRotation of the destination camera, and
3771            finally the inverse of android.lens.intrinsicCalibration
3772            of the destination camera. This obtains a
3773            radial-distortion-free coordinate in the destination
3774            camera pixel coordinates.
3775
3776            To compare this against a real image from the destination
3777            camera, the destination camera image then needs to be
3778            corrected for radial distortion before comparison or
3779            sampling.
3780          </details>
3781          <tag id="DEPTH" />
3782        </entry>
3783      </static>
3784      <dynamic>
3785        <clone entry="android.lens.aperture" kind="controls">
3786          <tag id="V1" />
3787        </clone>
3788        <clone entry="android.lens.filterDensity" kind="controls">
3789          <tag id="V1" />
3790        </clone>
3791        <clone entry="android.lens.focalLength" kind="controls">
3792          <tag id="BC" />
3793        </clone>
3794        <clone entry="android.lens.focusDistance" kind="controls">
3795          <details>Should be zero for fixed-focus cameras</details>
3796          <tag id="BC" />
3797        </clone>
3798        <entry name="focusRange" type="float" visibility="public"
3799        type_notes="Range of scene distances that are in focus"
3800        container="array" typedef="pairFloatFloat" hwlevel="limited">
3801          <array>
3802            <size>2</size>
3803          </array>
3804          <description>The range of scene distances that are in
3805          sharp focus (depth of field).</description>
3806          <units>A pair of focus distances in diopters: (near,
3807          far); see android.lens.info.focusDistanceCalibration for details.</units>
3808          <range>&amp;gt;=0</range>
3809          <details>If variable focus is not supported, the camera device can still report a
3810          fixed depth of field range.</details>
3811          <tag id="BC" />
3812        </entry>
3813        <clone entry="android.lens.opticalStabilizationMode"
3814        kind="controls">
3815          <tag id="V1" />
3816        </clone>
3817        <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
3818          <enum>
3819            <value>STATIONARY
3820              <notes>
3821              The lens parameters (android.lens.focalLength, android.lens.focusDistance,
3822              android.lens.filterDensity and android.lens.aperture) are not changing.
3823              </notes>
3824            </value>
3825            <value>MOVING
3826              <notes>
3827              One or several of the lens parameters
3828              (android.lens.focalLength, android.lens.focusDistance,
3829              android.lens.filterDensity or android.lens.aperture) is
3830              currently changing.
3831              </notes>
3832            </value>
3833          </enum>
3834          <description>Current lens status.</description>
3835          <details>
3836          For lens parameters android.lens.focalLength, android.lens.focusDistance,
3837          android.lens.filterDensity and android.lens.aperture, when changes are requested,
3838          they may take several frames to reach the requested values. This state indicates
3839          the current status of the lens parameters.
3840
3841          When the state is STATIONARY, the lens parameters are not changing. This could be
3842          either because the parameters are all fixed, or because the lens has had enough
3843          time to reach the most recently-requested values.
3844          If all of these lens parameters are unchangeable for a camera device, as listed below:
3845
3846          * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
3847          android.lens.focusDistance parameter will always be 0.
3848          * Fixed focal length (android.lens.info.availableFocalLengths contains a single value),
3849          which means the optical zoom is not supported.
3850          * No ND filter (android.lens.info.availableFilterDensities contains only 0).
3851          * Fixed aperture (android.lens.info.availableApertures contains a single value).
3852
3853          Then this state will always be STATIONARY.
3854
3855          When the state is MOVING, it indicates that at least one of the lens parameters
3856          is changing.
3857          </details>
3858          <tag id="V1" />
3859        </entry>
3860        <clone entry="android.lens.poseRotation" kind="static">
3861        </clone>
3862        <clone entry="android.lens.poseTranslation" kind="static">
3863        </clone>
3864      </dynamic>
3865      <static>
3866        <entry name="intrinsicCalibration" type="float" visibility="public"
3867               container="array">
3868          <array>
3869            <size>5</size>
3870          </array>
3871          <description>
3872            The parameters for this camera device's intrinsic
3873            calibration.
3874          </description>
3875          <units>
3876            Pixels in the
3877            android.sensor.info.preCorrectionActiveArraySize
3878            coordinate system.
3879          </units>
3880          <details>
3881            The five calibration parameters that describe the
3882            transform from camera-centric 3D coordinates to sensor
3883            pixel coordinates:
3884
3885                [f_x, f_y, c_x, c_y, s]
3886
3887            Where `f_x` and `f_y` are the horizontal and vertical
3888            focal lengths, `[c_x, c_y]` is the position of the optical
3889            axis, and `s` is a skew parameter for the sensor plane not
3890            being aligned with the lens plane.
3891
3892            These are typically used within a transformation matrix K:
3893
3894                K = [ f_x,   s, c_x,
3895                        0, f_y, c_y,
3896                        0,   0,   1 ]
3897
3898            which can then be combined with the camera pose rotation
3899            `R` and translation `t` (android.lens.poseRotation and
3900            android.lens.poseTranslation, respectively) to calculate the
3901            complete transform from world coordinates to pixel
3902            coordinates:
3903
3904                P = [ K 0 ] * [ R t ]
3905                    [ 0 1 ]   [ 0 1 ]
3906
3907            and with `p_w` being a point in the world coordinate system
3908            and `p_s` being a point in the camera active pixel array
3909            coordinate system, and with the mapping including the
3910            homogeneous division by z:
3911
3912                 p_h = (x_h, y_h, z_h) = P p_w
3913                p_s = p_h / z_h
3914
3915            so `[x_s, y_s]` are the pixel coordinates of the world
3916            point, `z_s = 1`, and `w_s` is a measurement of disparity
3917            (depth) in pixel coordinates.
3918
3919            Note that the coordinate system for this transform is the
3920            android.sensor.info.preCorrectionActiveArraySize system,
3921            where `(0,0)` is the top-left of the
3922            preCorrectionActiveArraySize rectangle. Once the pose and
3923            intrinsic calibration transforms have been applied to a
3924            world point, then the android.lens.radialDistortion
3925            transform needs to be applied, and the result adjusted to
3926            be in the android.sensor.info.activeArraySize coordinate
3927            system (where `(0, 0)` is the top-left of the
3928            activeArraySize rectangle), to determine the final pixel
3929            coordinate of the world point for processed (non-RAW)
3930            output buffers.
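
            As an illustrative sketch only (the helper name and its matrix-form inputs are
            assumptions; the pose is reported as a quaternion and a translation vector), the
            projection above, before the radial distortion and active-array adjustment, could be
            written as:

                // Hypothetical helper: project a world point p_w using intrinsics
                // [f_x, f_y, c_x, c_y, s], a 3x3 rotation matrix R and a translation t,
                // returning preCorrectionActiveArraySize pixel coordinates [x_s, y_s].
                private static float[] projectWorldPoint(float[] k, float[][] r, float[] t,
                        float[] pW) {
                    float fx = k[0], fy = k[1], cx = k[2], cy = k[3], s = k[4];
                    // Camera-frame point: p_c = R * p_w + t
                    float pcx = r[0][0] * pW[0] + r[0][1] * pW[1] + r[0][2] * pW[2] + t[0];
                    float pcy = r[1][0] * pW[0] + r[1][1] * pW[1] + r[1][2] * pW[2] + t[1];
                    float pcz = r[2][0] * pW[0] + r[2][1] * pW[1] + r[2][2] * pW[2] + t[2];
                    // Apply K, then perform the homogeneous division by z_h
                    float xH = fx * pcx + s * pcy + cx * pcz;
                    float yH = fy * pcy + cy * pcz;
                    float zH = pcz;
                    return new float[] { xH / zH, yH / zH };
                }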
3931          </details>
3932          <tag id="DEPTH" />
3933        </entry>
3934        <entry name="radialDistortion" type="float" visibility="public"
3935               container="array">
3936          <array>
3937            <size>6</size>
3938          </array>
3939          <description>
3940            The correction coefficients to correct for this camera device's
3941            radial and tangential lens distortion.
3942          </description>
3943          <units>
3944            Unitless coefficients.
3945          </units>
3946          <details>
3947            Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
3948            kappa_3]` and two tangential distortion coefficients
3949            `[kappa_4, kappa_5]` that can be used to correct the
3950            lens's geometric distortion with the mapping equations:
3951
3952                 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
3953                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
3954                 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
3955                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
3956
3957            Here, `[x_c, y_c]` are the coordinates to sample in the
3958            input image that correspond to the pixel values in the
3959            corrected image at the coordinate `[x_i, y_i]`:
3960
3961                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
3962
3963            The pixel coordinates are defined in a normalized
3964            coordinate system related to the
3965            android.lens.intrinsicCalibration calibration fields.
3966            Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
3967            lens optical center `[c_x, c_y]`. The maximum magnitudes
3968            of both x and y coordinates are normalized to be 1 at the
3969            edge further from the optical center, so the range
3970            for both dimensions is `-1 &lt;= x &lt;= 1`.
3971
3972            Finally, `r` represents the radial distance from the
3973            optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
3974            is therefore no larger than `|r| &lt;= sqrt(2)`.
3975
3976            The distortion model used is the Brown-Conrady model.
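
            As an illustration only (the helper name is hypothetical), the mapping equations above
            can be evaluated directly from the six reported coefficients:

                // Hypothetical helper: given [kappa_0 .. kappa_5] and a normalized corrected
                // coordinate (x_i, y_i), compute the input-image sample position (x_c, y_c).
                private static float[] distortedSamplePosition(float[] kappa, float xI, float yI) {
                    float r2 = xI * xI + yI * yI;
                    float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2
                            + kappa[3] * r2 * r2 * r2;
                    float xC = xI * radial + kappa[4] * (2 * xI * yI) + kappa[5] * (r2 + 2 * xI * xI);
                    float yC = yI * radial + kappa[5] * (2 * xI * yI) + kappa[4] * (r2 + 2 * yI * yI);
                    return new float[] { xC, yC };
                }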
3977          </details>
3978          <tag id="DEPTH" />
3979        </entry>
3980      </static>
3981      <dynamic>
3982        <clone entry="android.lens.intrinsicCalibration" kind="static">
3983        </clone>
3984        <clone entry="android.lens.radialDistortion" kind="static">
3985        </clone>
3986      </dynamic>
3987    </section>
3988    <section name="noiseReduction">
3989      <controls>
3990        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
3991          <enum>
3992            <value>OFF
3993            <notes>No noise reduction is applied.</notes></value>
3994            <value>FAST
3995            <notes>Noise reduction is applied without reducing frame rate relative to sensor
3996            output. It may be the same as OFF if noise reduction will reduce frame rate
3997            relative to sensor output.</notes></value>
3998            <value>HIGH_QUALITY
3999            <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
4000            rate relative to sensor output.</notes></value>
4001            <value optional="true">MINIMAL
4002            <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
4003            sensor output. </notes></value>
4004            <value optional="true">ZERO_SHUTTER_LAG
4005
4006            <notes>Noise reduction is applied at different levels for different output streams,
4007            based on resolution. Streams at maximum recording resolution (see {@link
4008            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
4009            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
4010            noise reduction applied (if MINIMAL is not supported). The degree of noise reduction
4011            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
4012            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
4013            quality may improve from FAST).
4014
4015            This mode is intended to be used by applications operating in a zero-shutter-lag mode
4016            with YUV or PRIVATE reprocessing, where the application continuously captures
4017            high-resolution intermediate buffers into a circular buffer, from which a final image is
4018            produced via reprocessing when a user takes a picture.  For such a use case, the
4019            high-resolution buffers must not have noise reduction applied to maximize efficiency of
4020            preview and to avoid over-applying noise filtering when reprocessing, while
4021            low-resolution buffers (used for recording or preview, generally) need noise reduction
4022            applied for reasonable preview quality.
4023
4024            This mode is guaranteed to be supported by devices that support either the
4025            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
4026            (android.request.availableCapabilities lists either of those capabilities) and it will
4027            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
4028            </notes></value>
4029          </enum>
4030          <description>Mode of operation for the noise reduction algorithm.</description>
4031          <range>android.noiseReduction.availableNoiseReductionModes</range>
4032          <details>The noise reduction algorithm attempts to improve image quality by removing
4033          excessive noise added by the capture process, especially in dark conditions.
4034
4035          OFF means no noise reduction will be applied by the camera device, for both raw and
4036          YUV domain.
4037
4038          MINIMAL means that only basic sensor raw domain noise reduction is enabled, to remove
4039          demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
4040          This mode is optional and may not be supported by all devices. The application should check
4041          android.noiseReduction.availableNoiseReductionModes before using it.
4042
4043          FAST/HIGH_QUALITY both mean camera device-determined noise filtering
4044          will be applied. HIGH_QUALITY mode indicates that the camera device
4045          will use the highest-quality noise filtering algorithms,
4046          even if it slows down capture rate. FAST means the camera device will not
4047          slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
4048          MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
4049          Every output stream will have a similar amount of enhancement applied.
4050
4051          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
4052          buffer of high-resolution images during preview and reprocess image(s) from that buffer
4053          into a final capture when triggered by the user. In this mode, the camera device applies
4054          noise reduction to low-resolution streams (below maximum recording resolution) to maximize
4055          preview quality, but does not apply noise reduction to high-resolution streams, since
4056          those will be reprocessed later if necessary.
4057
4058          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
4059          will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
4060          may adjust the noise reduction parameters for best image quality based on the
4061          android.reprocess.effectiveExposureFactor if it is set.
4062          </details>
4063          <hal_details>
4064          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
4065          adjust the internal noise reduction parameters appropriately to get the best quality
4066          images.
4067          </hal_details>
4068          <tag id="V1" />
4069          <tag id="REPROC" />
4070        </entry>
4071        <entry name="strength" type="byte">
4072          <description>Control the amount of noise reduction
4073          applied to the images</description>
4074          <units>1-10; 10 is max noise reduction</units>
4075          <range>1 - 10</range>
4076          <tag id="FUTURE" />
4077        </entry>
4078      </controls>
4079      <static>
4080        <entry name="availableNoiseReductionModes" type="byte" visibility="public"
4081        type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
4082          <array>
4083            <size>n</size>
4084          </array>
4085          <description>
4086          List of noise reduction modes for android.noiseReduction.mode that are supported
4087          by this camera device.
4088          </description>
4089          <range>Any value listed in android.noiseReduction.mode</range>
4090          <details>
4091          Full-capability camera devices will always support OFF and FAST.
4092
4093          Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
4094          ZERO_SHUTTER_LAG.
4095
4096          Legacy-capability camera devices will only support FAST mode.
4097          </details>
4098          <hal_details>
4099          HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
4100          on the camera device, but the underlying implementation can be the same for both modes.
4101          That is, if the highest quality implementation on the camera device does not slow down
4102          capture rate, then FAST and HIGH_QUALITY will generate the same output.
4103          </hal_details>
4104          <tag id="V1" />
4105          <tag id="REPROC" />
4106        </entry>
4107      </static>
4108      <dynamic>
4109        <clone entry="android.noiseReduction.mode" kind="controls">
4110          <tag id="V1" />
4111          <tag id="REPROC" />
4112        </clone>
4113      </dynamic>
4114    </section>
4115    <section name="quirks">
4116      <static>
4117        <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
4118          <description>If set to 1, the camera service does not
4119          scale 'normalized' coordinates with respect to the crop
4120          region. This applies to metering input (a{e,f,wb}Region)
4121          and output (face rectangles).</description>
4122          <details>Normalized coordinates refer to those in the
4123          (-1000,1000) range mentioned in the
4124          android.hardware.Camera API.
4125
4126          HAL implementations should instead always use and emit
4127          sensor array-relative coordinates for all region data. Does
4128          not need to be listed in static metadata. Support will be
4129          removed in future versions of camera service.</details>
4130        </entry>
4131        <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
4132          <description>If set to 1, then the camera service always
4133          switches to FOCUS_MODE_AUTO before issuing an AF
4134          trigger.</description>
4135          <details>HAL implementations should implement AF trigger
4136          modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
4137          CONTINUOUS_PICTURE modes instead of using this flag. Does
4138          not need to be listed in static metadata. Support will be
4139          removed in future versions of camera service.</details>
4140        </entry>
4141        <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
4142          <description>If set to 1, the camera service uses
4143          CAMERA2_PIXEL_FORMAT_ZSL instead of
4144          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
4145          shutter lag stream</description>
4146          <details>HAL implementations should use gralloc usage flags
4147          to determine that a stream will be used for
4148          zero-shutter-lag, instead of relying on an explicit
4149          format setting. Does not need to be listed in static
4150          metadata. Support will be removed in future versions of
4151          camera service.</details>
4152        </entry>
4153        <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
4154          <description>
4155          If set to 1, the HAL will always split result
4156          metadata for a single capture into multiple buffers,
4157          returned using multiple process_capture_result calls.
4158          </description>
4159          <details>
4160          Does not need to be listed in static
4161          metadata. Support for partial results will be reworked in
4162          future versions of camera service. This quirk will stop
4163          working at that point; DO NOT USE without careful
4164          consideration of future support.
4165          </details>
4166          <hal_details>
4167          Refer to `camera3_capture_result::partial_result`
4168          for information on how to implement partial results.
4169          </hal_details>
4170        </entry>
4171      </static>
4172      <dynamic>
4173        <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
4174          <enum>
4175            <value>FINAL
4176            <notes>The last or only metadata result buffer
4177            for this capture.</notes>
4178            </value>
4179            <value>PARTIAL
4180            <notes>A partial buffer of result metadata for this
4181            capture. More result buffers for this capture will be sent
4182            by the camera device, the last of which will be marked
4183            FINAL.</notes>
4184            </value>
4185          </enum>
4186          <description>
4187          Whether a result given to the framework is the
4188          final one for the capture, or only a partial that contains a
4189          subset of the full set of dynamic metadata
4190          values.</description>
4191          <range>Optional. Default value is FINAL.</range>
4192          <details>
4193          The entries in the result metadata buffers for a
4194          single capture may not overlap, except for this entry. The
4195          FINAL buffers must retain FIFO ordering relative to the
4196          requests that generate them, so the FINAL buffer for frame 3 must
4197          always be sent to the framework after the FINAL buffer for frame 2, and
4198          before the FINAL buffer for frame 4. PARTIAL buffers may be returned
4199          in any order relative to other frames, but all PARTIAL buffers for a given
4200          capture must arrive before the FINAL buffer for that capture. This entry may
4201          only be used by the camera device if quirks.usePartialResult is set to 1.
4202          </details>
4203          <hal_details>
4204          Refer to `camera3_capture_result::partial_result`
4205          for information on how to implement partial results.
4206          </hal_details>
4207        </entry>
4208      </dynamic>
4209    </section>
4210    <section name="request">
4211      <controls>
4212        <entry name="frameCount" type="int32" visibility="system" deprecated="true">
4213          <description>A frame counter set by the framework. Must
4214          be maintained unchanged in the output frame. This value monotonically
4215          increases with every new result (that is, each new result has a unique
4216          frameCount value).
4217          </description>
4218          <units>incrementing integer</units>
4219          <range>Any int.</range>
4220        </entry>
4221        <entry name="id" type="int32" visibility="hidden">
4222          <description>An application-specified ID for the current
4223          request. Must be maintained unchanged in the output
4224          frame.</description>
4225          <units>arbitrary integer assigned by application</units>
4226          <range>Any int</range>
4227          <tag id="V1" />
4228        </entry>
4229        <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
4230               container="array">
4231          <array>
4232            <size>n</size>
4233          </array>
4234          <description>Lists which camera reprocess stream is used
4235          as the source of reprocessing data.</description>
4236          <units>List of camera reprocess stream IDs</units>
4237          <range>
4238          Typically only one entry is allowed; it must be a valid reprocess stream ID.
4239          </range>
4240          <details>Only meaningful when android.request.type ==
4241          REPROCESS. Ignored otherwise.</details>
4242          <tag id="HAL2" />
4243        </entry>
4244        <entry name="metadataMode" type="byte" visibility="system"
4245               enum="true">
4246          <enum>
4247            <value>NONE
4248            <notes>No metadata should be produced on output, except
4249            for application-bound buffer data. If no
4250            application-bound streams exist, no frame should be
4251            placed in the output frame queue. If such streams
4252            exist, a frame should be placed on the output queue
4253            with null metadata but with the necessary output buffer
4254            information. Timestamp information should still be
4255            included with any output stream buffers</notes></value>
4256            <value>FULL
4257            <notes>All metadata should be produced. Statistics will
4258            only be produced if they are separately
4259            enabled</notes></value>
4260          </enum>
4261          <description>How much metadata to produce on
4262          output</description>
4263          <tag id="FUTURE" />
4264        </entry>
4265        <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
4266               container="array">
4267          <array>
4268            <size>n</size>
4269          </array>
4270          <description>Lists which camera output streams image data
4271          from this capture must be sent to</description>
4272          <units>List of camera stream IDs</units>
4273          <range>List must only include streams that have been
4274          created</range>
4275          <details>If no output streams are listed, then the image
4276          data should simply be discarded. The image data must
4277          still be captured for metadata and statistics production,
4278          and the lens and flash must operate as requested.</details>
4279          <tag id="HAL2" />
4280        </entry>
4281        <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
4282          <enum>
4283            <value>CAPTURE
4284            <notes>Capture a new image from the imaging hardware,
4285            and process it according to the
4286            settings</notes></value>
4287            <value>REPROCESS
4288            <notes>Process previously captured data; the
4289            android.request.inputStreams parameter determines the
4290            source reprocessing stream. TODO: Mark dynamic metadata
4291            needed for reprocessing with [RP]</notes></value>
4292          </enum>
4293          <description>The type of the request; either CAPTURE or
4294          REPROCESS. For HAL3, this tag is redundant.
4295          </description>
4296          <tag id="HAL2" />
4297        </entry>
4298      </controls>
4299      <static>
4300        <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
4301               container="array" hwlevel="legacy">
4302          <array>
4303            <size>3</size>
4304          </array>
4305          <description>The maximum numbers of different types of output streams
4306          that can be configured and used simultaneously by a camera device.
4307          </description>
4308          <range>
4309          For processed (and stalling) format streams, &amp;gt;= 1.
4310
4311          For Raw format (either stalling or non-stalling) streams, &amp;gt;= 0.
4312
4313          For processed (but not stalling) format streams, &amp;gt;= 3
4314          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4315          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4316          </range>
4317          <details>
4318          This is a 3 element tuple that contains the max number of output simultaneous
4319          streams for raw sensor, processed (but not stalling), and processed (and stalling)
4320          formats respectively. For example, assuming that JPEG is typically a processed and
4321          stalling stream, if max raw sensor format output stream number is 1, max YUV streams
4322          number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
4323
4324          This lists the upper bound of the number of output streams supported by
4325          the camera device. Using more streams simultaneously may require more hardware and
4326          CPU resources that will consume more power. The image format for an output stream can
4327          be any supported format provided by android.scaler.availableStreamConfigurations.
4328          The formats defined in android.scaler.availableStreamConfigurations can be categorized
4329          into the 3 stream types below:
4330
4331          * Processed (and stalling): any non-RAW format with a stallDurations &amp;gt; 0.
4332            Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.
4333          * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
4334            android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
4335            RAW12}.
4336          * Processed (but not-stalling): any non-RAW format without a stall duration.
4337            Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
4338            {@link android.graphics.ImageFormat#NV21 NV21}, or
4339            {@link android.graphics.ImageFormat#YV12 YV12}.
4340          </details>
4341          <tag id="BC" />
4342        </entry>
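        <!-- Illustrative sketch (not a metadata entry): reading the per-type output
             stream limits from the Java API. Assumes `chars` is an already-obtained
             android.hardware.camera2.CameraCharacteristics for this camera device; the
             three synthetic keys below correspond to the three elements of this tuple.

               int maxRaw          = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
               int maxProc         = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
               int maxProcStalling = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
        -->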
4343        <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true"
4344               hwlevel="legacy">
4345          <description>The maximum number of output streams
4346          that can be configured and used simultaneously by a camera device
4347          for any `RAW` formats.
4348          </description>
4349          <range>
4350          &amp;gt;= 0
4351          </range>
4352          <details>
4353          This value contains the max number of output simultaneous
4354          streams from the raw sensor.
4355
4356          This lists the upper bound of the number of output streams supported by
4357          the camera device. Using more streams simultaneously may require more hardware and
4358          CPU resources that will consume more power. The image format for this kind of output stream can
4359          be any supported `RAW` format provided by android.scaler.streamConfigurationMap.
4360
4361          In particular, a `RAW` format is typically one of:
4362
4363          * {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}
4364          * {@link android.graphics.ImageFormat#RAW10 RAW10}
4365          * {@link android.graphics.ImageFormat#RAW12 RAW12}
4366
4367          LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
4368          never support raw streams.
4369          </details>
4370        </entry>
4371        <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true"
4372               hwlevel="legacy">
4373          <description>The maximum number of output streams
4374          that can be configured and used simultaneously by a camera device
4375          for any processed (but not-stalling) formats.
4376          </description>
4377          <range>
4378          &amp;gt;= 3
4379          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
4380          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
4381          </range>
4382          <details>
4383          This value contains the max number of output simultaneous
4384          streams for any processed (but not-stalling) formats.
4385
4386          This lists the upper bound of the number of output streams supported by
4387          the camera device. Using more streams simultaneously may require more hardware and
4388          CPU resources that will consume more power. The image format for this kind of output stream can
4389          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4390
4391          Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
4392          Typically:
4393
4394          * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}
4395          * {@link android.graphics.ImageFormat#NV21 NV21}
4396          * {@link android.graphics.ImageFormat#YV12 YV12}
4397          * Implementation-defined formats, i.e. {@link
4398            android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
4399
4400          For full guarantees, query {@link
4401          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4402          processed format -- it will return 0 for a non-stalling stream.
4403
4404          LEGACY devices will support at least 2 processing/non-stalling streams.
4405          </details>
4406        </entry>
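        <!-- Illustrative sketch (not a metadata entry): confirming that a candidate format/size
             counts as processed (but not-stalling) by checking its stall duration. Assumes `chars`
             is the CameraCharacteristics instance from the earlier sketch; the map and size types
             come from android.hardware.camera2.params and android.util.

               StreamConfigurationMap map =
                   chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
               for (Size size : map.getOutputSizes(ImageFormat.YUV_420_888)) {
                   long stallNs = map.getOutputStallDuration(ImageFormat.YUV_420_888, size);
                   // stallNs == 0 means this size counts against the non-stalling limit.
               }
        -->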
4407        <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true"
4408               hwlevel="legacy">
4409          <description>The maximum number of output streams
4410          that can be configured and used simultaneously by a camera device
4411          for any processed (and stalling) formats.
4412          </description>
4413          <range>
4414          &amp;gt;= 1
4415          </range>
4416          <details>
4417          This value contains the max number of output simultaneous
4418          streams for any processed (and stalling) formats.
4419
4420          This lists the upper bound of the number of output streams supported by
4421          the camera device. Using more streams simultaneously may require more hardware and
4422          CPU resources that will consume more power. The image format for this kind of output stream can
4423          be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap.
4424
4425          A processed and stalling format is defined as any non-RAW format with a stallDurations
4426          &amp;gt; 0.  Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
4427          stalling format.
4428
4429          For full guarantees, query {@link
4430          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
4431          processed format -- it will return a non-0 value for a stalling stream.
4432
4433          LEGACY devices will support up to 1 processing/stalling stream.
4434          </details>
4435        </entry>
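        <!-- Illustrative sketch (not a metadata entry): the stalling counterpart of the check
             above; a stalling format such as JPEG reports a non-zero stall duration. `map` is the
             StreamConfigurationMap obtained from SCALER_STREAM_CONFIGURATION_MAP as shown earlier.

               Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
               long jpegStallNs = map.getOutputStallDuration(ImageFormat.JPEG, jpegSizes[0]);
               // A value > 0 means streams of this format/size count against the stalling limit.
        -->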
4436        <entry name="maxNumReprocessStreams" type="int32" visibility="system"
4437        deprecated="true" container="array">
4438          <array>
4439            <size>1</size>
4440          </array>
4441          <description>How many reprocessing streams of any type
4442          can be allocated at the same time.</description>
4443          <range>&amp;gt;= 0</range>
4444          <details>
4445          Only used by HAL2.x.
4446
4447          When set to 0, it means no reprocess stream is supported.
4448          </details>
4449          <tag id="HAL2" />
4450        </entry>
4451        <entry name="maxNumInputStreams" type="int32" visibility="public" hwlevel="full">
4452          <description>
4453          The maximum number of input streams of any type
4454          that can be configured and used simultaneously by a camera device.
4455          </description>
4456          <range>
4457          0 or 1.
4458          </range>
4459          <details>When set to 0, it means no input stream is supported.
4460
4461          The image format for an input stream can be any supported format returned by {@link
4462          android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
4463          input stream, there must be at least one output stream configured to receive the
4464          reprocessed images.
4465
4466          When an input stream and some output streams are used in a reprocessing request,
4467          only the input buffer will be used to produce these output stream buffers, and a
4468          new sensor image will not be captured.
4469
4470          For example, in the Zero Shutter Lag (ZSL) still capture use case, the input
4471          stream image format will be PRIVATE, and the associated output stream image format
4472          should be JPEG.
4473          </details>
4474          <hal_details>
4475          For the reprocessing flow and controls, see
4476          hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
4477          </hal_details>
4478          <tag id="REPROC" />
4479        </entry>
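        <!-- Illustrative sketch (not a metadata entry): checking whether reprocessing input is
             available before attempting to configure an input stream. Assumes `chars` and `map`
             are obtained as in the earlier sketches.

               Integer maxInputs = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
               if (maxInputs != null && maxInputs > 0) {
                   int[] inputFormats = map.getInputFormats();
                   // For each input format, getValidOutputFormatsForInput() lists the
                   // output formats that reprocessed buffers can be produced in.
               }
        -->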
4480      </static>
4481      <dynamic>
4482        <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
4483          <description>A frame counter set by the framework. This value monotonically
4484          increases with every new result (that is, each new result has a unique
4485          frameCount value).</description>
4486          <units>count of frames</units>
4487          <range>&amp;gt; 0</range>
4488          <details>Reset on release()</details>
4489        </entry>
4490        <clone entry="android.request.id" kind="controls"></clone>
4491        <clone entry="android.request.metadataMode"
4492        kind="controls"></clone>
4493        <clone entry="android.request.outputStreams"
4494        kind="controls"></clone>
4495        <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
4496          <description>Specifies the number of pipeline stages the frame went
4497          through from when it was exposed to when the final completed result
4498          was available to the framework.</description>
4499          <range>&amp;lt;= android.request.pipelineMaxDepth</range>
4500          <details>Depending on what settings are used in the request, and
4501          what streams are configured, the data may undergo less processing,
4502          and some pipeline stages may be skipped.
4503
4504          See android.request.pipelineMaxDepth for more details.
4505          </details>
4506          <hal_details>
4507          This value must always represent the accurate count of how many
4508          pipeline stages were actually used.
4509          </hal_details>
4510        </entry>
4511      </dynamic>
4512      <static>
4513        <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
4514          <description>Specifies the maximum number of pipeline stages a frame
4515          has to go through from when it's exposed to when it's available
4516          to the framework.</description>
4517          <details>A typical minimum value for this is 2 (one stage to expose,
4518          one stage to read out) from the sensor. The ISP then usually adds
4519          its own stages to do custom HW processing. Further stages may be
4520          added by SW processing.
4521
4522          Depending on what settings are used (e.g. YUV, JPEG) and what
4523          processing is enabled (e.g. face detection), the actual pipeline
4524          depth (specified by android.request.pipelineDepth) may be less than
4525          the max pipeline depth.
4526
4527          A pipeline depth of X stages is equivalent to a pipeline latency of
4528          X frame intervals.
4529
4530          This value will normally be 8 or less; however, for a high speed capture session,
4531          the max pipeline depth will be up to 8 x the size of the high speed capture request list.
4532          </details>
4533          <hal_details>
4534          This value should be 4 or less, except for high speed recording sessions, where the
4535          max batch sizes may be larger than 1.
4536          </hal_details>
4537        </entry>
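        <!-- Illustrative sketch (not a metadata entry): comparing the static maximum pipeline
             depth against the per-frame depth reported in a capture result. Assumes `chars` is the
             device's CameraCharacteristics and `result` is a received CaptureResult.

               byte maxDepth = chars.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
               byte depth    = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
               // depth <= maxDepth; a depth of N stages corresponds to roughly N frame intervals of latency.
        -->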
4538        <entry name="partialResultCount" type="int32" visibility="public" optional="true">
4539          <description>Defines how many sub-components
4540          a result will be composed of.
4541          </description>
4542          <range>&amp;gt;= 1</range>
4543          <details>In order to combat the pipeline latency, partial results
4544          may be delivered to the application layer from the camera device as
4545          soon as they are available.
4546
4547          Optional; defaults to 1. A value of 1 means that partial
4548          results are not supported, and only the final TotalCaptureResult will
4549          be produced by the camera device.
4550
4551          A typical use case for this might be: after requesting an
4552          auto-focus (AF) lock the new AF state might be available 50%
4553          of the way through the pipeline.  The camera device could
4554          then immediately dispatch this state via a partial result to
4555          the application, and the rest of the metadata via later
4556          partial results.
4557          </details>
4558        </entry>
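        <!-- Illustrative sketch (not a metadata entry): consuming partial results when the device
             advertises more than one partial per capture. Assumes `chars` is the device's
             CameraCharacteristics; the keys used are the public camera2 equivalents of this entry.

               Integer partialCount = chars.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT);
               CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
                   @Override
                   public void onCaptureProgressed(CameraCaptureSession session,
                           CaptureRequest request, CaptureResult partialResult) {
                       // Only a subset of keys is present in a partial result; null-check before use.
                       Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
                       if (afState != null) {
                           // React to the early AF state here.
                       }
                   }
               };
        -->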
4559        <entry name="availableCapabilities" type="byte" visibility="public"
4560          enum="true" container="array" hwlevel="legacy">
4561          <array>
4562            <size>n</size>
4563          </array>
4564          <enum>
4565            <value>BACKWARD_COMPATIBLE
4566              <notes>The minimal set of capabilities that every camera
4567                device (regardless of android.info.supportedHardwareLevel)
4568                supports.
4569
4570                This capability is listed by all normal devices, and
4571                indicates that the camera device has a feature set
4572                that's comparable to the baseline requirements for the
4573                older android.hardware.Camera API.
4574
4575                Devices with the DEPTH_OUTPUT capability might not list this
4576                capability, indicating that they support only depth measurement,
4577                not standard color output.
4578              </notes>
4579            </value>
4580            <value optional="true">MANUAL_SENSOR
4581              <notes>
4582              The camera device can be manually controlled (3A algorithms such
4583              as auto-exposure and auto-focus can be bypassed).
4584              The camera device supports basic manual control of the sensor image
4585              acquisition related stages. This means the following controls are
4586              guaranteed to be supported:
4587
4588              * Manual frame duration control
4589                  * android.sensor.frameDuration
4590                  * android.sensor.info.maxFrameDuration
4591              * Manual exposure control
4592                  * android.sensor.exposureTime
4593                  * android.sensor.info.exposureTimeRange
4594              * Manual sensitivity control
4595                  * android.sensor.sensitivity
4596                  * android.sensor.info.sensitivityRange
4597              * Manual lens control (if the lens is adjustable)
4598                  * android.lens.*
4599              * Manual flash control (if a flash unit is present)
4600                  * android.flash.*
4601              * Manual black level locking
4602                  * android.blackLevel.lock
4603              * Auto exposure lock
4604                  * android.control.aeLock
4605
4606              If any of the above 3A algorithms are enabled, then the camera
4607              device will accurately report the values applied by 3A in the
4608              result.
4609
4610              A given camera device may also support additional manual sensor controls,
4611              but this capability only covers the above list of controls.
4612
4613              If this is supported, android.scaler.streamConfigurationMap will
4614              additionally return a min frame duration that is greater than
4615              zero for each supported size-format combination.
4616              </notes>
4617            </value>
4618            <value optional="true">MANUAL_POST_PROCESSING
4619              <notes>
4620              The camera device post-processing stages can be manually controlled.
4621              The camera device supports basic manual control of the image post-processing
4622              stages. This means the following controls are guaranteed to be supported:
4623
4624              * Manual tonemap control
4625                  * android.tonemap.curve
4626                  * android.tonemap.mode
4627                  * android.tonemap.maxCurvePoints
4628                  * android.tonemap.gamma
4629                  * android.tonemap.presetCurve
4630
4631              * Manual white balance control
4632                  * android.colorCorrection.transform
4633                  * android.colorCorrection.gains
4634              * Manual lens shading map control
4635                    * android.shading.mode
4636                    * android.statistics.lensShadingMapMode
4637                    * android.statistics.lensShadingMap
4638                    * android.lens.info.shadingMapSize
4639              * Manual aberration correction control (if aberration correction is supported)
4640                    * android.colorCorrection.aberrationMode
4641                    * android.colorCorrection.availableAberrationModes
4642              * Auto white balance lock
4643                    * android.control.awbLock
4644
4645              If auto white balance is enabled, then the camera device
4646              will accurately report the values applied by AWB in the result.
4647
4648              A given camera device may also support additional post-processing
4649              controls, but this capability only covers the above list of controls.
4650              </notes>
4651            </value>
4652            <value optional="true">RAW
4653              <notes>
4654              The camera device supports outputting RAW buffers and
4655              metadata for interpreting them.
4656
4657              Devices supporting the RAW capability allow both for
4658              saving DNG files, and for direct application processing of
4659              raw sensor images.
4660
4661              * RAW_SENSOR is supported as an output format.
4662              * The maximum available resolution for RAW_SENSOR streams
4663                will match either the value in
4664                android.sensor.info.pixelArraySize or
4665                android.sensor.info.preCorrectionActiveArraySize.
4666              * All DNG-related optional metadata entries are provided
4667                by the camera device.
4668              </notes>
4669            </value>
4670            <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING
4671              <notes>
4672              The camera device supports the Zero Shutter Lag reprocessing use case.
4673
4674              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4675              * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
4676                that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
4677                formats returned by {@link
4678                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
4679                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4680              * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4681                returns a non-empty int[] for each supported input format returned by {@link
4682                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4683              * Each size returned by {@link
4684                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4685                getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
4686                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4687                getOutputSizes(ImageFormat.PRIVATE)}
4688              * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
4689                relative to the sensor's maximum capture rate (at that resolution).
4690              * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
4691                {@link android.graphics.ImageFormat#YUV_420_888} and
4692                {@link android.graphics.ImageFormat#JPEG} formats.
4693              * The maximum available resolution for PRIVATE streams
4694                (both input/output) will match the maximum available
4695                resolution of JPEG streams.
4696              * The static metadata entry android.reprocess.maxCaptureStall is listed by this device.
4697              * Only the below controls are effective for reprocessing requests and
4698                will be present in capture results; other controls in reprocess
4699                requests will be ignored by the camera device.
4700                    * android.jpeg.*
4701                    * android.noiseReduction.mode
4702                    * android.edge.mode
4703              * android.noiseReduction.availableNoiseReductionModes and
4704                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4705              </notes>
4706            </value>
4707            <value optional="true">READ_SENSOR_SETTINGS
4708              <notes>
4709              The camera device supports accurately reporting the sensor settings for many of
4710              the sensor controls while the built-in 3A algorithm is running.  This allows
4711              reporting of sensor settings even when these settings cannot be manually changed.
4712
4713              The values reported for the following controls are guaranteed to be available
4714              in the CaptureResult, including when 3A is enabled:
4715
4716              * Exposure control
4717                  * android.sensor.exposureTime
4718              * Sensitivity control
4719                  * android.sensor.sensitivity
4720              * Lens controls (if the lens is adjustable)
4721                  * android.lens.focusDistance
4722                  * android.lens.aperture
4723
4724              This capability is a subset of the MANUAL_SENSOR control capability, and will
4725              always be included if the MANUAL_SENSOR capability is available.
4726              </notes>
4727            </value>
4728            <value optional="true">BURST_CAPTURE
4729              <notes>
4730              The camera device supports capturing high-resolution images at &gt;= 20 frames per
4731              second, in at least the uncompressed YUV format, when post-processing settings are set
4732              to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
4733              per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
4734              resolution of the device, whichever is smaller.
4735
4736              More specifically, this means that a size matching the camera device's active array
4737              size is listed as a supported size for the {@link
4738              android.graphics.ImageFormat#YUV_420_888} format in either {@link
4739              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
4740              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4741              with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
4742              &lt;= 1/10 s, respectively; and the android.control.aeAvailableTargetFpsRanges entry
4743              lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
4744              for the maximum-size YUV_420_888 format.  If that maximum size is listed in {@link
4745              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
4746              then the list of resolutions for YUV_420_888 from {@link
4747              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
4748              least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
4749              s.
4750
4751              If the device supports the {@link android.graphics.ImageFormat#RAW10} or {@link
4752              android.graphics.ImageFormat#RAW12} formats, then those can also be captured at the same rate
4753              as the maximum-size YUV_420_888 resolution.
4754
4755              If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
4756              as for the YUV_420_888 format also apply to the {@link
4757              android.graphics.ImageFormat#PRIVATE} format.
4758
4759              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
4760              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
4761              are also guaranteed to be `true`, so burst capture with these two locks ON yields
4762              consistent image output.
4763              </notes>
4764            </value>
4765            <value optional="true" ndk_hidden="true">YUV_REPROCESSING
4766              <notes>
4767              The camera device supports the YUV_420_888 reprocessing use case, similar to
4768              PRIVATE_REPROCESSING. This capability requires the camera device to support the
4769              following:
4770
4771              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
4772              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
4773                YUV_420_888 is included in the lists of formats returned by
4774                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
4775                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
4776              * {@link
4777                android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
4778                returns non-empty int[] for each supported input format returned by {@link
4779                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
4780              * Each size returned by {@link
4781                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
4782                getInputSizes(YUV_420_888)} is also included in {@link
4783                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
4784                getOutputSizes(YUV_420_888)}
4785              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
4786                relative to the sensor's maximum capture rate (at that resolution).
4787              * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
4788                {@link android.graphics.ImageFormat#YUV_420_888} and {@link
4789                android.graphics.ImageFormat#JPEG} formats.
4790              * The maximum available resolution for {@link
4791                android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
4792                maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
4793              * The static metadata entry android.reprocess.maxCaptureStall is listed by this device.
4794              * Only the below controls are effective for reprocessing requests and will be present
4795                in capture results. The reprocess requests are from the original capture results that
4796                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
4797                output buffers.  All other controls in the reprocess requests will be ignored by the
4798                camera device.
4799                    * android.jpeg.*
4800                    * android.noiseReduction.mode
4801                    * android.edge.mode
4802                    * android.reprocess.effectiveExposureFactor
4803              * android.noiseReduction.availableNoiseReductionModes and
4804                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
4805              </notes>
4806            </value>
4807            <value optional="true">DEPTH_OUTPUT
4808              <notes>
4809              The camera device can produce depth measurements from its field of view.
4810
4811              This capability requires the camera device to support the following:
4812
4813              * {@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.
4814              * {@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
4815                output format.
4816              * This camera device, and all camera devices with the same android.lens.facing,
4817                will list the following calibration entries in both
4818                {@link android.hardware.camera2.CameraCharacteristics} and
4819                {@link android.hardware.camera2.CaptureResult}:
4820                  - android.lens.poseTranslation
4821                  - android.lens.poseRotation
4822                  - android.lens.intrinsicCalibration
4823                  - android.lens.radialDistortion
4824              * The android.depth.depthIsExclusive entry is listed by this device.
4825              * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
4826                normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
4827                format.
4828
4829              Generally, depth output operates at a slower frame rate than standard color capture,
4830              so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
4831              should be accounted for (see
4832              {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
4833              On a device that supports both depth and color-based output, to enable smooth preview,
4834              using a repeating burst is recommended, where a depth-output target is only included
4835              once every N frames, where N is the ratio between preview output rate and depth output
4836              rate, including depth stall time.
4837              </notes>
4838            </value>
4839            <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
4840              <notes>
4841              The device supports the constrained high speed video recording (frame rate >= 120fps)
4842              use case. The camera device will support high speed capture sessions created by
4843              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
4844              only accepts high speed request lists created by
4845              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
4846
4847              A camera device can still support high speed video streaming by advertising the high speed
4848              FPS ranges in android.control.aeAvailableTargetFpsRanges. In that case, all normal
4849              per-frame capture request control and synchronization requirements will apply to
4850              the high speed FPS ranges, the same as to all other FPS ranges. This capability instead
4851              describes a specialized operating mode with many limitations (see below), which
4852              is targeted only at high speed video recording.
4853
4854              The supported high speed video sizes and fps ranges are specified in
4855              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4856              To get desired output frame rates, the application is only allowed to select video size
4857              and FPS range combinations provided by
4858              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4859              The fps range can be controlled via android.control.aeTargetFpsRange.
4860
4861              In this capability, the camera device will override aeMode, awbMode, and afMode to
4862              ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
4863              controls will be overridden to be FAST. Therefore, no manual control of capture
4864              and post-processing parameters is possible. All other controls operate the
4865              same as when android.control.mode == AUTO. This means that all other
4866              android.control.* fields continue to work, such as
4867
4868              * android.control.aeTargetFpsRange
4869              * android.control.aeExposureCompensation
4870              * android.control.aeLock
4871              * android.control.awbLock
4872              * android.control.effectMode
4873              * android.control.aeRegions
4874              * android.control.afRegions
4875              * android.control.awbRegions
4876              * android.control.afTrigger
4877              * android.control.aePrecaptureTrigger
4878
4879              Outside of android.control.*, the following controls will work:
4880
4881              * android.flash.mode (TORCH mode only, automatic flash for still capture will not
4882              work since aeMode is ON)
4883              * android.lens.opticalStabilizationMode (if it is supported)
4884              * android.scaler.cropRegion
4885              * android.statistics.faceDetectMode (if it is supported)
4886
4887              For the high speed recording use case, the actual maximum supported frame rate may
4888              be lower than what the camera can output, depending on the destination Surfaces for
4889              the image data. For example, if the destination surface is from a video encoder,
4890              the application needs to check whether the video encoder is capable of supporting the
4891              high frame rate for a given video size, or it will end up with a lower recording
4892              frame rate. If the destination surface is from the preview window, the actual preview frame
4893              rate will be bounded by the screen refresh rate.
4894
4895              The camera device will only support up to 2 high speed simultaneous output surfaces
4896              (preview and recording surfaces)
4897              in this mode. The above controls will be effective only if all of the below conditions are true:
4898
4899              * The application creates a camera capture session with no more than 2 surfaces via
4900              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
4901              targeted surfaces must be a preview surface (either from
4902              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
4903              a recording surface (either from {@link android.media.MediaRecorder#getSurface} or
4904              {@link android.media.MediaCodec#createInputSurface}).
4905              * The stream sizes are selected from the sizes reported by
4906              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
4907              * The FPS ranges are selected from
4908              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
4909
4910              When the above conditions are NOT satisfied,
4911              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
4912              will fail.
4913
4914              Switching to an FPS range that has a different maximum FPS may trigger some camera device
4915              reconfigurations, which may introduce extra latency. It is recommended that
4916              the application avoid unnecessary maximum target FPS changes as much as possible
4917              during high speed streaming.
4918              </notes>
4919            </value>
4920          </enum>
4921          <description>List of capabilities that this camera device
4922          advertises as fully supporting.</description>
4923          <details>
4924          A capability is a contract that the camera device makes in order
4925          to be able to satisfy one or more use cases.
4926
4927          Listing a capability guarantees that the whole set of features
4928          required to support a common use case will all be available.
4929
4930          Using a subset of the functionality provided by an unsupported
4931          capability may be possible on a specific camera device implementation;
4932          to do this, query each of android.request.availableRequestKeys,
4933          android.request.availableResultKeys, and
4934          android.request.availableCharacteristicsKeys.
4935
4936          The following capabilities are guaranteed to be available on
4937          android.info.supportedHardwareLevel `==` FULL devices:
4938
4939          * MANUAL_SENSOR
4940          * MANUAL_POST_PROCESSING
4941
4942          Other capabilities may be available on either FULL or LIMITED
4943          devices, but the application should query this key to be sure.
4944          </details>
4945          <hal_details>
4946          Additional constraint details per-capability will be available
4947          in the Compatibility Test Suite.
4948
4949          The minimum baseline requirements for the
4950          BACKWARD_COMPATIBLE capability are not explicitly listed.
4951          Instead, refer to "BC" tags and the camera CTS tests in the
4952          android.hardware.camera2.cts package.
4953
4954          Listed controls that can be either request or result (e.g.
4955          android.sensor.exposureTime) must be available both in the
4956          request and the result in order to be considered to be
4957          capability-compliant.
4958
4959          For example, if the HAL claims to support MANUAL control,
4960          then exposure time must be configurable via the request _and_
4961          the actual exposure applied must be available via
4962          the result.
4963
4964          If MANUAL_SENSOR is omitted, the HAL may choose to omit the
4965          android.scaler.availableMinFrameDurations static property entirely.
4966
4967          For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
4968          hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
4969
4970          Devices that support the MANUAL_SENSOR capability must support the
4971          CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
4972
4973          Devices that support the PRIVATE_REPROCESSING capability or the
4974          YUV_REPROCESSING capability must support the
4975          CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
4976
4977          For DEPTH_OUTPUT, the depth-format keys
4978          android.depth.availableDepthStreamConfigurations,
4979          android.depth.availableDepthMinFrameDurations,
4980          android.depth.availableDepthStallDurations must be available, in
4981          addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
4982          enum notes. The entry android.depth.maxDepthSamples must be available
4983          if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
4984          DEPTH).
4985          </hal_details>
4986        </entry>
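        <!-- Illustrative sketch (not a metadata entry): checking for a specific capability before
             relying on the feature set it guarantees. Assumes `chars` is the device's
             CameraCharacteristics.

               int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
               boolean hasManualSensor = false;
               for (int cap : caps) {
                   if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                       hasManualSensor = true;
                   }
               }
               // Only set manual sensor controls (e.g. SENSOR_EXPOSURE_TIME) when hasManualSensor is true.
        -->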
4987        <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
4988               container="array" hwlevel="legacy">
4989          <array>
4990            <size>n</size>
4991          </array>
4992          <description>A list of all keys that the camera device has available
4993          to use with {@link android.hardware.camera2.CaptureRequest}.</description>
4994
4995          <details>Attempting to set a key into a CaptureRequest that is not
4996          listed here will result in an invalid request and will be rejected
4997          by the camera device.
4998
4999          This field can be used to query the feature set of a camera device
5000          at a more granular level than capabilities. This is especially
5001          important for optional keys that are not listed under any capability
5002          in android.request.availableCapabilities.
5003          </details>
5004          <hal_details>
5005          Vendor tags must not be listed here. Use the vendor tag metadata
5006          extensions C api instead (refer to camera3.h for more details).
5007
5008          Setting/getting vendor tags will be checked against the metadata
5009          vendor extensions API and not against this field.
5010
5011          The HAL must not consume any request tags that are not listed either
5012          here or in the vendor tag list.
5013
5014          The public camera2 API will always make the vendor tags visible
5015          via
5016          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
5017          </hal_details>
5018        </entry>
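        <!-- Illustrative sketch (not a metadata entry): the Java-level equivalent of this check is
             to consult the key list before setting an optional control. Assumes `chars` is the
             device's CameraCharacteristics.

               List<CaptureRequest.Key<?>> requestKeys = chars.getAvailableCaptureRequestKeys();
               if (requestKeys.contains(CaptureRequest.SENSOR_EXPOSURE_TIME)) {
                   // Safe to set a manual exposure time in a CaptureRequest.Builder.
               }
        -->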
5019        <entry name="availableResultKeys" type="int32" visibility="ndk_public"
5020               container="array" hwlevel="legacy">
5021          <array>
5022            <size>n</size>
5023          </array>
5024          <description>A list of all keys that the camera device has available
5025          to use with {@link android.hardware.camera2.CaptureResult}.</description>
5026
5027          <details>Attempting to get a key from a CaptureResult that is not
5028          listed here will always return a `null` value. Getting a key from
5029          a CaptureResult that is listed here will generally never return a `null`
5030          value.
5031
5032          The following keys may return `null` unless they are enabled:
5033
5034          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
5035
5036          (Those sometimes-null keys will nevertheless be listed here
5037          if they are available.)
5038
5039          This field can be used to query the feature set of a camera device
5040          at a more granular level than capabilities. This is especially
5041          important for optional keys that are not listed under any capability
5042          in android.request.availableCapabilities.
5043          </details>
5044          <hal_details>
5045          Tags listed here must always have an entry in the result metadata,
5046          even if the entry has 0 elements. Only array-type tags (e.g. lists,
5047          matrices, strings) are allowed to have 0 elements.
5048
5049          Vendor tags must not be listed here. Use the vendor tag metadata
5050          extensions C api instead (refer to camera3.h for more details).
5051
5052          Setting/getting vendor tags will be checked against the metadata
5053          vendor extensions API and not against this field.
5054
5055          The HAL must not produce any result tags that are not listed either
5056          here or in the vendor tag list.
5057
5058          The public camera2 API will always make the vendor tags visible via {@link
5059          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
5060          </hal_details>
5061        </entry>
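        <!-- Illustrative sketch (not a metadata entry): the corresponding result-side check.
             Assumes `chars` is the device's CameraCharacteristics.

               List<CaptureResult.Key<?>> resultKeys = chars.getAvailableCaptureResultKeys();
               boolean hasShadingMap =
                   resultKeys.contains(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
               // Even when listed, the map is only non-null if the lens shading map mode is ON.
        -->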
5062        <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
5063               container="array" hwlevel="legacy">
5064          <array>
5065            <size>n</size>
5066          </array>
5067          <description>A list of all keys that the camera device has available
5068          to use with {@link android.hardware.camera2.CameraCharacteristics}.</description>
5069          <details>This entry follows the same rules as
5070          android.request.availableResultKeys (except that it applies for
5071          CameraCharacteristics instead of CaptureResult). See above for more
5072          details.
5073          </details>
5074          <hal_details>
5075          Keys listed here must always have an entry in the static info metadata,
5076          even if the entry has 0 elements. Only array-type tags (e.g. lists,
5077          matrices, strings) are allowed to have 0 elements.
5078
5079          Vendor tags must not be listed here. Use the vendor tag metadata
5080          extensions C api instead (refer to camera3.h for more details).
5081
5082          Setting/getting vendor tags will be checked against the metadata
5083          vendor extensions API and not against this field.
5084
5085          The HAL must not have any tags in its static info that are not listed
5086          either here or in the vendor tag list.
5087
5088          The public camera2 API will always make the vendor tags visible
5089          via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
5090          </hal_details>
5091        </entry>
5092      </static>
5093    </section>
5094    <section name="scaler">
5095      <controls>
5096        <entry name="cropRegion" type="int32" visibility="public"
5097               container="array" typedef="rectangle" hwlevel="legacy">
5098          <array>
5099            <size>4</size>
5100          </array>
5101          <description>The desired region of the sensor to read out for this capture.</description>
5102          <units>Pixel coordinates relative to
5103          android.sensor.info.activeArraySize</units>
5104          <details>
5105            This control can be used to implement digital zoom.
5106
5107            The crop region coordinate system is based off
5108            The crop region coordinate system is based on
5109            top-left corner of the sensor active array.
5110
5111            Output streams use this rectangle to produce their output,
5112            cropping to a smaller region if necessary to maintain the
5113            stream's aspect ratio, then scaling the sensor input to
5114            match the output's configured resolution.
5115
5116            The crop region is applied after the RAW to other color
5117            space (e.g. YUV) conversion. Since raw streams
5118            (e.g. RAW16) don't have the conversion stage, they are not
5119            croppable. The crop region will be ignored by raw streams.
5120
5121            For non-raw streams, any additional per-stream cropping will
5122            be done to maximize the final pixel area of the stream.
5123
5124            For example, if the crop region is set to a 4:3 aspect
5125            ratio, then 4:3 streams will use the exact crop
5126            region. 16:9 streams will further crop vertically
5127            (letterbox).
5128
5129            Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3
5130            outputs will crop horizontally (pillarbox), and 16:9
5131            streams will match exactly. These additional crops will
5132            be centered within the crop region.
5133
5134            The width and height of the crop region cannot
5135            be set to be smaller than
5136            `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
5137            `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
5138
5139            The camera device may adjust the crop region to account
5140            for rounding and other hardware requirements; the final
5141            crop region used will be included in the output capture
5142            result.
5143          </details>
5144          <hal_details>
5145            The output streams must maintain square pixels at all
5146            times, no matter what the relative aspect ratios of the
5147            corners are allowed for raw output if the full pixel array is
5148            larger than the active pixel array. Width and height may be
5149            larger than active pixel array. Width and height may be
5150            rounded to nearest larger supportable width, especially
5151            for raw output, where only a few fixed scales may be
5152            possible.
5153
5154            For a set of configured output streams, if the sensor output is cropped to a smaller
5155            size than the active array size, the HAL needs to follow the cropping rules below:
5156
5157            * The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
5158            array size. More specifically, the HAL must transform the request cropRegion from
5159            android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
5160                1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor
5161                cropped pixel area by (tx, ty),
5162                where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
5163                and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
5164                coordinate (sensorCrop.top, sensorCrop.left) is relative to
5165                android.sensor.info.activeArraySize.
5166                2. Scale the width and height of the requested cropRegion by scaling factors of
5167                sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height,
5168                respectively.
5169            Once this new cropRegion is calculated, the HAL must use this region to crop the image
5170            with regard to the sensor crop size (effective active array size). The HAL still needs to
5171            follow the general cropping rule for this new cropRegion and the effective active
5172            array size.
5173
5174            * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
5175            The HAL needs to convert the new cropRegion generated above back to coordinates w.r.t. the
5176            full active array size. The reported cropRegion may be slightly different from the
5177            requested cropRegion since the HAL may adjust the crop region to account for rounding,
5178            conversion error, or other hardware limitations.
5179
5180            HAL2.x uses only (x, y, width)
5181          </hal_details>
5182          <tag id="BC" />
5183        </entry>
5184      </controls>
5185      <static>
5186        <entry name="availableFormats" type="int32"
5187        visibility="hidden" deprecated="true" enum="true"
5188        container="array" typedef="imageFormat">
5189          <array>
5190            <size>n</size>
5191          </array>
5192          <enum>
5193            <value optional="true" id="0x20">RAW16
5194              <notes>
5195              RAW16 is a standard, cross-platform format for raw image
5196              buffers with 16-bit pixels.
5197
5198              Buffers of this format are typically expected to have a
5199              Bayer Color Filter Array (CFA) layout, which is given in
5200              android.sensor.info.colorFilterArrangement. Sensors with
5201              CFAs that are not representable by a format in
5202              android.sensor.info.colorFilterArrangement should not
5203              use this format.
5204
5205              Buffers of this format will also follow the constraints given for
5206              RAW_OPAQUE buffers, but with relaxed performance constraints.
5207
5208              This format is intended to give users access to the full contents
5209              of the buffers coming directly from the image sensor prior to any
5210              cropping or scaling operations, and all coordinate systems for
5211              metadata used for this format are relative to the size of the
5212              active region of the image sensor before any geometric distortion
5213              correction has been applied (i.e.
5214              android.sensor.info.preCorrectionActiveArraySize). Supported
5215              dimensions for this format are limited to the full dimensions of
5216              the sensor (e.g. either android.sensor.info.pixelArraySize or
5217              android.sensor.info.preCorrectionActiveArraySize will be the
5218              only supported output size).
5219
5220              See android.scaler.availableInputOutputFormatsMap for
5221              the full set of performance guarantees.
5222              </notes>
5223            </value>
5224            <value optional="true" id="0x24">RAW_OPAQUE
5225              <notes>
5226              RAW_OPAQUE (or
5227              {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
              as referred to in the public API) is a format for raw image buffers
5229              coming from an image sensor.
5230
5231              The actual structure of buffers of this format is
5232              platform-specific, but must follow several constraints:
5233
5234              1. No image post-processing operations may have been applied to
5235              buffers of this type. These buffers contain raw image data coming
5236              directly from the image sensor.
5237              1. If a buffer of this format is passed to the camera device for
5238              reprocessing, the resulting images will be identical to the images
5239              produced if the buffer had come directly from the sensor and was
5240              processed with the same settings.
5241
5242              The intended use for this format is to allow access to the native
5243              raw format buffers coming directly from the camera sensor without
5244              any additional conversions or decrease in framerate.
5245
5246              See android.scaler.availableInputOutputFormatsMap for the full set of
5247              performance guarantees.
5248              </notes>
5249            </value>
5250            <value optional="true" id="0x32315659">YV12
5251              <notes>YCrCb 4:2:0 Planar</notes>
5252            </value>
5253            <value optional="true" id="0x11">YCrCb_420_SP
5254              <notes>NV21</notes>
5255            </value>
5256            <value id="0x22">IMPLEMENTATION_DEFINED
5257              <notes>System internal format, not application-accessible</notes>
5258            </value>
5259            <value id="0x23">YCbCr_420_888
5260              <notes>Flexible YUV420 Format</notes>
5261            </value>
5262            <value id="0x21">BLOB
5263              <notes>JPEG format</notes>
5264            </value>
5265          </enum>
5266          <description>The list of image formats that are supported by this
5267          camera device for output streams.</description>
5268          <details>
5269          All camera devices will support JPEG and YUV_420_888 formats.
5270
          When set to YUV_420_888, the application can access the YUV420 data directly.
5272          </details>
5273          <hal_details>
5274          These format values are from HAL_PIXEL_FORMAT_* in
5275          system/core/include/system/graphics.h.
5276
5277          When IMPLEMENTATION_DEFINED is used, the platform
5278          gralloc module will select a format based on the usage flags provided
5279          by the camera HAL device and the other endpoint of the stream. It is
          usually used by preview and recording streams, where the application
          doesn't need to access the image data.
5282
5283          YCbCr_420_888 format must be supported by the HAL. When an image stream
5284          needs CPU/application direct access, this format will be used.
5285
5286          The BLOB format must be supported by the HAL. This is used for the JPEG stream.
5287
5288          A RAW_OPAQUE buffer should contain only pixel data. It is strongly
5289          recommended that any information used by the camera device when
5290          processing images is fully expressed by the result metadata
5291          for that image buffer.
5292          </hal_details>
5293          <tag id="BC" />
5294        </entry>
5295        <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
5296        container="array">
5297          <array>
5298            <size>n</size>
5299          </array>
5300          <description>The minimum frame duration that is supported
5301          for each resolution in android.scaler.availableJpegSizes.
5302          </description>
5303          <units>Nanoseconds</units>
5304          <range>TODO: Remove property.</range>
5305          <details>
5306          This corresponds to the minimum steady-state frame duration when only
5307          that JPEG stream is active and captured in a burst, with all
5308          processing (typically in android.*.mode) set to FAST.
5309
5310          When multiple streams are configured, the minimum
5311          frame duration will be &amp;gt;= max(individual stream min
          durations).</details>
5313          <tag id="BC" />
5314        </entry>
5315        <entry name="availableJpegSizes" type="int32" visibility="hidden"
5316        deprecated="true" container="array" typedef="size">
5317          <array>
5318            <size>n</size>
5319            <size>2</size>
5320          </array>
5321          <description>The JPEG resolutions that are supported by this camera device.</description>
5322          <range>TODO: Remove property.</range>
5323          <details>
5324          The resolutions are listed as `(width, height)` pairs. All camera devices will support
5325          sensor maximum resolution (defined by android.sensor.info.activeArraySize).
5326          </details>
5327          <hal_details>
5328          The HAL must include sensor maximum resolution
5329          (defined by android.sensor.info.activeArraySize),
5330          and should include half/quarter of sensor maximum resolution.
5331          </hal_details>
5332          <tag id="BC" />
5333        </entry>
5334        <entry name="availableMaxDigitalZoom" type="float" visibility="public"
5335              hwlevel="legacy">
5336          <description>The maximum ratio between both active area width
5337          and crop region width, and active area height and
5338          crop region height, for android.scaler.cropRegion.
5339          </description>
5340          <units>Zoom scale factor</units>
5341          <range>&amp;gt;=1</range>
5342          <details>
5343          This represents the maximum amount of zooming possible by
5344          the camera device, or equivalently, the minimum cropping
5345          window size.
5346
5347          Crop regions that have a width or height that is smaller
5348          than this ratio allows will be rounded up to the minimum
5349          allowed size by the camera device.
5350          </details>
5351          <tag id="BC" />
5352        </entry>
5353        <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
5354        container="array">
5355          <array>
5356            <size>n</size>
5357          </array>
5358          <description>For each available processed output size (defined in
5359          android.scaler.availableProcessedSizes), this property lists the
5360          minimum supportable frame duration for that size.
5361          </description>
5362          <units>Nanoseconds</units>
5363          <details>
5364          This should correspond to the frame duration when only that processed
5365          stream is active, with all processing (typically in android.*.mode)
5366          set to FAST.
5367
5368          When multiple streams are configured, the minimum frame duration will
5369          be &amp;gt;= max(individual stream min durations).
5370          </details>
5371          <tag id="BC" />
5372        </entry>
5373        <entry name="availableProcessedSizes" type="int32" visibility="hidden"
5374        deprecated="true" container="array" typedef="size">
5375          <array>
5376            <size>n</size>
5377            <size>2</size>
5378          </array>
5379          <description>The resolutions available for use with
5380          processed output streams, such as YV12, NV12, and
5381          platform opaque YUV/RGB streams to the GPU or video
5382          encoders.</description>
5383          <details>
5384          The resolutions are listed as `(width, height)` pairs.
5385
5386          For a given use case, the actual maximum supported resolution
5387          may be lower than what is listed here, depending on the destination
5388          Surface for the image data. For example, for recording video,
5389          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5390          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5391          can provide.
5392
5393          Please reference the documentation for the image data destination to
5394          check if it limits the maximum size for image data.
5395          </details>
5396          <hal_details>
5397          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5398          the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
5399          and each below resolution if it is smaller than or equal to the sensor
5400          maximum resolution (if they are not listed in JPEG sizes already):
5401
5402          * 240p (320 x 240)
5403          * 480p (640 x 480)
5404          * 720p (1280 x 720)
5405          * 1080p (1920 x 1080)
5406
5407          For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
          the HAL only has to list up to the maximum video size supported by the device.
5409          </hal_details>
5410          <tag id="BC" />
5411        </entry>
5412        <entry name="availableRawMinDurations" type="int64" deprecated="true"
5413        container="array">
5414          <array>
5415            <size>n</size>
5416          </array>
5417          <description>
5418          For each available raw output size (defined in
5419          android.scaler.availableRawSizes), this property lists the minimum
5420          supportable frame duration for that size.
5421          </description>
5422          <units>Nanoseconds</units>
5423          <details>
5424          Should correspond to the frame duration when only the raw stream is
5425          active.
5426
5427          When multiple streams are configured, the minimum
5428          frame duration will be &amp;gt;= max(individual stream min
          durations).</details>
5430          <tag id="BC" />
5431        </entry>
5432        <entry name="availableRawSizes" type="int32" deprecated="true"
5433        container="array" typedef="size">
5434          <array>
5435            <size>n</size>
5436            <size>2</size>
5437          </array>
5438          <description>The resolutions available for use with raw
5439          sensor output streams, listed as width,
5440          height</description>
5441        </entry>
5442      </static>
5443      <dynamic>
5444        <clone entry="android.scaler.cropRegion" kind="controls">
5445        </clone>
5446      </dynamic>
5447      <static>
5448        <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
5449          typedef="reprocessFormatsMap">
5450          <description>The mapping of image formats that are supported by this
5451          camera device for input streams, to their corresponding output formats.
5452          </description>
5453          <details>
5454          All camera devices with at least 1
5455          android.request.maxNumInputStreams will have at least one
5456          available input format.
5457
5458          The camera device will support the following map of formats,
5459          if its dependent capability (android.request.availableCapabilities) is supported:
5460
5461            Input Format                                    | Output Format                                     | Capability
5462          :-------------------------------------------------|:--------------------------------------------------|:----------
5463          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#JPEG}         | PRIVATE_REPROCESSING
5464          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#YUV_420_888}  | PRIVATE_REPROCESSING
5465          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#JPEG}         | YUV_REPROCESSING
5466          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#YUV_420_888}  | YUV_REPROCESSING
5467
5468          PRIVATE refers to a device-internal format that is not directly application-visible.  A
5469          PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
5470          with {@link android.graphics.ImageFormat#PRIVATE} as the format.
5471
5472          For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
5473          or output will never hurt maximum frame rate (i.e.  {@link
5474          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
5475          getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0),
5476
5477          Attempting to configure an input stream with output streams not
5478          listed as available in this map is not valid.
5479          </details>
5480          <hal_details>
5481          For the formats, see `system/core/include/system/graphics.h` for a definition
5482          of the image format enumerations. The PRIVATE format refers to the
5483          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
5484          the actual format by using the gralloc usage flags.
5485          For ZSL use case in particular, the HAL could choose appropriate format (partially
5486          processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
5487          See camera3.h for more details.
5488
5489          This value is encoded as a variable-size array-of-arrays.
5490          The inner array always contains `[format, length, ...]` where
5491          `...` has `length` elements. An inner array is followed by another
5492          inner array if the total metadata entry size hasn't yet been exceeded.
5493
5494          A code sample to read/write this encoding (with a device that
5495          supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
5496          and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
5497
5498              // reading
5499              int32_t* contents = &amp;entry.i32[0];
5500              for (size_t i = 0; i &lt; entry.count; ) {
5501                  int32_t format = contents[i++];
5502                  int32_t length = contents[i++];
5503                  int32_t output_formats[length];
5504                  memcpy(&amp;output_formats[0], &amp;contents[i],
5505                         length * sizeof(int32_t));
5506                  i += length;
5507              }
5508
5509              // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
              int32_t contents[] = {
5511                IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
5512                YUV_420_888, 2, YUV_420_888, BLOB,
5513              };
5514              update_camera_metadata_entry(metadata, index, &amp;contents[0],
5515                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
5516
5517          If the HAL claims to support any of the capabilities listed in the
5518          above details, then it must also support all the input-output
5519          combinations listed for that capability. It can optionally support
5520          additional formats if it so chooses.
5521          </hal_details>
5522          <tag id="REPROC" />
5523        </entry>
5524        <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public"
5525               enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy">
5526          <array>
5527            <size>n</size>
5528            <size>4</size>
5529          </array>
5530          <enum>
5531            <value>OUTPUT</value>
5532            <value>INPUT</value>
5533          </enum>
5534          <description>The available stream configurations that this
5535          camera device supports
5536          (i.e. format, width, height, output/input stream).
5537          </description>
5538          <details>
5539          The configurations are listed as `(format, width, height, input?)`
5540          tuples.
5541
5542          For a given use case, the actual maximum supported resolution
5543          may be lower than what is listed here, depending on the destination
5544          Surface for the image data. For example, for recording video,
5545          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5546          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5547          can provide.
5548
5549          Please reference the documentation for the image data destination to
5550          check if it limits the maximum size for image data.
5551
5552          Not all output formats may be supported in a configuration with
5553          an input stream of a particular format. For more details, see
5554          android.scaler.availableInputOutputFormatsMap.
5555
5556          The following table describes the minimum required output stream
5557          configurations based on the hardware level
5558          (android.info.supportedHardwareLevel):
5559
5560          Format         | Size                                         | Hardware Level | Notes
5561          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
5562          JPEG           | android.sensor.info.activeArraySize          | Any            |
5563          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
          JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
5565          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5566          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5567          YUV_420_888    | all output sizes available for JPEG          | FULL           |
5568          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5569          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
5570
5571          Refer to android.request.availableCapabilities for additional
5572          mandatory stream configurations on a per-capability basis.
5573          </details>
5574          <hal_details>
5575          It is recommended (but not mandatory) to also include half/quarter
5576          of sensor maximum resolution for JPEG formats (regardless of hardware
5577          level).
5578
5579          (The following is a rewording of the above required table):
5580
          For the JPEG format, the sizes may be restricted by the conditions below:

          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known
          ratios (e.g. 4:3, 16:9, 3:2, etc.). If the sensor maximum resolution
5585          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
5586          it does not have to be included in the supported JPEG sizes.
5587          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
5588          the dimensions being a multiple of 16.
5589
          Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
          However, the largest JPEG size must be as close as possible to the sensor maximum
          resolution given the above constraints. It is required that after aspect ratio
          adjustments, any additional size reduction due to other issues be less than 3% in area.
          For example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has an
          aspect ratio of 4:3, and the JPEG encoder alignment requirement is 16, then the maximum
          JPEG size will be 3264x2448.
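
          As a non-normative sketch, the example above can be checked numerically
          (the 3280x2464 sensor size and the 16-pixel alignment are the
          hypothetical values from the example):

              // Largest 4:3 output that fits in 3280x2464 with both dimensions
              // aligned to 16 pixels.
              int32_t bestW = 0, bestH = 0;
              for (int32_t h = 2464; h &gt; 0; h -= 16) {  // 2464 is already 16-aligned
                  int32_t w = h * 4 / 3;
                  if (w * 3 == h * 4 &amp;&amp; w % 16 == 0 &amp;&amp; w &lt;= 3280) {
                      bestW = w;
                      bestH = h;
                      break;
                  }
              }
              // Result: bestW == 3264, bestH == 2448; the area loss versus
              // 3280x2464 is about 1.1%, within the required 3% bound.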
5597
5598          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5599          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5600          here as output streams.
5601
5602          It must also include each below resolution if it is smaller than or
5603          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5604          formats), as output streams:
5605
5606          * 240p (320 x 240)
5607          * 480p (640 x 480)
5608          * 720p (1280 x 720)
5609          * 1080p (1920 x 1080)
5610
5611          For LIMITED capability devices
5612          (`android.info.supportedHardwareLevel == LIMITED`),
5613          the HAL only has to list up to the maximum video size
5614          supported by the device.
5615
5616          Regardless of hardware level, every output resolution available for
5617          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5618
          This supersedes the following fields, which are now deprecated:
5620
5621          * availableFormats
5622          * available[Processed,Raw,Jpeg]Sizes
5623          </hal_details>
5624        </entry>
5625        <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public"
5626               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5627          <array>
5628            <size>4</size>
5629            <size>n</size>
5630          </array>
5631          <description>This lists the minimum frame duration for each
5632          format/size combination.
5633          </description>
5634          <units>(format, width, height, ns) x n</units>
5635          <details>
5636          This should correspond to the frame duration when only that
5637          stream is active, with all processing (typically in android.*.mode)
5638          set to either OFF or FAST.
5639
5640          When multiple streams are used in a request, the minimum frame
5641          duration will be max(individual stream min durations).
5642
5643          The minimum frame duration of a stream (of a particular format, size)
5644          is the same regardless of whether the stream is input or output.
5645
5646          See android.sensor.frameDuration and
5647          android.scaler.availableStallDurations for more details about
5648          calculating the max frame rate.
5649
5650          (Keep in sync with
5651          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
5652          </details>
5653          <tag id="V1" />
5654        </entry>
5655        <entry name="availableStallDurations" type="int64" visibility="ndk_public"
5656               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
5657          <array>
5658            <size>4</size>
5659            <size>n</size>
5660          </array>
5661          <description>This lists the maximum stall duration for each
5662          output format/size combination.
5663          </description>
5664          <units>(format, width, height, ns) x n</units>
5665          <details>
5666          A stall duration is how much extra time would get added
5667          to the normal minimum frame duration for a repeating request
5668          that has streams with non-zero stall.
5669
5670          For example, consider JPEG captures which have the following
5671          characteristics:
5672
5673          * JPEG streams act like processed YUV streams in requests for which
5674          they are not included; in requests in which they are directly
5675          referenced, they act as JPEG streams. This is because supporting a
5676          JPEG stream requires the underlying YUV data to always be ready for
5677          use by a JPEG encoder, but the encoder will only be used (and impact
5678          frame duration) on requests that actually reference a JPEG stream.
5679          * The JPEG processor can run concurrently to the rest of the camera
5680          pipeline, but cannot process more than 1 capture at a time.
5681
5682          In other words, using a repeating YUV request would result
5683          in a steady frame rate (let's say it's 30 FPS). If a single
5684          JPEG request is submitted periodically, the frame rate will stay
5685          at 30 FPS (as long as we wait for the previous JPEG to return each
5686          time). If we try to submit a repeating YUV + JPEG request, then
5687          the frame rate will drop from 30 FPS.
5688
5689          In general, submitting a new request with a non-0 stall time
5690          stream will _not_ cause a frame rate drop unless there are still
5691          outstanding buffers for that stream from previous requests.
5692
          Submitting a repeating request with a set of streams (call it `S`)
          is effectively the same as setting the minimum frame duration to
          the normal minimum frame duration corresponding to `S` plus the
          maximum stall duration for `S`.
5698          If interleaving requests with and without a stall duration,
5699          a request will stall by the maximum of the remaining times
5700          for each can-stall stream with outstanding buffers.
5701
5702          This means that a stalling request will not have an exposure start
5703          until the stall has completed.
5704
5705          This should correspond to the stall duration when only that stream is
5706          active, with all processing (typically in android.*.mode) set to FAST
5707          or OFF. Setting any of the processing modes to HIGH_QUALITY
5708          effectively results in an indeterminate stall duration for all
5709          streams in a request (the regular stall calculation rules are
5710          ignored).
5711
5712          The following formats may always have a stall duration:
5713
5714          * {@link android.graphics.ImageFormat#JPEG}
5715          * {@link android.graphics.ImageFormat#RAW_SENSOR}
5716
5717          The following formats will never have a stall duration:
5718
5719          * {@link android.graphics.ImageFormat#YUV_420_888}
5720          * {@link android.graphics.ImageFormat#RAW10}
5721
5722          All other formats may or may not have an allowed stall duration on
5723          a per-capability basis; refer to android.request.availableCapabilities
5724          for more details.
5725
5726          See android.sensor.frameDuration for more information about
5727          calculating the max frame rate (absent stalls).
5728
5729          (Keep up to date with
5730          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )
5731          </details>
5732          <hal_details>
5733          If possible, it is recommended that all non-JPEG formats
5734          (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE
5735          and IMPLEMENTATION_DEFINED must not have stall durations.
5736          </hal_details>
5737          <tag id="V1" />
5738        </entry>
5739        <entry name="streamConfigurationMap" type="int32" visibility="java_public"
5740               synthetic="true" typedef="streamConfigurationMap"
5741               hwlevel="legacy">
5742          <description>The available stream configurations that this
5743          camera device supports; also includes the minimum frame durations
5744          and the stall durations for each format/size combination.
5745          </description>
5746          <details>
5747          All camera devices will support sensor maximum resolution (defined by
5748          android.sensor.info.activeArraySize) for the JPEG format.
5749
5750          For a given use case, the actual maximum supported resolution
5751          may be lower than what is listed here, depending on the destination
5752          Surface for the image data. For example, for recording video,
5753          the video encoder chosen may have a maximum size limit (e.g. 1080p)
5754          smaller than what the camera (e.g. maximum resolution is 3264x2448)
5755          can provide.
5756
5757          Please reference the documentation for the image data destination to
5758          check if it limits the maximum size for image data.
5759
5760          The following table describes the minimum required output stream
5761          configurations based on the hardware level
5762          (android.info.supportedHardwareLevel):
5763
5764          Format                                             | Size                                         | Hardware Level | Notes
5765          :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
5766          {@link android.graphics.ImageFormat#JPEG}          | android.sensor.info.activeArraySize (*1)     | Any            |
5767          {@link android.graphics.ImageFormat#JPEG}          | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
5768          {@link android.graphics.ImageFormat#JPEG}          | 1280x720 (720p)                               | Any            | if 720p &lt;= activeArraySize
5769          {@link android.graphics.ImageFormat#JPEG}          | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
5770          {@link android.graphics.ImageFormat#JPEG}          | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
5771          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG          | FULL           |
5772          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
5773          {@link android.graphics.ImageFormat#PRIVATE}       | same as YUV_420_888                          | Any            |
5774
5775          Refer to android.request.availableCapabilities and {@link
5776          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
5777          stream configurations on a per-capability basis.
5778
          *1: For the JPEG format, the sizes may be restricted by the conditions below:

          * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known
          ratios (e.g. 4:3, 16:9, 3:2, etc.). If the sensor maximum resolution
          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
          it does not have to be included in the supported JPEG sizes.
          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
          the dimensions being a multiple of 16.

          Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
          However, the largest JPEG size will be as close as possible to the sensor maximum
          resolution given the above constraints. It is required that after aspect ratio
          adjustments, any additional size reduction due to other issues be less than 3% in area.
          For example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has an
          aspect ratio of 4:3, and the JPEG encoder alignment requirement is 16, then the maximum
          JPEG size will be 3264x2448.
5794          </details>
5795          <hal_details>
5796          Do not set this property directly
5797          (it is synthetic and will not be available at the HAL layer);
5798          set the android.scaler.availableStreamConfigurations instead.
5799
5800          Not all output formats may be supported in a configuration with
5801          an input stream of a particular format. For more details, see
5802          android.scaler.availableInputOutputFormatsMap.
5803
5804          It is recommended (but not mandatory) to also include half/quarter
5805          of sensor maximum resolution for JPEG formats (regardless of hardware
5806          level).
5807
5808          (The following is a rewording of the above required table):
5809
5810          The HAL must include sensor maximum resolution (defined by
5811          android.sensor.info.activeArraySize).
5812
5813          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
5814          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
5815          here as output streams.
5816
5817          It must also include each below resolution if it is smaller than or
5818          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
5819          formats), as output streams:
5820
5821          * 240p (320 x 240)
5822          * 480p (640 x 480)
5823          * 720p (1280 x 720)
5824          * 1080p (1920 x 1080)
5825
5826          For LIMITED capability devices
5827          (`android.info.supportedHardwareLevel == LIMITED`),
5828          the HAL only has to list up to the maximum video size
5829          supported by the device.
5830
5831          Regardless of hardware level, every output resolution available for
5832          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
5833
          This supersedes the following fields, which are now deprecated:
5835
5836          * availableFormats
5837          * available[Processed,Raw,Jpeg]Sizes
5838          </hal_details>
5839        </entry>
5840        <entry name="croppingType" type="byte" visibility="public" enum="true"
5841               hwlevel="legacy">
5842          <enum>
5843            <value>CENTER_ONLY
5844              <notes>
5845                The camera device only supports centered crop regions.
5846              </notes>
5847            </value>
5848            <value>FREEFORM
5849              <notes>
5850                The camera device supports arbitrarily chosen crop regions.
5851              </notes>
5852            </value>
5853          </enum>
5854          <description>The crop type that this camera device supports.</description>
5855          <details>
5856          When passing a non-centered crop region (android.scaler.cropRegion) to a camera
5857          device that only supports CENTER_ONLY cropping, the camera device will move the
5858          crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
5859          and keep the crop region width and height unchanged. The camera device will return the
5860          final used crop region in metadata result android.scaler.cropRegion.
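
          As a hypothetical sketch of the CENTER_ONLY behavior described above
          (the array and crop sizes are illustrative only):

              // A requested 1000x750 crop on a 4000x3000 active array is moved
              // to the center; its width and height are preserved.
              int32_t left = (4000 - 1000) / 2;  // 1500
              int32_t top  = (3000 -  750) / 2;  // 1125
              // The capture result would report cropRegion = (1500, 1125, 1000, 750).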
5861
5862          Camera devices that support FREEFORM cropping will support any crop region that
5863          is inside of the active array. The camera device will apply the same crop region and
5864          return the final used crop region in capture result metadata android.scaler.cropRegion.
5865
5866          LEGACY capability devices will only support CENTER_ONLY cropping.
5867          </details>
5868        </entry>
5869      </static>
5870    </section>
5871    <section name="sensor">
5872      <controls>
5873        <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
5874          <description>Duration each pixel is exposed to
5875          light.</description>
5876          <units>Nanoseconds</units>
5877          <range>android.sensor.info.exposureTimeRange</range>
5878          <details>If the sensor can't expose this exact duration, it will shorten the
5879          duration exposed to the nearest possible value (rather than expose longer).
5880          The final exposure time used will be available in the output capture result.
5881
5882          This control is only effective if android.control.aeMode or android.control.mode is set to
5883          OFF; otherwise the auto-exposure algorithm will override this value.
5884          </details>
5885          <tag id="V1" />
5886        </entry>
5887        <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
5888          <description>Duration from start of frame exposure to
5889          start of next frame exposure.</description>
5890          <units>Nanoseconds</units>
5891          <range>See android.sensor.info.maxFrameDuration,
5892          android.scaler.streamConfigurationMap. The duration
5893          is capped to `max(duration, exposureTime + overhead)`.</range>
5894          <details>
5895          The maximum frame rate that can be supported by a camera subsystem is
5896          a function of many factors:
5897
5898          * Requested resolutions of output image streams
5899          * Availability of binning / skipping modes on the imager
5900          * The bandwidth of the imager interface
5901          * The bandwidth of the various ISP processing blocks
5902
5903          Since these factors can vary greatly between different ISPs and
5904          sensors, the camera abstraction tries to represent the bandwidth
5905          restrictions with as simple a model as possible.
5906
5907          The model presented has the following characteristics:
5908
5909          * The image sensor is always configured to output the smallest
5910          resolution possible given the application's requested output stream
5911          sizes.  The smallest resolution is defined as being at least as large
5912          as the largest requested output stream size; the camera pipeline must
5913          never digitally upsample sensor data when the crop region covers the
5914          whole sensor. In general, this means that if only small output stream
5915          resolutions are configured, the sensor can provide a higher frame
5916          rate.
5917          * Since any request may use any or all the currently configured
5918          output streams, the sensor and ISP must be configured to support
5919          scaling a single capture to all the streams at the same time.  This
5920          means the camera pipeline must be ready to produce the largest
5921          requested output size without any delay.  Therefore, the overall
5922          frame rate of a given configured stream set is governed only by the
5923          largest requested stream resolution.
5924          * Using more than one output stream in a request does not affect the
5925          frame duration.
5926          * Certain format-streams may need to do additional background processing
5927          before data is consumed/produced by that stream. These processors
5928          can run concurrently to the rest of the camera pipeline, but
5929          cannot process more than 1 capture at a time.
5930
5931          The necessary information for the application, given the model above,
5932          is provided via the android.scaler.streamConfigurationMap field using
5933          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
5934          These are used to determine the maximum frame rate / minimum frame
5935          duration that is possible for a given stream configuration.
5936
5937          Specifically, the application can use the following rules to
5938          determine the minimum frame duration it can request from the camera
5939          device:
5940
5941          1. Let the set of currently configured input/output streams
5942          be called `S`.
5943          1. Find the minimum frame durations for each stream in `S`, by looking
5944          it up in android.scaler.streamConfigurationMap using {@link
5945          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
5946          (with its respective size/format). Let this set of frame durations be
5947          called `F`.
5948          1. For any given request `R`, the minimum frame duration allowed
5949          for `R` is the maximum out of all values in `F`. Let the streams
5950          used in `R` be called `S_r`.
5951
5952          If none of the streams in `S_r` have a stall time (listed in {@link
5953          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
5954          using its respective size/format), then the frame duration in `F`
5955          determines the steady state frame rate that the application will get
5956          if it uses `R` as a repeating request. Let this special kind of
5957          request be called `Rsimple`.
5958
5959          A repeating request `Rsimple` can be _occasionally_ interleaved
5960          by a single capture of a new request `Rstall` (which has at least
5961          one in-use stream with a non-0 stall time) and if `Rstall` has the
          same minimum frame duration, this will not cause a frame rate loss
5963          if all buffers from the previous `Rstall` have already been
5964          delivered.
5965
5966          For more details about stalling, see
5967          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.
5968
5969          This control is only effective if android.control.aeMode or android.control.mode is set to
5970          OFF; otherwise the auto-exposure algorithm will override this value.
5971          </details>
5972          <hal_details>
5973          For more details about stalling, see
5974          android.scaler.availableStallDurations.
5975          </hal_details>
5976          <tag id="V1" />
5977        </entry>
5978        <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
5979          <description>The amount of gain applied to sensor data
5980          before processing.</description>
5981          <units>ISO arithmetic units</units>
5982          <range>android.sensor.info.sensitivityRange</range>
5983          <details>
5984          The sensitivity is the standard ISO sensitivity value,
5985          as defined in ISO 12232:2006.
5986
5987          The sensitivity must be within android.sensor.info.sensitivityRange, and
          if it is less than android.sensor.maxAnalogSensitivity, the camera device
5989          is guaranteed to use only analog amplification for applying the gain.
5990
5991          If the camera device cannot apply the exact sensitivity
5992          requested, it will reduce the gain to the nearest supported
5993          value. The final sensitivity used will be available in the
5994          output capture result.
5995
5996          This control is only effective if android.control.aeMode or android.control.mode is set to
5997          OFF; otherwise the auto-exposure algorithm will override this value.
5998          </details>
5999          <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
6000          <tag id="V1" />
6001        </entry>
6002      </controls>
6003      <static>
6004        <namespace name="info">
6005          <entry name="activeArraySize" type="int32" visibility="public"
6006          type_notes="Four ints defining the active pixel rectangle"
6007          container="array" typedef="rectangle" hwlevel="legacy">
6008            <array>
6009              <size>4</size>
6010            </array>
6011            <description>
6012            The area of the image sensor which corresponds to active pixels after any geometric
6013            distortion correction has been applied.
6014            </description>
6015            <units>Pixel coordinates on the image sensor</units>
6016            <details>
6017            This is the rectangle representing the size of the active region of the sensor (i.e.
6018            the region that actually receives light from the scene) after any geometric correction
6019            has been applied, and should be treated as the maximum size in pixels of any of the
6020            image output formats aside from the raw formats.
6021
6022            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6023            the full pixel array, and the size of the full pixel array is given by
6024            android.sensor.info.pixelArraySize.
6025
6026            The coordinate system for most other keys that list pixel coordinates, including
6027            android.scaler.cropRegion, is defined relative to the active array rectangle given in
6028            this field, with `(0, 0)` being the top-left of this rectangle.
6029
6030            The active array may be smaller than the full pixel array, since the full array may
6031            include black calibration pixels or other inactive regions, and geometric correction
6032            resulting in scaling or cropping may have been applied.
6033            </details>
6034            <hal_details>
6035            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6036            &amp;gt;= `(0,0)`.
6037            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
6038            </hal_details>
6039            <tag id="RAW" />
6040          </entry>
6041          <entry name="sensitivityRange" type="int32" visibility="public"
6042          type_notes="Range of supported sensitivities"
6043          container="array" typedef="rangeInt"
6044          hwlevel="full">
6045            <array>
6046              <size>2</size>
6047            </array>
6048            <description>Range of sensitivities for android.sensor.sensitivity supported by this
6049            camera device.</description>
6050            <range>Min &lt;= 100, Max &amp;gt;= 800</range>
6051            <details>
6052              The values are the standard ISO sensitivity values,
6053              as defined in ISO 12232:2006.
6054            </details>
6055
6056            <tag id="BC" />
6057            <tag id="V1" />
6058          </entry>
6059          <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
6060            hwlevel="full">
6061            <enum>
6062              <value>RGGB</value>
6063              <value>GRBG</value>
6064              <value>GBRG</value>
6065              <value>BGGR</value>
6066              <value>RGB
6067              <notes>Sensor is not Bayer; output has 3 16-bit
6068              values for each pixel, instead of just 1 16-bit value
6069              per pixel.</notes></value>
6070            </enum>
6071            <description>The arrangement of color filters on sensor;
6072            represents the colors in the top-left 2x2 section of
6073            the sensor, in reading order.</description>
6074            <tag id="RAW" />
6075          </entry>
6076          <entry name="exposureTimeRange" type="int64" visibility="public"
6077                 type_notes="nanoseconds" container="array" typedef="rangeLong"
6078                 hwlevel="full">
6079            <array>
6080              <size>2</size>
6081            </array>
6082            <description>The range of image exposure times for android.sensor.exposureTime supported
6083            by this camera device.
6084            </description>
6085            <units>Nanoseconds</units>
6086            <range>The minimum exposure time will be less than 100 us. For FULL
6087            capability devices (android.info.supportedHardwareLevel == FULL),
6088            the maximum exposure time will be greater than 100ms.</range>
6089            <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
            the maximum of the range SHOULD be at least 1 second (1e9) and MUST be at least
            100ms (100e6).
6092            </hal_details>
6093            <tag id="V1" />
6094          </entry>
6095          <entry name="maxFrameDuration" type="int64" visibility="public"
6096                 hwlevel="full">
6097            <description>The maximum possible frame duration (minimum frame rate) for
            android.sensor.frameDuration that is supported by this camera device.</description>
6099            <units>Nanoseconds</units>
6100            <range>For FULL capability devices
6101            (android.info.supportedHardwareLevel == FULL), at least 100ms.
6102            </range>
6103            <details>Attempting to use frame durations beyond the maximum will result in the frame
6104            duration being clipped to the maximum. See that control for a full definition of frame
6105            durations.
6106
6107            Refer to {@link
6108            android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
6109            for the minimum frame duration values.
6110            </details>
6111            <hal_details>
6112            For FULL capability devices (android.info.supportedHardwareLevel == FULL),
            the maximum of the range SHOULD be at least
            1 second (1e9) and MUST be at least 100ms (100e6).
6115
6116            android.sensor.info.maxFrameDuration must be greater or
6117            equal to the android.sensor.info.exposureTimeRange max
6118            value (since exposure time overrides frame duration).
6119
6120            Available minimum frame durations for JPEG must be no greater
6121            than that of the YUV_420_888/IMPLEMENTATION_DEFINED
6122            minimum frame durations (for that respective size).
6123
6124            Since JPEG processing is considered offline and can take longer than
6125            a single uncompressed capture, refer to
6126            android.scaler.availableStallDurations
6127            for details about encoding this scenario.
6128            </hal_details>
6129            <tag id="V1" />
6130          </entry>
6131          <entry name="physicalSize" type="float" visibility="public"
6132          type_notes="width x height"
6133          container="array" typedef="sizeF" hwlevel="legacy">
6134            <array>
6135              <size>2</size>
6136            </array>
6137            <description>The physical dimensions of the full pixel
6138            array.</description>
6139            <units>Millimeters</units>
6140            <details>This is the physical size of the sensor pixel
6141            array defined by android.sensor.info.pixelArraySize.
6142            </details>
6143            <hal_details>Needed for FOV calculation for old API</hal_details>
6144            <tag id="V1" />
6145            <tag id="BC" />
6146          </entry>
6147          <entry name="pixelArraySize" type="int32" visibility="public"
6148          container="array" typedef="size" hwlevel="legacy">
6149            <array>
6150              <size>2</size>
6151            </array>
6152            <description>Dimensions of the full pixel array, possibly
6153            including black calibration pixels.</description>
6154            <units>Pixels</units>
6155            <details>The pixel count of the full pixel array of the image sensor, which covers
6156            android.sensor.info.physicalSize area.  This represents the full pixel dimensions of
6157            the raw buffers produced by this sensor.
6158
6159            If a camera device supports raw sensor formats, either this or
6160            android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
6161            output formats listed in android.scaler.streamConfigurationMap (this depends on
6162            whether or not the image sensor returns buffers containing pixels that are not
6163            part of the active array region for blacklevel calibration or other purposes).
6164
6165            Some parts of the full pixel array may not receive light from the scene,
6166            or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
6167            defines the rectangle of active pixels that will be included in processed image
6168            formats.
6169            </details>
6170            <tag id="RAW" />
6171            <tag id="BC" />
6172          </entry>
6173          <entry name="whiteLevel" type="int32" visibility="public">
6174            <description>
6175            Maximum raw value output by sensor.
6176            </description>
6177            <range>&amp;gt; 255 (8-bit output)</range>
6178            <details>
6179            This specifies the fully-saturated encoding level for the raw
6180            sample values from the sensor.  This is typically caused by the
6181            sensor becoming highly non-linear or clipping. The minimum for
6182            each channel is specified by the offset in the
6183            android.sensor.blackLevelPattern key.
6184
6185            The white level is typically determined either by sensor bit depth
6186            (8-14 bits is expected), or by the point where the sensor response
            becomes too non-linear to be useful.  The default value for this is the
            maximum representable value for a 16-bit raw sample (2^16 - 1).
6189
6190            The white level values of captured images may vary for different
6191            capture settings (e.g., android.sensor.sensitivity). This key
            represents a coarse approximation for such cases. It is recommended
6193            to use android.sensor.dynamicWhiteLevel for captures when supported
6194            by the camera device, which provides more accurate white level values.
6195            </details>
6196            <hal_details>
6197            The full bit depth of the sensor must be available in the raw data,
6198            so the value for linear sensors should not be significantly lower
            than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
6200            </hal_details>
6201            <tag id="RAW" />
6202          </entry>
6203          <entry name="timestampSource" type="byte" visibility="public"
6204                 enum="true" hwlevel="legacy">
6205            <enum>
6206              <value>UNKNOWN
6207                <notes>
6208                Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
6209                but can not be compared to timestamps from other subsystems
6210                (e.g. accelerometer, gyro etc.), or other instances of the same or different
6211                camera devices in the same system. Timestamps between streams and results for
6212                a single camera instance are comparable, and the timestamps for all buffers
6213                and the result metadata generated by a single capture are identical.
6214                </notes>
6215              </value>
6216              <value>REALTIME
6217                <notes>
6218                Timestamps from android.sensor.timestamp are in the same timebase as
6219                {@link android.os.SystemClock#elapsedRealtimeNanos},
6220                and they can be compared to other timestamps using that base.
6221                </notes>
6222              </value>
6223            </enum>
6224            <description>The time base source for sensor capture start timestamps.</description>
6225            <details>
6226            The timestamps provided for captures are always in nanoseconds and monotonic, but
            may not be based on a time source that can be compared to other system time sources.
6228
6229            This characteristic defines the source for the timestamps, and therefore whether they
6230            can be compared against other system time sources/timestamps.
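
            For example, an application that wants to relate a capture's timestamp to
            {@link android.os.SystemClock#elapsedRealtimeNanos} could first check this
            characteristic. A minimal sketch, assuming a CameraCharacteristics
            `characteristics` and a TotalCaptureResult `result` are already in scope:

                Integer source = characteristics.get(
                        CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
                boolean realtime = Integer.valueOf(
                        CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME).equals(source);
                if (realtime) {
                    // Only meaningful when the source is REALTIME.
                    long ageNs = SystemClock.elapsedRealtimeNanos()
                            - result.get(CaptureResult.SENSOR_TIMESTAMP);
                }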
6231            </details>
6232          <tag id="V1" />
6233        </entry>
6234        <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
6235               typedef="boolean">
6236          <enum>
6237            <value>FALSE</value>
6238            <value>TRUE</value>
6239          </enum>
6240          <description>Whether the RAW images output from this camera device are subject to
6241          lens shading correction.</description>
6242          <details>
6243          If TRUE, all images produced by the camera device in the RAW image formats will
          have lens shading correction already applied to them. If FALSE, the images will
6245          not be adjusted for lens shading correction.
6246          See android.request.maxNumOutputRaw for a list of RAW image formats.
6247
          This key will be `null` for all devices that do not report this information.
6249          Devices with RAW capability will always report this information in this key.
6250          </details>
6251        </entry>
6252        <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
6253          type_notes="Four ints defining the active pixel rectangle" container="array"
6254          typedef="rectangle" hwlevel="legacy">
6255            <array>
6256              <size>4</size>
6257            </array>
6258            <description>
6259            The area of the image sensor which corresponds to active pixels prior to the
6260            application of any geometric distortion correction.
6261            </description>
6262            <units>Pixel coordinates on the image sensor</units>
6263            <details>
6264            This is the rectangle representing the size of the active region of the sensor (i.e.
6265            the region that actually receives light from the scene) before any geometric correction
6266            has been applied, and should be treated as the active region rectangle for any of the
6267            raw formats.  All metadata associated with raw processing (e.g. the lens shading
6268            correction map, and radial distortion fields) treats the top, left of this rectangle as
6269            the origin, (0,0).
6270
6271            The size of this region determines the maximum field of view and the maximum number of
6272            pixels that an image from this sensor can contain, prior to the application of
6273            geometric distortion correction. The effective maximum pixel dimensions of a
6274            post-distortion-corrected image is given by the android.sensor.info.activeArraySize
            post-distortion-corrected image are given by the android.sensor.info.activeArraySize
6276            can be calculated by applying the geometric distortion correction fields to this
6277            rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
6278
6279            E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
6280            dimensions in android.sensor.info.activeArraySize given the position of a pixel,
            (x', y'), in the raw pixel array with dimensions given in
6282            android.sensor.info.pixelArraySize:
6283
6284            1. Choose a pixel (x', y') within the active array region of the raw buffer given in
6285            android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
6286            to be outside of the FOV, and will not be shown in the processed output image.
6287            1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
6288            (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
6289            buffers is defined relative to the top, left of the
6290            android.sensor.info.preCorrectionActiveArraySize rectangle.
6291            1. If the resulting corrected pixel coordinate is within the region given in
6292            android.sensor.info.activeArraySize, then the position of this pixel in the
6293            processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
6294            when the top, left coordinate of that buffer is treated as (0, 0).
6295
6296            Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
6297            is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
6298            android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
6299            correction doesn't change the pixel coordinate, the resulting pixel selected in
6300            pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
6301            with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
6302            relative to the top,left of post-processed YUV output buffer with dimensions given in
6303            android.sensor.info.activeArraySize.
6304
6305            The currently supported fields that correct for geometric distortion are:
6306
6307            1. android.lens.radialDistortion.
6308
6309            If all of the geometric distortion fields are no-ops, this rectangle will be the same
6310            as the post-distortion-corrected rectangle given in
6311            android.sensor.info.activeArraySize.
6312
6313            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
6314            the full pixel array, and the size of the full pixel array is given by
6315            android.sensor.info.pixelArraySize.
6316
6317            The pre-correction active array may be smaller than the full pixel array, since the
6318            full array may include black calibration pixels or other inactive regions.
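
            The mapping in steps 2 and 3 above can be sketched as follows, assuming
            `xI` and `yI` hold the distortion-corrected coordinates from step 2 and
            `characteristics` is this device's CameraCharacteristics (the values are
            hypothetical, following the worked example above):

                Rect active = characteristics.get(
                        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                int xI = 25, yI = 25;  // post-correction coordinates from step 2
                if (active.contains(xI, yI)) {
                    int outX = xI - active.left;  // position in the processed output buffer
                    int outY = yI - active.top;
                }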
6319            </details>
6320            <hal_details>
6321            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
6322            &amp;gt;= `(0,0)`.
6323            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
6324
6325            If omitted by the HAL implementation, the camera framework will assume that this is
6326            the same as the post-correction active array region given in
6327            android.sensor.info.activeArraySize.
6328            </hal_details>
6329            <tag id="RAW" />
6330          </entry>
6331        </namespace>
6332        <entry name="referenceIlluminant1" type="byte" visibility="public"
6333               enum="true">
6334          <enum>
6335            <value id="1">DAYLIGHT</value>
6336            <value id="2">FLUORESCENT</value>
6337            <value id="3">TUNGSTEN
6338              <notes>Incandescent light</notes>
6339            </value>
6340            <value id="4">FLASH</value>
6341            <value id="9">FINE_WEATHER</value>
6342            <value id="10">CLOUDY_WEATHER</value>
6343            <value id="11">SHADE</value>
6344            <value id="12">DAYLIGHT_FLUORESCENT
6345              <notes>D 5700 - 7100K</notes>
6346            </value>
6347            <value id="13">DAY_WHITE_FLUORESCENT
6348              <notes>N 4600 - 5400K</notes>
6349            </value>
6350            <value id="14">COOL_WHITE_FLUORESCENT
6351              <notes>W 3900 - 4500K</notes>
6352            </value>
6353            <value id="15">WHITE_FLUORESCENT
6354              <notes>WW 3200 - 3700K</notes>
6355            </value>
6356            <value id="17">STANDARD_A</value>
6357            <value id="18">STANDARD_B</value>
6358            <value id="19">STANDARD_C</value>
6359            <value id="20">D55</value>
6360            <value id="21">D65</value>
6361            <value id="22">D75</value>
6362            <value id="23">D50</value>
6363            <value id="24">ISO_STUDIO_TUNGSTEN</value>
6364          </enum>
6365          <description>
6366          The standard reference illuminant used as the scene light source when
6367          calculating the android.sensor.colorTransform1,
6368          android.sensor.calibrationTransform1, and
6369          android.sensor.forwardMatrix1 matrices.
6370          </description>
6371          <details>
6372          The values in this key correspond to the values defined for the
6373          EXIF LightSource tag. These illuminants are standard light sources
          that are often used when calibrating camera devices.
6375
6376          If this key is present, then android.sensor.colorTransform1,
6377          android.sensor.calibrationTransform1, and
6378          android.sensor.forwardMatrix1 will also be present.
6379
6380          Some devices may choose to provide a second set of calibration
6381          information for improved quality, including
6382          android.sensor.referenceIlluminant2 and its corresponding matrices.
6383          </details>
6384          <hal_details>
6385          The first reference illuminant (android.sensor.referenceIlluminant1)
6386          and corresponding matrices must be present to support the RAW capability
6387          and DNG output.
6388
6389          When producing raw images with a color profile that has only been
6390          calibrated against a single light source, it is valid to omit
6391          android.sensor.referenceIlluminant2 along with the
6392          android.sensor.colorTransform2, android.sensor.calibrationTransform2,
6393          and android.sensor.forwardMatrix2 matrices.
6394
6395          If only android.sensor.referenceIlluminant1 is included, it should be
6396          chosen so that it is representative of typical scene lighting.  In
6397          general, D50 or DAYLIGHT will be chosen for this case.
6398
6399          If both android.sensor.referenceIlluminant1 and
6400          android.sensor.referenceIlluminant2 are included, they should be
6401          chosen to represent the typical range of scene lighting conditions.
          In general, a low color temperature illuminant such as Standard-A will
6403          be chosen for the first reference illuminant and a higher color
6404          temperature illuminant such as D65 will be chosen for the second
6405          reference illuminant.
6406          </hal_details>
6407          <tag id="RAW" />
6408        </entry>
6409        <entry name="referenceIlluminant2" type="byte" visibility="public">
6410          <description>
6411          The standard reference illuminant used as the scene light source when
6412          calculating the android.sensor.colorTransform2,
6413          android.sensor.calibrationTransform2, and
6414          android.sensor.forwardMatrix2 matrices.
6415          </description>
6416          <range>Any value listed in android.sensor.referenceIlluminant1</range>
6417          <details>
6418          See android.sensor.referenceIlluminant1 for more details.
6419
6420          If this key is present, then android.sensor.colorTransform2,
6421          android.sensor.calibrationTransform2, and
6422          android.sensor.forwardMatrix2 will also be present.
6423          </details>
6424          <tag id="RAW" />
6425        </entry>
6426        <entry name="calibrationTransform1" type="rational"
6427        visibility="public" optional="true"
6428        type_notes="3x3 matrix in row-major-order" container="array"
6429        typedef="colorSpaceTransform">
6430          <array>
6431            <size>3</size>
6432            <size>3</size>
6433          </array>
6434          <description>
6435          A per-device calibration transform matrix that maps from the
6436          reference sensor colorspace to the actual device sensor colorspace.
6437          </description>
6438          <details>
6439          This matrix is used to correct for per-device variations in the
6440          sensor colorspace, and is used for processing raw buffer data.
6441
6442          The matrix is expressed as a 3x3 matrix in row-major-order, and
6443          contains a per-device calibration transform that maps colors
6444          from reference sensor color space (i.e. the "golden module"
6445          colorspace) into this camera device's native sensor color
6446          space under the first reference illuminant
6447          (android.sensor.referenceIlluminant1).
6448          </details>
6449          <tag id="RAW" />
6450        </entry>
6451        <entry name="calibrationTransform2" type="rational"
6452        visibility="public" optional="true"
6453        type_notes="3x3 matrix in row-major-order" container="array"
6454        typedef="colorSpaceTransform">
6455          <array>
6456            <size>3</size>
6457            <size>3</size>
6458          </array>
6459          <description>
6460          A per-device calibration transform matrix that maps from the
6461          reference sensor colorspace to the actual device sensor colorspace
6462          (this is the colorspace of the raw buffer data).
6463          </description>
6464          <details>
6465          This matrix is used to correct for per-device variations in the
6466          sensor colorspace, and is used for processing raw buffer data.
6467
6468          The matrix is expressed as a 3x3 matrix in row-major-order, and
6469          contains a per-device calibration transform that maps colors
6470          from reference sensor color space (i.e. the "golden module"
6471          colorspace) into this camera device's native sensor color
6472          space under the second reference illuminant
6473          (android.sensor.referenceIlluminant2).
6474
6475          This matrix will only be present if the second reference
6476          illuminant is present.
6477          </details>
6478          <tag id="RAW" />
6479        </entry>
6480        <entry name="colorTransform1" type="rational"
6481        visibility="public" optional="true"
6482        type_notes="3x3 matrix in row-major-order" container="array"
6483        typedef="colorSpaceTransform">
6484          <array>
6485            <size>3</size>
6486            <size>3</size>
6487          </array>
6488          <description>
6489          A matrix that transforms color values from CIE XYZ color space to
6490          reference sensor color space.
6491          </description>
6492          <details>
6493          This matrix is used to convert from the standard CIE XYZ color
6494          space to the reference sensor colorspace, and is used when processing
6495          raw buffer data.
6496
6497          The matrix is expressed as a 3x3 matrix in row-major-order, and
6498          contains a color transform matrix that maps colors from the CIE
6499          XYZ color space to the reference sensor color space (i.e. the
6500          "golden module" colorspace) under the first reference illuminant
6501          (android.sensor.referenceIlluminant1).
6502
6503          The white points chosen in both the reference sensor color space
6504          and the CIE XYZ colorspace when calculating this transform will
6505          match the standard white point for the first reference illuminant
6506          (i.e. no chromatic adaptation will be applied by this transform).
6507          </details>
6508          <tag id="RAW" />
6509        </entry>
6510        <entry name="colorTransform2" type="rational"
6511        visibility="public" optional="true"
6512        type_notes="3x3 matrix in row-major-order" container="array"
6513        typedef="colorSpaceTransform">
6514          <array>
6515            <size>3</size>
6516            <size>3</size>
6517          </array>
6518          <description>
6519          A matrix that transforms color values from CIE XYZ color space to
6520          reference sensor color space.
6521          </description>
6522          <details>
6523          This matrix is used to convert from the standard CIE XYZ color
6524          space to the reference sensor colorspace, and is used when processing
6525          raw buffer data.
6526
6527          The matrix is expressed as a 3x3 matrix in row-major-order, and
6528          contains a color transform matrix that maps colors from the CIE
6529          XYZ color space to the reference sensor color space (i.e. the
6530          "golden module" colorspace) under the second reference illuminant
6531          (android.sensor.referenceIlluminant2).
6532
6533          The white points chosen in both the reference sensor color space
6534          and the CIE XYZ colorspace when calculating this transform will
6535          match the standard white point for the second reference illuminant
6536          (i.e. no chromatic adaptation will be applied by this transform).
6537
6538          This matrix will only be present if the second reference
6539          illuminant is present.
6540          </details>
6541          <tag id="RAW" />
6542        </entry>
6543        <entry name="forwardMatrix1" type="rational"
6544        visibility="public" optional="true"
6545        type_notes="3x3 matrix in row-major-order" container="array"
6546        typedef="colorSpaceTransform">
6547          <array>
6548            <size>3</size>
6549            <size>3</size>
6550          </array>
6551          <description>
6552          A matrix that transforms white balanced camera colors from the reference
6553          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6554          </description>
6555          <details>
6556          This matrix is used to convert to the standard CIE XYZ colorspace, and
6557          is used when processing raw buffer data.
6558
6559          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6560          a color transform matrix that maps white balanced colors from the
6561          reference sensor color space to the CIE XYZ color space with a D50 white
6562          point.
6563
6564          Under the first reference illuminant (android.sensor.referenceIlluminant1)
6565          this matrix is chosen so that the standard white point for this reference
6566          illuminant in the reference sensor colorspace is mapped to D50 in the
6567          CIE XYZ colorspace.
6568          </details>
6569          <tag id="RAW" />
6570        </entry>
6571        <entry name="forwardMatrix2" type="rational"
6572        visibility="public" optional="true"
6573        type_notes="3x3 matrix in row-major-order" container="array"
6574        typedef="colorSpaceTransform">
6575          <array>
6576            <size>3</size>
6577            <size>3</size>
6578          </array>
6579          <description>
6580          A matrix that transforms white balanced camera colors from the reference
6581          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
6582          </description>
6583          <details>
6584          This matrix is used to convert to the standard CIE XYZ colorspace, and
6585          is used when processing raw buffer data.
6586
6587          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
6588          a color transform matrix that maps white balanced colors from the
6589          reference sensor color space to the CIE XYZ color space with a D50 white
6590          point.
6591
6592          Under the second reference illuminant (android.sensor.referenceIlluminant2)
6593          this matrix is chosen so that the standard white point for this reference
6594          illuminant in the reference sensor colorspace is mapped to D50 in the
6595          CIE XYZ colorspace.
6596
6597          This matrix will only be present if the second reference
6598          illuminant is present.
6599          </details>
6600          <tag id="RAW" />
6601        </entry>
6602        <entry name="baseGainFactor" type="rational"
6603        optional="true">
6604          <description>Gain factor from electrons to raw units when
6605          ISO=100</description>
6606          <tag id="FUTURE" />
6607        </entry>
6608        <entry name="blackLevelPattern" type="int32" visibility="public"
6609        optional="true" type_notes="2x2 raw count block" container="array"
6610        typedef="blackLevelPattern">
6611          <array>
6612            <size>4</size>
6613          </array>
6614          <description>
6615          A fixed black level offset for each of the color filter arrangement
6616          (CFA) mosaic channels.
6617          </description>
6618          <range>&amp;gt;= 0 for each.</range>
6619          <details>
6620          This key specifies the zero light value for each of the CFA mosaic
6621          channels in the camera sensor.  The maximal value output by the
6622          sensor is represented by the value in android.sensor.info.whiteLevel.
6623
6624          The values are given in the same order as channels listed for the CFA
6625          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
6626          nth value given corresponds to the black level offset for the nth
6627          color channel listed in the CFA.
6628
6629          The black level values of captured images may vary for different
6630          capture settings (e.g., android.sensor.sensitivity). This key
          represents a coarse approximation for such cases. It is recommended to
6632          use android.sensor.dynamicBlackLevel or use pixels from
6633          android.sensor.opticalBlackRegions directly for captures when
6634          supported by the camera device, which provides more accurate black
6635          level values. For raw capture in particular, it is recommended to use
6636          pixels from android.sensor.opticalBlackRegions to calculate black
6637          level values for each frame.
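
          On the application side this key surfaces as a
          {@link android.hardware.camera2.params.BlackLevelPattern}. A minimal sketch of
          reading the per-channel offsets (the channel positions shown assume an RGGB
          arrangement):

              BlackLevelPattern pattern = characteristics.get(
                      CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
              int offsetR  = pattern.getOffsetForIndex(0, 0);  // column 0, row 0
              int offsetGr = pattern.getOffsetForIndex(1, 0);
              int offsetGb = pattern.getOffsetForIndex(0, 1);
              int offsetB  = pattern.getOffsetForIndex(1, 1);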
6638          </details>
6639          <hal_details>
6640          The values are given in row-column scan order, with the first value
6641          corresponding to the element of the CFA in row=0, column=0.
6642          </hal_details>
6643          <tag id="RAW" />
6644        </entry>
6645        <entry name="maxAnalogSensitivity" type="int32" visibility="public"
6646               optional="true" hwlevel="full">
6647          <description>Maximum sensitivity that is implemented
6648          purely through analog gain.</description>
6649          <details>For android.sensor.sensitivity values less than or
6650          equal to this, all applied gain must be analog. For
6651          values above this, the gain applied can be a mix of analog and
6652          digital.</details>
6653          <tag id="V1" />
6654          <tag id="FULL" />
6655        </entry>
6656        <entry name="orientation" type="int32" visibility="public"
6657               hwlevel="legacy">
6658          <description>Clockwise angle through which the output image needs to be rotated to be
6659          upright on the device screen in its native orientation.
6660          </description>
6661          <units>Degrees of clockwise rotation; always a multiple of
6662          90</units>
6663          <range>0, 90, 180, 270</range>
6664          <details>
6665          Also defines the direction of rolling shutter readout, which is from top to bottom in
6666          the sensor's coordinate system.
6667          </details>
6668          <tag id="BC" />
6669        </entry>
6670        <entry name="profileHueSatMapDimensions" type="int32"
6671        visibility="system" optional="true"
6672        type_notes="Number of samples for hue, saturation, and value"
6673        container="array">
6674          <array>
6675            <size>3</size>
6676          </array>
6677          <description>
6678          The number of input samples for each dimension of
6679          android.sensor.profileHueSatMap.
6680          </description>
6681          <range>
6682          Hue &amp;gt;= 1,
6683          Saturation &amp;gt;= 2,
6684          Value &amp;gt;= 1
6685          </range>
6686          <details>
6687          The number of input samples for the hue, saturation, and value
6688          dimension of android.sensor.profileHueSatMap. The order of the
6689          dimensions given is hue, saturation, value; where hue is the 0th
6690          element.
6691          </details>
6692          <tag id="RAW" />
6693        </entry>
6694      </static>
6695      <dynamic>
6696        <clone entry="android.sensor.exposureTime" kind="controls">
6697        </clone>
6698        <clone entry="android.sensor.frameDuration"
6699        kind="controls"></clone>
6700        <clone entry="android.sensor.sensitivity" kind="controls">
6701        </clone>
6702        <entry name="timestamp" type="int64" visibility="public"
6703               hwlevel="legacy">
6704          <description>Time at start of exposure of first
6705          row of the image sensor active array, in nanoseconds.</description>
6706          <units>Nanoseconds</units>
6707          <range>&amp;gt; 0</range>
6708          <details>The timestamps are also included in all image
6709          buffers produced for the same capture, and will be identical
6710          on all the outputs.
6711
6712          When android.sensor.info.timestampSource `==` UNKNOWN,
6713          the timestamps measure time since an unspecified starting point,
6714          and are monotonically increasing. They can be compared with the
6715          timestamps for other captures from the same camera device, but are
6716          not guaranteed to be comparable to any other time source.
6717
6718          When android.sensor.info.timestampSource `==` REALTIME, the
6719          timestamps measure time in the same timebase as {@link
6720          android.os.SystemClock#elapsedRealtimeNanos}, and they can
6721          be compared to other timestamps from other subsystems that
6722          are using that base.
6723
6724          For reprocessing, the timestamp will match the start of exposure of
6725          the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
6726          timestamp} in the TotalCaptureResult that was used to create the
6727          reprocess capture request.
6728          </details>
6729          <hal_details>
6730          All timestamps must be in reference to the kernel's
6731          CLOCK_BOOTTIME monotonic clock, which properly accounts for
6732          time spent asleep. This allows for synchronization with
6733          sensors that continue to operate while the system is
6734          otherwise asleep.
6735
6736          If android.sensor.info.timestampSource `==` REALTIME,
          the timestamp must be synchronized with the timestamps from other
6738          sensor subsystems that are using the same timebase.
6739
6740          For reprocessing, the input image's start of exposure can be looked up
6741          with android.sensor.timestamp from the metadata included in the
6742          capture request.
6743          </hal_details>
6744          <tag id="BC" />
6745        </entry>
6746        <entry name="temperature" type="float"
6747        optional="true">
6748          <description>The temperature of the sensor, sampled at the time
6749          exposure began for this frame.
6750
6751          The thermal diode being queried should be inside the sensor PCB, or
6752          somewhere close to it.
6753          </description>
6754
6755          <units>Celsius</units>
6756          <range>Optional. This value is missing if no temperature is available.</range>
6757          <tag id="FUTURE" />
6758        </entry>
6759        <entry name="neutralColorPoint" type="rational" visibility="public"
6760        optional="true" container="array">
6761          <array>
6762            <size>3</size>
6763          </array>
6764          <description>
6765          The estimated camera neutral color in the native sensor colorspace at
6766          the time of capture.
6767          </description>
6768          <details>
6769          This value gives the neutral color point encoded as an RGB value in the
6770          native sensor color space.  The neutral color point indicates the
6771          currently estimated white point of the scene illumination.  It can be
6772          used to interpolate between the provided color transforms when
6773          processing raw sensor data.
6774
6775          The order of the values is R, G, B; where R is in the lowest index.
6776          </details>
6777          <tag id="RAW" />
6778        </entry>
6779        <entry name="noiseProfile" type="double" visibility="public"
6780        optional="true" type_notes="Pairs of noise model coefficients"
6781        container="array" typedef="pairDoubleDouble">
6782          <array>
6783            <size>2</size>
6784            <size>CFA Channels</size>
6785          </array>
6786          <description>
6787          Noise model coefficients for each CFA mosaic channel.
6788          </description>
6789          <details>
6790          This key contains two noise model coefficients for each CFA channel
6791          corresponding to the sensor amplification (S) and sensor readout
6792          noise (O).  These are given as pairs of coefficients for each channel
6793          in the same order as channels listed for the CFA layout key
6794          (see android.sensor.info.colorFilterArrangement).  This is
6795          represented as an array of Pair&amp;lt;Double, Double&amp;gt;, where
6796          the first member of the Pair at index n is the S coefficient and the
6797          second member is the O coefficient for the nth color channel in the CFA.
6798
6799          These coefficients are used in a two parameter noise model to describe
6800          the amount of noise present in the image for each CFA channel.  The
6801          noise model used here is:
6802
6803          N(x) = sqrt(Sx + O)
6804
6805          Where x represents the recorded signal of a CFA channel normalized to
          the range [0, 1], and S and O are the noise model coefficients for
6807          that channel.
6808
6809          A more detailed description of the noise model can be found in the
6810          Adobe DNG specification for the NoiseProfile tag.
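
          As a small illustration, the expected noise for one channel at a given
          normalized signal level can be evaluated directly from that channel's
          (S, O) pair (the coefficient values below are hypothetical):

              double s = 1.2e-4;                    // S coefficient for this channel
              double o = 4.5e-7;                    // O coefficient for this channel
              double x = 0.35;                      // normalized signal in [0, 1]
              double sigma = Math.sqrt(s * x + o);  // N(x) for this channel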
6811          </details>
6812          <hal_details>
6813          For a CFA layout of RGGB, the list of coefficients would be given as
6814          an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
6815          for the red channel, S1 and O1 are the coefficients for the first green
6816          channel, etc.
6817          </hal_details>
6818          <tag id="RAW" />
6819        </entry>
6820        <entry name="profileHueSatMap" type="float"
6821        visibility="system" optional="true"
6822        type_notes="Mapping for hue, saturation, and value"
6823        container="array">
6824          <array>
6825            <size>hue_samples</size>
6826            <size>saturation_samples</size>
6827            <size>value_samples</size>
6828            <size>3</size>
6829          </array>
6830          <description>
6831          A mapping containing a hue shift, saturation scale, and value scale
6832          for each pixel.
6833          </description>
6834          <units>
6835          The hue shift is given in degrees; saturation and value scale factors are
6836          unitless and are between 0 and 1 inclusive
6837          </units>
6838          <details>
6839          hue_samples, saturation_samples, and value_samples are given in
6840          android.sensor.profileHueSatMapDimensions.
6841
6842          Each entry of this map contains three floats corresponding to the
6843          hue shift, saturation scale, and value scale, respectively; where the
6844          hue shift has the lowest index. The map entries are stored in the key
6845          in nested loop order, with the value divisions in the outer loop, the
6846          hue divisions in the middle loop, and the saturation divisions in the
6847          inner loop. All zero input saturation entries are required to have a
6848          value scale factor of 1.0.
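
          Given that storage order, the flattened index of the entry for hue division
          `h`, saturation division `s`, and value division `v` can be computed as in
          the following sketch, assuming the map data is held in a float array `map`
          and the sample counts come from android.sensor.profileHueSatMapDimensions
          (all values below are hypothetical):

              int hueSamples = 36, satSamples = 8, valSamples = 1;
              int h = 10, s = 3, v = 0;
              int base = ((v * hueSamples + h) * satSamples + s) * 3;
              float hueShiftDegrees = map[base];
              float satScale = map[base + 1];
              float valScale = map[base + 2];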
6849          </details>
6850          <tag id="RAW" />
6851        </entry>
6852        <entry name="profileToneCurve" type="float"
6853        visibility="system" optional="true"
6854        type_notes="Samples defining a spline for a tone-mapping curve"
6855        container="array">
6856          <array>
6857            <size>samples</size>
6858            <size>2</size>
6859          </array>
6860          <description>
6861          A list of x,y samples defining a tone-mapping curve for gamma adjustment.
6862          </description>
6863          <range>
6864          Each sample has an input range of `[0, 1]` and an output range of
6865          `[0, 1]`.  The first sample is required to be `(0, 0)`, and the last
6866          sample is required to be `(1, 1)`.
6867          </range>
6868          <details>
6869          This key contains a default tone curve that can be applied while
6870          processing the image as a starting point for user adjustments.
6871          The curve is specified as a list of value pairs in linear gamma.
6872          The curve is interpolated using a cubic spline.
6873          </details>
6874          <tag id="RAW" />
6875        </entry>
6876        <entry name="greenSplit" type="float" visibility="public" optional="true">
6877          <description>
6878          The worst-case divergence between Bayer green channels.
6879          </description>
6880          <range>
6881          &amp;gt;= 0
6882          </range>
6883          <details>
6884          This value is an estimate of the worst case split between the
6885          Bayer green channels in the red and blue rows in the sensor color
6886          filter array.
6887
6888          The green split is calculated as follows:
6889
6890          1. A 5x5 pixel (or larger) window W within the active sensor array is
6891          chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
6892          mosaic channels (R, Gr, Gb, B).  The location and size of the window
6893          chosen is implementation defined, and should be chosen to provide a
6894          green split estimate that is both representative of the entire image
6895          for this camera sensor, and can be calculated quickly.
6896          1. The arithmetic mean of the green channels from the red
6897          rows (mean_Gr) within W is computed.
6898          1. The arithmetic mean of the green channels from the blue
6899          rows (mean_Gb) within W is computed.
6900          1. The maximum ratio R of the two means is computed as follows:
6901          `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
6902
6903          The ratio R is the green split divergence reported for this property,
6904          which represents how much the green channels differ in the mosaic
6905          pattern.  This value is typically used to determine the treatment of
6906          the green mosaic channels when demosaicing.
6907
6908          The green split value can be roughly interpreted as follows:
6909
6910          * R &amp;lt; 1.03 is a negligible split (&amp;lt;3% divergence).
          * 1.03 &amp;lt;= R &amp;lt;= 1.20 will require some software
6912          correction to avoid demosaic errors (3-20% divergence).
6913          * R &amp;gt; 1.20 will require strong software correction to produce
          a usable image (&amp;gt;20% divergence).
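
          The ratio in step 4 above translates directly into code; `meanGr` and
          `meanGb` below stand in for the window means from steps 2 and 3
          (hypothetical values):

              double meanGr = 512.0;
              double meanGb = 498.0;
              double r = Math.max((meanGr + 1) / (meanGb + 1),
                                  (meanGb + 1) / (meanGr + 1));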
6915          </details>
6916          <hal_details>
6917          The green split given may be a static value based on prior
6918          characterization of the camera sensor using the green split
6919          calculation method given here over a large, representative, sample
6920          set of images.  Other methods of calculation that produce equivalent
6921          results, and can be interpreted in the same manner, may be used.
6922          </hal_details>
6923          <tag id="RAW" />
6924        </entry>
6925      </dynamic>
6926      <controls>
6927        <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
6928          <array>
6929            <size>4</size>
6930          </array>
6931          <description>
6932            A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
6933            when android.sensor.testPatternMode is SOLID_COLOR.
6934          </description>
6935          <details>
6936          Each color channel is treated as an unsigned 32-bit integer.
          The camera device then uses the X most significant bits,
          where X is the bit depth of its Bayer raw sensor output.
6940
6941          For example, a sensor with RAW10 Bayer output would use the
6942          10 most significant bits from each color channel.
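
          A sketch of the bit selection for the RAW10 example above, treating the
          channel value as an unsigned 32-bit integer:

              int rawBits = 10;                       // sensor raw bit depth
              int channel = 0xFFFFFFFF;               // fully saturated channel value
              int code = channel >>> (32 - rawBits);  // 0x3FF, the 10 most significant bits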
6943          </details>
6944          <hal_details>
6945          </hal_details>
6946        </entry>
6947        <entry name="testPatternMode" type="int32" visibility="public" optional="true"
6948          enum="true">
6949          <enum>
6950            <value>OFF
6951              <notes>No test pattern mode is used, and the camera
6952              device returns captures from the image sensor.
6953
6954              This is the default if the key is not set.</notes>
6955            </value>
6956            <value>SOLID_COLOR
6957              <notes>
6958              Each pixel in `[R, G_even, G_odd, B]` is replaced by its
6959              respective color channel provided in
6960              android.sensor.testPatternData.
6961
6962              For example:
6963
                  android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
6965
6966              All green pixels are 100% green. All red/blue pixels are black.
6967
                  android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
6969
6970              All red pixels are 100% red. Only the odd green pixels
6971              are 100% green. All blue pixels are 100% black.
6972              </notes>
6973            </value>
6974            <value>COLOR_BARS
6975              <notes>
6976              All pixel data is replaced with an 8-bar color pattern.
6977
6978              The vertical bars (left-to-right) are as follows:
6979
6980              * 100% white
6981              * yellow
6982              * cyan
6983              * green
6984              * magenta
6985              * red
6986              * blue
6987              * black
6988
6989              In general the image would look like the following:
6990
6991                 W Y C G M R B K
6992                 W Y C G M R B K
6993                 W Y C G M R B K
6994                 W Y C G M R B K
6995                 W Y C G M R B K
6996                 . . . . . . . .
6997                 . . . . . . . .
6998                 . . . . . . . .
6999
7000                 (B = Blue, K = Black)
7001
7002             Each bar should take up 1/8 of the sensor pixel array width.
7003             When this is not possible, the bar size should be rounded
7004             down to the nearest integer and the pattern can repeat
7005             on the right side.
7006
7007             Each bar's height must always take up the full sensor
7008             pixel array height.
7009
7010             Each pixel in this test pattern must be set to either
7011             0% intensity or 100% intensity.
7012             </notes>
7013            </value>
7014            <value>COLOR_BARS_FADE_TO_GRAY
7015              <notes>
7016              The test pattern is similar to COLOR_BARS, except that
7017              each bar should start at its specified color at the top,
7018              and fade to gray at the bottom.
7019
7020              Furthermore each bar is further subdivided into a left and
7021              right half. The left half should have a smooth gradient,
7022              and the right half should have a quantized gradient.
7023
              In particular, the right half should consist of blocks of the
              same color for 1/16th of the active sensor pixel array width.
7026
7027              The least significant bits in the quantized gradient should
7028              be copied from the most significant bits of the smooth gradient.
7029
7030              The height of each bar should always be a multiple of 128.
7031              When this is not the case, the pattern should repeat at the bottom
7032              of the image.
7033              </notes>
7034            </value>
7035            <value>PN9
7036              <notes>
7037              All pixel data is replaced by a pseudo-random sequence
7038              generated from a PN9 512-bit sequence (typically implemented
7039              in hardware with a linear feedback shift register).
7040
7041              The generator should be reset at the beginning of each frame,
7042              and thus each subsequent raw frame with this test pattern should
7043              be exactly the same as the last.
7044              </notes>
7045            </value>
7046            <value id="256">CUSTOM1
7047              <notes>The first custom test pattern. All custom patterns that are
7048              available only on this camera device are at least this numeric
7049              value.
7050
7051              All of the custom test patterns will be static
              (that is, the raw image must not vary from frame to frame).
7053              </notes>
7054            </value>
7055          </enum>
7056          <description>When enabled, the sensor sends a test pattern instead of
7057          doing a real exposure from the camera.
7058          </description>
7059          <range>android.sensor.availableTestPatternModes</range>
7060          <details>
7061          When a test pattern is enabled, all manual sensor controls specified
7062          by android.sensor.* will be ignored. All other controls should
7063          work as normal.
7064
7065          For example, if manual flash is enabled, flash firing should still
          occur (and the test pattern should remain unmodified, since the flash
7067          would not actually affect it).
7068
7069          Defaults to OFF.
7070          </details>
7071          <hal_details>
7072          All test patterns are specified in the Bayer domain.
7073
7074          The HAL may choose to substitute test patterns from the sensor
7075          with test patterns from on-device memory. In that case, it should be
7076          indistinguishable to the ISP whether the data came from the
7077          sensor interconnect bus (such as CSI2) or memory.
7078          </hal_details>
7079        </entry>
7080      </controls>
7081      <dynamic>
7082        <clone entry="android.sensor.testPatternData" kind="controls">
7083        </clone>
7084        <clone entry="android.sensor.testPatternMode" kind="controls">
7085        </clone>
7086      </dynamic>
7087      <static>
7088        <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
7089          type_notes="list of enums" container="array">
7090          <array>
7091            <size>n</size>
7092          </array>
7093          <description>List of sensor test pattern modes for android.sensor.testPatternMode
7094          supported by this camera device.
7095          </description>
7096          <range>Any value listed in android.sensor.testPatternMode</range>
7097          <details>
7098            Defaults to OFF, and always includes OFF if defined.
7099          </details>
7100          <hal_details>
7101            All custom modes must be >= CUSTOM1.
7102          </hal_details>
7103        </entry>
7104      </static>
7105      <dynamic>
7106        <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
7107          <description>Duration between the start of first row exposure
7108          and the start of last row exposure.</description>
7109          <units>Nanoseconds</units>
7110          <range> &amp;gt;= 0 and &amp;lt;
7111          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
7112          <details>
7113          This is the exposure time skew between the first and last
7114          row exposure start times. The first row and the last row are
7115          the first and last rows inside of the
7116          android.sensor.info.activeArraySize.
7117
7118          For typical camera sensors that use rolling shutters, this is also equivalent
7119          to the frame readout time.
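
          For example, an application can roughly estimate the start-of-exposure time
          of an individual row by spreading this skew evenly across the rows of the
          active array. A sketch, assuming a TotalCaptureResult `result` and
          hypothetical row values:

              long timestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
              long skewNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW);
              int rows = 3000;   // active array height in rows
              int row = 1500;    // row of interest
              long rowStartNs = timestampNs + (skewNs * row) / (rows - 1);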
7120          </details>
7121          <hal_details>
7122          The HAL must report `0` if the sensor is using global shutter, where all pixels begin
7123          exposure at the same time.
7124          </hal_details>
7125          <tag id="V1" />
7126        </entry>
7127      </dynamic>
7128      <static>
7129        <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
7130          container="array" typedef="rectangle">
7131          <array>
7132            <size>4</size>
7133            <size>num_regions</size>
7134          </array>
7135          <description>List of disjoint rectangles indicating the sensor
7136          optically shielded black pixel regions.
7137          </description>
7138          <details>
7139            In most camera sensors, the active array is surrounded by some
7140            optically shielded pixel areas. By blocking light, these pixels
            provide a reliable black reference for black level compensation
            in the active array region.
7143
7144            This key provides a list of disjoint rectangles specifying the
7145            regions of optically shielded (with metal shield) black pixel
7146            regions if the camera device is capable of reading out these black
7147            pixels in the output raw images. In comparison to the fixed black
7148            level values reported by android.sensor.blackLevelPattern, this key
7149            may provide a more accurate way for the application to calculate
            the black level of each captured raw image.
7151
7152            When this key is reported, the android.sensor.dynamicBlackLevel and
7153            android.sensor.dynamicWhiteLevel will also be reported.
7154          </details>
7155          <hal_details>
7156            This array contains (xmin, ymin, width, height). The (xmin, ymin)
7157            must be &amp;gt;= (0,0) and &amp;lt;=
7158            android.sensor.info.pixelArraySize. The (width, height) must be
7159            &amp;lt;= android.sensor.info.pixelArraySize. Each region must be
7160            outside the region reported by
7161            android.sensor.info.preCorrectionActiveArraySize.
7162
            The HAL must report the minimal number of disjoint regions for the
            optically shielded black pixel regions. For example, if a region can
7165            be covered by one rectangle, the HAL must not split this region into
7166            multiple rectangles.
7167          </hal_details>
7168        </entry>
7169      </static>
7170      <dynamic>
7171        <entry name="dynamicBlackLevel" type="float" visibility="public"
7172        optional="true" type_notes="2x2 raw count block" container="array">
7173          <array>
7174            <size>4</size>
7175          </array>
7176          <description>
7177          A per-frame dynamic black level offset for each of the color filter
7178          arrangement (CFA) mosaic channels.
7179          </description>
7180          <range>&amp;gt;= 0 for each.</range>
7181          <details>
7182          Camera sensor black levels may vary dramatically for different
7183          capture settings (e.g. android.sensor.sensitivity). The fixed black
7184          level reported by android.sensor.blackLevelPattern may be too
7185          inaccurate to represent the actual value on a per-frame basis. The
7186          camera device internal pipeline relies on reliable black level values
7187          to process the raw images appropriately. To get the best image
7188          quality, the camera device may choose to estimate the per frame black
7189          level values either based on optically shielded black regions
7190          (android.sensor.opticalBlackRegions) or its internal model.
7191
7192          This key reports the camera device estimated per-frame zero light
7193          value for each of the CFA mosaic channels in the camera sensor. The
7194          android.sensor.blackLevelPattern may only represent a coarse
7195          approximation of the actual black level values. This value is the
          black level used in the camera device's internal image processing pipeline
          and is generally more accurate than the fixed black level values.
7198          However, since they are estimated values by the camera device, they
7199          may not be as accurate as the black level values calculated from the
7200          optical black pixels reported by android.sensor.opticalBlackRegions.
7201
7202          The values are given in the same order as channels listed for the CFA
7203          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
7204          nth value given corresponds to the black level offset for the nth
7205          color channel listed in the CFA.
7206
7207          This key will be available if android.sensor.opticalBlackRegions is
7208          available or the camera device advertises this key via
7209          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
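
          When available, the per-frame values can be read from the capture result and
          used in place of the static pattern. A sketch, assuming a TotalCaptureResult
          `result`:

              float[] dynamicBlack = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
              Integer dynamicWhite = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
              if (dynamicBlack != null) {
                  float blackR = dynamicBlack[0];  // offset for the first listed CFA channel
              }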
7210          </details>
7211          <hal_details>
7212          The values are given in row-column scan order, with the first value
7213          corresponding to the element of the CFA in row=0, column=0.
7214          </hal_details>
7215          <tag id="RAW" />
7216        </entry>
7217        <entry name="dynamicWhiteLevel" type="int32" visibility="public"
7218        optional="true" >
7219          <description>
7220          Maximum raw value output by sensor for this frame.
7221          </description>
7222          <range> &amp;gt;= 0</range>
7223          <details>
7224          Since the android.sensor.blackLevelPattern may change for different
7225          capture settings (e.g., android.sensor.sensitivity), the white
7226          level will change accordingly. This key is similar to
7227          android.sensor.info.whiteLevel, but specifies the camera device
7228          estimated white level for each frame.
7229
7230          This key will be available if android.sensor.opticalBlackRegions is
7231          available or the camera device advertises this key via
7232          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
7233          </details>
7234          <hal_details>
7235          The full bit depth of the sensor must be available in the raw data,
7236          so the value for linear sensors should not be significantly lower
7237          than maximum raw value supported, i.e. 2^(sensor bits per pixel).
7238          </hal_details>
7239          <tag id="RAW" />
7240        </entry>
7241      </dynamic>
7242      <static>
7243        <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
7244          <array>
7245            <size>n</size>
7246            <size>3</size>
7247          </array>
7248          <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
          <range>Must be large enough to fit the opaque RAW buffer of the corresponding size produced by
7250          the camera</range>
7251          <details>
          These configurations are listed as `(width, height, size_in_bytes)` tuples.
7253          This is used for sizing the gralloc buffers for opaque RAW buffers.
7254          All RAW_OPAQUE output stream configuration listed in
7255          android.scaler.availableStreamConfigurations will have a corresponding tuple in
7256          this key.
7257          </details>
7258          <hal_details>
7259              This key is added in HAL3.4.
7260              For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
              For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera framework
7262              will derive this key by assuming each pixel takes two bytes and no padding bytes
7263              between rows.
7264          </hal_details>
7265        </entry>
7266      </static>
7267    </section>
7268    <section name="shading">
7269      <controls>
7270        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
7271          <enum>
7272            <value>OFF
7273            <notes>No lens shading correction is applied.</notes></value>
7274            <value>FAST
7275            <notes>Apply lens shading corrections, without slowing
7276            frame rate relative to sensor raw output</notes></value>
7277            <value>HIGH_QUALITY
7278            <notes>Apply high-quality lens shading correction, at the
7279            cost of possibly reduced frame rate.</notes></value>
7280          </enum>
7281          <description>Quality of lens shading correction applied
7282          to the image data.</description>
7283          <range>android.shading.availableModes</range>
7284          <details>
7285          When set to OFF mode, no lens shading correction will be applied by the
          camera device, and an identity lens shading map will be provided
7287          if `android.statistics.lensShadingMapMode == ON`. For example, for lens
7288          shading map with size of `[ 4, 3 ]`,
7289          the output android.statistics.lensShadingCorrectionMap for this case will be an identity
7290          map shown below:
7291
7292              [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7293               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7294               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7295               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7296               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
7297               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
7298
7299          When set to other modes, lens shading correction will be applied by the camera
7300          device. Applications can request lens shading map data by setting
7301          android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
7302          shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
7303          data will be the one applied by the camera device for this capture request.
7304
7305          The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
7306          the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
          AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
          OFF), to get the best results, it is recommended that applications wait for the AE and AWB
7309          to be converged before using the returned shading map data.
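
          A minimal request-side sketch of the OFF case described above, assuming a
          CaptureRequest.Builder `builder` and a TotalCaptureResult `result`:

              builder.set(CaptureRequest.SHADING_MODE, CameraMetadata.SHADING_MODE_OFF);
              builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                      CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              // With SHADING_MODE_OFF, the returned map is the identity map shown above.
              LensShadingMap map =
                      result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);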
7310          </details>
7311        </entry>
7312        <entry name="strength" type="byte">
7313          <description>Control the amount of shading correction
7314          applied to the images</description>
7315          <units>unitless: 1-10; 10 is full shading
7316          compensation</units>
7317          <tag id="FUTURE" />
7318        </entry>
7319      </controls>
7320      <dynamic>
7321        <clone entry="android.shading.mode" kind="controls">
7322        </clone>
7323      </dynamic>
7324      <static>
7325        <entry name="availableModes" type="byte" visibility="public"
7326            type_notes="List of enums (android.shading.mode)." container="array"
7327            typedef="enumList" hwlevel="legacy">
7328          <array>
7329            <size>n</size>
7330          </array>
7331          <description>
7332          List of lens shading modes for android.shading.mode that are supported by this camera device.
7333          </description>
7334          <range>Any value listed in android.shading.mode</range>
7335          <details>
              This list contains the lens shading modes that can be set for the camera device.
              Camera devices that support the MANUAL_POST_PROCESSING capability will always
              list the OFF and FAST modes. This includes all FULL level devices.
              LEGACY devices will only support FAST mode.
7340          </details>
7341          <hal_details>
7342            HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
7343            available on the camera device, but the underlying implementation can be the same for
7344            both modes. That is, if the highest quality implementation on the camera device does not
7345            slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
7346          </hal_details>
7347        </entry>
7348      </static>
7349    </section>
7350    <section name="statistics">
7351      <controls>
7352        <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
7353               hwlevel="legacy">
7354          <enum>
7355            <value>OFF
7356            <notes>Do not include face detection statistics in capture
7357            results.</notes></value>
7358            <value optional="true">SIMPLE
7359            <notes>Return face rectangle and confidence values only.
7360            </notes></value>
7361            <value optional="true">FULL
7362            <notes>Return all face
7363            metadata.
7364
7365            In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
7366            </notes></value>
7367          </enum>
7368          <description>Operating mode for the face detector
7369          unit.</description>
7370          <range>android.statistics.info.availableFaceDetectModes</range>
7371          <details>Whether face detection is enabled, and whether it
7372          should output just the basic fields or the full set of
7373          fields.</details>
7374          <hal_details>
7375            SIMPLE mode must fill in android.statistics.faceRectangles and
7376            android.statistics.faceScores.
7377            FULL mode must also fill in android.statistics.faceIds, and
7378            android.statistics.faceLandmarks.
7379          </hal_details>
7380          <tag id="BC" />
7381        </entry>
7382        <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
7383          <enum>
7384            <value>OFF</value>
7385            <value>ON</value>
7386          </enum>
7387          <description>Operating mode for histogram
7388          generation</description>
7389          <tag id="FUTURE" />
7390        </entry>
7391        <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
7392          <enum>
7393            <value>OFF</value>
7394            <value>ON</value>
7395          </enum>
7396          <description>Operating mode for sharpness map
7397          generation</description>
7398          <tag id="FUTURE" />
7399        </entry>
7400        <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
7401        typedef="boolean">
7402          <enum>
7403            <value>OFF
7404            <notes>Hot pixel map production is disabled.
7405            </notes></value>
7406            <value>ON
7407            <notes>Hot pixel map production is enabled.
7408            </notes></value>
7409          </enum>
7410          <description>
7411          Operating mode for hot pixel map generation.
7412          </description>
7413          <range>android.statistics.info.availableHotPixelMapModes</range>
7414          <details>
7415          If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
7416          If set to `false`, no hot pixel map will be returned.
7417          </details>
7418          <tag id="V1" />
7419          <tag id="RAW" />
7420        </entry>
7421      </controls>
7422      <static>
7423        <namespace name="info">
7424          <entry name="availableFaceDetectModes" type="byte"
7425                 visibility="public"
7426                 type_notes="List of enums from android.statistics.faceDetectMode"
7427                 container="array"
7428                 typedef="enumList"
7429                 hwlevel="legacy">
7430            <array>
7431              <size>n</size>
7432            </array>
7433            <description>List of face detection modes for android.statistics.faceDetectMode that are
7434            supported by this camera device.
7435            </description>
7436            <range>Any value listed in android.statistics.faceDetectMode</range>
7437            <details>OFF is always supported.
7438            </details>
7439          </entry>
7440          <entry name="histogramBucketCount" type="int32">
7441            <description>Number of histogram buckets
7442            supported</description>
7443            <range>&amp;gt;= 64</range>
7444            <tag id="FUTURE" />
7445          </entry>
7446          <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
7447            <description>The maximum number of simultaneously detectable
7448            faces.</description>
7449            <range>0 for cameras without available face detection; otherwise:
7450            `&gt;=4` for LIMITED or FULL hwlevel devices or
7451            `&gt;0` for LEGACY devices.</range>
7452            <tag id="BC" />
7453          </entry>
7454          <entry name="maxHistogramCount" type="int32">
7455            <description>Maximum value possible for a histogram
7456            bucket</description>
7457            <tag id="FUTURE" />
7458          </entry>
7459          <entry name="maxSharpnessMapValue" type="int32">
7460            <description>Maximum value possible for a sharpness map
7461            region.</description>
7462            <tag id="FUTURE" />
7463          </entry>
7464          <entry name="sharpnessMapSize" type="int32"
7465          type_notes="width x height" container="array" typedef="size">
7466            <array>
7467              <size>2</size>
7468            </array>
7469            <description>Dimensions of the sharpness
7470            map</description>
7471            <range>Must be at least 32 x 32</range>
7472            <tag id="FUTURE" />
7473          </entry>
7474          <entry name="availableHotPixelMapModes" type="byte" visibility="public"
7475                 type_notes="list of enums" container="array" typedef="boolean">
7476            <array>
7477              <size>n</size>
7478            </array>
7479            <description>
7480            List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
7481            supported by this camera device.
7482            </description>
7483            <range>Any value listed in android.statistics.hotPixelMapMode</range>
7484            <details>
            If no hot pixel map output is available for this camera device, this key will
            contain only `false`.
7487
7488            ON is always supported on devices with the RAW capability.
7489            </details>
7490            <tag id="V1" />
7491            <tag id="RAW" />
7492          </entry>
7493          <entry name="availableLensShadingMapModes" type="byte" visibility="public"
7494                 type_notes="list of enums" container="array" typedef="enumList">
7495            <array>
7496              <size>n</size>
7497            </array>
7498            <description>
7499            List of lens shading map output modes for android.statistics.lensShadingMapMode that
7500            are supported by this camera device.
7501            </description>
7502            <range>Any value listed in android.statistics.lensShadingMapMode</range>
7503            <details>
7504            If no lens shading map output is available for this camera device, this key will
7505            contain only OFF.
7506
7507            ON is always supported on devices with the RAW capability.
7508            LEGACY mode devices will always only support OFF.
7509            </details>
7510          </entry>
7511        </namespace>
7512      </static>
7513      <dynamic>
7514        <clone entry="android.statistics.faceDetectMode"
7515               kind="controls"></clone>
7516        <entry name="faceIds" type="int32" visibility="ndk_public"
7517               container="array" hwlevel="legacy">
7518          <array>
7519            <size>n</size>
7520          </array>
7521          <description>List of unique IDs for detected faces.</description>
7522          <details>
7523          Each detected face is given a unique ID that is valid for as long as the face is visible
7524          to the camera device.  A face that leaves the field of view and later returns may be
7525          assigned a new ID.
7526
7527          Only available if android.statistics.faceDetectMode == FULL</details>
7528          <tag id="BC" />
7529        </entry>
7530        <entry name="faceLandmarks" type="int32" visibility="ndk_public"
7531               type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
7532               container="array" hwlevel="legacy">
7533          <array>
7534            <size>n</size>
7535            <size>6</size>
7536          </array>
7537          <description>List of landmarks for detected
7538          faces.</description>
7539          <details>
7540            The coordinate system is that of android.sensor.info.activeArraySize, with
7541            `(0, 0)` being the top-left pixel of the active array.
7542
7543            Only available if android.statistics.faceDetectMode == FULL</details>
7544          <tag id="BC" />
7545        </entry>
7546        <entry name="faceRectangles" type="int32" visibility="ndk_public"
7547               type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
7548               container="array" typedef="rectangle" hwlevel="legacy">
7549          <array>
7550            <size>n</size>
7551            <size>4</size>
7552          </array>
7553          <description>List of the bounding rectangles for detected
7554          faces.</description>
7555          <details>
7556            The coordinate system is that of android.sensor.info.activeArraySize, with
7557            `(0, 0)` being the top-left pixel of the active array.
7558
7559            Only available if android.statistics.faceDetectMode != OFF</details>
7560          <tag id="BC" />
7561        </entry>
7562        <entry name="faceScores" type="byte" visibility="ndk_public"
7563               container="array" hwlevel="legacy">
7564          <array>
7565            <size>n</size>
7566          </array>
7567          <description>List of the face confidence scores for
7568          detected faces</description>
7569          <range>1-100</range>
7570          <details>Only available if android.statistics.faceDetectMode != OFF.
7571          </details>
7572          <hal_details>
7573          The value should be meaningful (for example, setting 100 at
7574          all times is illegal).</hal_details>
7575          <tag id="BC" />
7576        </entry>
7577        <entry name="faces" type="int32" visibility="java_public" synthetic="true"
7578               container="array" typedef="face" hwlevel="legacy">
7579          <array>
7580            <size>n</size>
7581          </array>
7582          <description>List of the faces detected through camera face detection
7583          in this capture.</description>
7584          <details>
7585          Only available if android.statistics.faceDetectMode `!=` OFF.
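
          A minimal sketch of reading the detected faces from a capture result, assuming face
          detection has been enabled via android.statistics.faceDetectMode in the request:

              // Sketch only: log the bounding box and confidence score of each detected face.
              void logFaces(CaptureResult result) {
                  Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
                  if (faces == null) return;
                  for (Face face : faces) {
                      Rect bounds = face.getBounds(); // active-array coordinates
                      Log.d("Faces", "score=" + face.getScore() + " bounds=" + bounds);
                  }
              }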
7586          </details>
7587        </entry>
7588        <entry name="histogram" type="int32"
7589        type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
7590        container="array">
7591          <array>
7592            <size>n</size>
7593            <size>3</size>
7594          </array>
7595          <description>A 3-channel histogram based on the raw
7596          sensor data</description>
          <details>The k'th bucket (0-based) covers the input range
          (with w = android.sensor.info.whiteLevel) of [ k * w/N,
          (k + 1) * w / N ). If only a monochrome histogram is
          supported, all channels should have the same data.</details>
7601          <tag id="FUTURE" />
7602        </entry>
7603        <clone entry="android.statistics.histogramMode"
7604        kind="controls"></clone>
7605        <entry name="sharpnessMap" type="int32"
7606        type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
7607        container="array">
7608          <array>
7609            <size>n</size>
7610            <size>m</size>
7611            <size>3</size>
7612          </array>
7613          <description>A 3-channel sharpness map, based on the raw
7614          sensor data</description>
7615          <details>If only a monochrome sharpness map is supported,
7616          all channels should have the same data</details>
7617          <tag id="FUTURE" />
7618        </entry>
7619        <clone entry="android.statistics.sharpnessMapMode"
7620               kind="controls"></clone>
7621        <entry name="lensShadingCorrectionMap" type="byte" visibility="public"
7622               typedef="lensShadingMap" hwlevel="full">
7623          <description>The shading map is a low-resolution floating-point map
7624          that lists the coefficients used to correct for vignetting, for each
7625          Bayer color channel.</description>
7626          <range>Each gain factor is &amp;gt;= 1</range>
7627          <details>The least shaded section of the image should have a gain factor
7628          of 1; all other sections should have gains above 1.
7629
7630          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7631          must take into account the colorCorrection settings.
7632
7633          The shading map is for the entire active pixel array, and is not
7634          affected by the crop region specified in the request. Each shading map
7635          entry is the value of the shading compensation map over a specific
7636          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7637          map, and an active pixel array size (W x H), shading map entry
7638          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7639          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7640          The map is assumed to be bilinearly interpolated between the sample points.
7641
7642          The channel order is [R, Geven, Godd, B], where Geven is the green
7643          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7644          The shading map is stored in a fully interleaved format.
7645
7646          The shading map should have on the order of 30-40 rows and columns,
7647          and must be smaller than 64x64.
7648
7649          As an example, given a very small map defined as:
7650
7651              width,height = [ 4, 3 ]
7652              values =
7653              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7654                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7655                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7656                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7657                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7658                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7659
7660          The low-resolution scaling map images for each channel are
7661          (displayed using nearest-neighbor interpolation):
7662
7663          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7664          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7665          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7666          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7667
7668          As a visualization only, inverting the full-color map to recover an
7669          image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
7670
7671          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7672          </details>
7673        </entry>
7674        <entry name="lensShadingMap" type="float" visibility="ndk_public"
7675               type_notes="2D array of float gain factors per channel to correct lens shading"
7676               container="array" hwlevel="full">
7677          <array>
7678            <size>4</size>
7679            <size>n</size>
7680            <size>m</size>
7681          </array>
7682          <description>The shading map is a low-resolution floating-point map
7683          that lists the coefficients used to correct for vignetting, for each
7684          Bayer color channel of RAW image data.</description>
7685          <range>Each gain factor is &amp;gt;= 1</range>
7686          <details>The least shaded section of the image should have a gain factor
7687          of 1; all other sections should have gains above 1.
7688
7689          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
7690          must take into account the colorCorrection settings.
7691
7692          The shading map is for the entire active pixel array, and is not
7693          affected by the crop region specified in the request. Each shading map
7694          entry is the value of the shading compensation map over a specific
7695          pixel on the sensor.  Specifically, with a (N x M) resolution shading
7696          map, and an active pixel array size (W x H), shading map entry
7697          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
7698          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
7699          The map is assumed to be bilinearly interpolated between the sample points.
7700
7701          The channel order is [R, Geven, Godd, B], where Geven is the green
7702          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
7703          The shading map is stored in a fully interleaved format, and its size
7704          is provided in the camera static metadata by android.lens.info.shadingMapSize.
7705
7706          The shading map should have on the order of 30-40 rows and columns,
7707          and must be smaller than 64x64.
7708
7709          As an example, given a very small map defined as:
7710
7711              android.lens.info.shadingMapSize = [ 4, 3 ]
7712              android.statistics.lensShadingMap =
7713              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
7714                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
7715                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
7716                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
7717                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
7718                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
7719
7720          The low-resolution scaling map images for each channel are
7721          (displayed using nearest-neighbor interpolation):
7722
7723          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
7724          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
7725          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
7726          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
7727
7728          As a visualization only, inverting the full-color map to recover an
7729          image of a gray wall (using bicubic interpolation for visual quality)
7730          as captured by the sensor gives:
7731
7732          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
7733
          Note that the RAW image data might be subject to lens shading
          correction not reported on this map. Query
          android.sensor.info.lensShadingApplied to see if the RAW image data has been subject
          to lens shading correction. If android.sensor.info.lensShadingApplied
          is TRUE, the RAW image data is subject to partial or full lens shading
          correction. In the case that full lens shading correction is applied to RAW
          images, the gain factor map reported in this key will contain all 1.0 gains.
7741          In other words, the map reported in this key is the remaining lens shading
7742          that needs to be applied on the RAW image to get images without lens shading
7743          artifacts. See android.request.maxNumOutputRaw for a list of RAW image
7744          formats.
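
          As a non-normative sketch of the sampling rule described above, the gain for one
          channel at an arbitrary active-array pixel can be recovered by bilinear
          interpolation over the interleaved map (the helper below and its parameters are
          illustrative only):

              // Sketch only: bilinearly sample one channel of an interleaved N x M shading map
              // covering a W x H active array; px, py are active-array pixel coordinates.
              float sampleGain(float[] map, int n, int m, int channel,
                      float px, float py, int width, int height) {
                  float x = px * (n - 1) / (width - 1);   // map-space coordinates
                  float y = py * (m - 1) / (height - 1);
                  int x0 = (int) Math.floor(x), y0 = (int) Math.floor(y);
                  int x1 = Math.min(x0 + 1, n - 1), y1 = Math.min(y0 + 1, m - 1);
                  float fx = x - x0, fy = y - y0;
                  float g00 = map[(y0 * n + x0) * 4 + channel];
                  float g10 = map[(y0 * n + x1) * 4 + channel];
                  float g01 = map[(y1 * n + x0) * 4 + channel];
                  float g11 = map[(y1 * n + x1) * 4 + channel];
                  return (g00 * (1 - fx) + g10 * fx) * (1 - fy)
                          + (g01 * (1 - fx) + g11 * fx) * fy;
              }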
7745          </details>
7746          <hal_details>
          The lens shading map calculation may depend on exposure and white balance statistics.
          When AE and AWB are in AUTO modes
          (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
          may have all the information it needs to generate the most accurate lens shading map.
          When AE or AWB is in manual mode
          (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
          may be adversely impacted by manual exposure or white balance parameters. To avoid
          generating unreliable shading map data, the HAL may choose to lock the shading map to
          the latest known good map generated while AE and AWB were in AUTO modes.
7756          </hal_details>
7757        </entry>
7758        <entry name="predictedColorGains" type="float"
7759               visibility="hidden"
7760               deprecated="true"
7761               optional="true"
7762               type_notes="A 1D array of floats for 4 color channel gains"
7763               container="array">
7764          <array>
7765            <size>4</size>
7766          </array>
7767          <description>The best-fit color channel gains calculated
7768          by the camera device's statistics units for the current output frame.
7769          </description>
7770          <details>
7771          This may be different than the gains used for this frame,
7772          since statistics processing on data from a new frame
7773          typically completes after the transform has already been
7774          applied to that frame.
7775
7776          The 4 channel gains are defined in Bayer domain,
7777          see android.colorCorrection.gains for details.
7778
7779          This value should always be calculated by the auto-white balance (AWB) block,
7780          regardless of the android.control.* current values.
7781          </details>
7782        </entry>
7783        <entry name="predictedColorTransform" type="rational"
7784               visibility="hidden"
7785               deprecated="true"
7786               optional="true"
7787               type_notes="3x3 rational matrix in row-major order"
7788               container="array">
7789          <array>
7790            <size>3</size>
7791            <size>3</size>
7792          </array>
7793          <description>The best-fit color transform matrix estimate
7794          calculated by the camera device's statistics units for the current
7795          output frame.</description>
7796          <details>The camera device will provide the estimate from its
7797          statistics unit on the white balance transforms to use
7798          for the next frame. These are the values the camera device believes
7799          are the best fit for the current output frame. This may
7800          be different than the transform used for this frame, since
7801          statistics processing on data from a new frame typically
7802          completes after the transform has already been applied to
7803          that frame.
7804
7805          These estimates must be provided for all frames, even if
7806          capture settings and color transforms are set by the application.
7807
7808          This value should always be calculated by the auto-white balance (AWB) block,
7809          regardless of the android.control.* current values.
7810          </details>
7811        </entry>
7812        <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
7813               hwlevel="full">
7814          <enum>
7815            <value>NONE
7816            <notes>The camera device does not detect any flickering illumination
7817            in the current scene.</notes></value>
7818            <value>50HZ
7819            <notes>The camera device detects illumination flickering at 50Hz
7820            in the current scene.</notes></value>
7821            <value>60HZ
7822            <notes>The camera device detects illumination flickering at 60Hz
7823            in the current scene.</notes></value>
7824          </enum>
7825          <description>The camera device estimated scene illumination lighting
7826          frequency.</description>
7827          <details>
7828          Many light sources, such as most fluorescent lights, flicker at a rate
7829          that depends on the local utility power standards. This flicker must be
7830          accounted for by auto-exposure routines to avoid artifacts in captured images.
7831          The camera device uses this entry to tell the application what the scene
7832          illuminant frequency is.
7833
7834          When manual exposure control is enabled
7835          (`android.control.aeMode == OFF` or `android.control.mode ==
7836          OFF`), the android.control.aeAntibandingMode doesn't perform
7837          antibanding, and the application can ensure it selects
7838          exposure times that do not cause banding issues by looking
7839          into this metadata field. See
7840          android.control.aeAntibandingMode for more details.
7841
7842          Reports NONE if there doesn't appear to be flickering illumination.
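
          As a non-normative sketch of using this field, an application doing manual exposure
          could round its exposure time to a whole number of flicker periods; the illumination
          intensity flickers at twice the mains frequency, so the period is 1/100 s for 50Hz
          and 1/120 s for 60Hz (the helper below is illustrative only):

              // Sketch only: snap an exposure time (in nanoseconds) to a banding-free
              // multiple of the detected flicker period.
              long antibandExposure(long exposureNs, int sceneFlicker) {
                  long periodNs;
                  if (sceneFlicker == CaptureResult.STATISTICS_SCENE_FLICKER_50HZ) {
                      periodNs = 1_000_000_000L / 100;
                  } else if (sceneFlicker == CaptureResult.STATISTICS_SCENE_FLICKER_60HZ) {
                      periodNs = 1_000_000_000L / 120;
                  } else {
                      return exposureNs; // NONE: no adjustment needed
                  }
                  long periods = Math.max(1, Math.round((double) exposureNs / periodNs));
                  return periods * periodNs;
              }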
7843          </details>
7844        </entry>
7845        <clone entry="android.statistics.hotPixelMapMode" kind="controls">
7846        </clone>
7847        <entry name="hotPixelMap" type="int32" visibility="public"
7848        type_notes="list of coordinates based on android.sensor.pixelArraySize"
7849        container="array" typedef="point">
7850          <array>
7851            <size>2</size>
7852            <size>n</size>
7853          </array>
7854          <description>
7855          List of `(x, y)` coordinates of hot/defective pixels on the sensor.
7856          </description>
7857          <range>
7858          n &lt;= number of pixels on the sensor.
7859          The `(x, y)` coordinates must be bounded by
7860          android.sensor.info.pixelArraySize.
7861          </range>
7862          <details>
7863          A coordinate `(x, y)` must lie between `(0, 0)`, and
7864          `(width - 1, height - 1)` (inclusive), which are the top-left and
7865          bottom-right of the pixel array, respectively. The width and
7866          height dimensions are given in android.sensor.info.pixelArraySize.
7867          This may include hot pixels that lie outside of the active array
7868          bounds given by android.sensor.info.activeArraySize.
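
          A minimal sketch of reading the map and counting the reported hot pixels that fall
          inside the active array; the `result` and `characteristics` variables are assumed
          to be an existing CaptureResult and CameraCharacteristics:

              // Sketch only: count reported hot pixels that lie within the active array.
              Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
              Rect activeArray = characteristics.get(
                  CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
              int inActiveArray = 0;
              if (hotPixels != null &amp;&amp; activeArray != null) {
                  for (Point p : hotPixels) {
                      if (activeArray.contains(p.x, p.y)) inActiveArray++;
                  }
              }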
7869          </details>
7870          <hal_details>
          A hot pixel map contains the coordinates of pixels on the camera
          sensor that do not report valid values (usually due to defects in
          the camera sensor). This includes pixels that are stuck at certain
          values, or have a response that does not accurately encode the
          incoming light from the scene.
7876
7877          To avoid performance issues, there should be significantly fewer hot
7878          pixels than actual pixels on the camera sensor.
7879          </hal_details>
7880          <tag id="V1" />
7881          <tag id="RAW" />
7882        </entry>
7883      </dynamic>
7884      <controls>
7885        <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
7886          <enum>
7887            <value>OFF
7888            <notes>Do not include a lens shading map in the capture result.</notes></value>
7889            <value>ON
7890            <notes>Include a lens shading map in the capture result.</notes></value>
7891          </enum>
7892          <description>Whether the camera device will output the lens
7893          shading map in output result metadata.</description>
7894          <range>android.statistics.info.availableLensShadingMapModes</range>
7895          <details>When set to ON,
7896          android.statistics.lensShadingMap will be provided in
7897          the output result metadata.
7898
7899          ON is always supported on devices with the RAW capability.
7900          </details>
7901          <tag id="RAW" />
7902        </entry>
7903      </controls>
7904      <dynamic>
7905        <clone entry="android.statistics.lensShadingMapMode" kind="controls">
7906        </clone>
7907      </dynamic>
7908    </section>
7909    <section name="tonemap">
7910      <controls>
7911        <entry name="curveBlue" type="float" visibility="ndk_public"
7912        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7913        container="array" hwlevel="full">
7914          <array>
7915            <size>n</size>
7916            <size>2</size>
7917          </array>
7918          <description>Tonemapping / contrast / gamma curve for the blue
7919          channel, to use when android.tonemap.mode is
7920          CONTRAST_CURVE.</description>
7921          <details>See android.tonemap.curveRed for more details.</details>
7922        </entry>
7923        <entry name="curveGreen" type="float" visibility="ndk_public"
7924        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7925        container="array" hwlevel="full">
7926          <array>
7927            <size>n</size>
7928            <size>2</size>
7929          </array>
7930          <description>Tonemapping / contrast / gamma curve for the green
7931          channel, to use when android.tonemap.mode is
7932          CONTRAST_CURVE.</description>
7933          <details>See android.tonemap.curveRed for more details.</details>
7934        </entry>
7935        <entry name="curveRed" type="float" visibility="ndk_public"
7936        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
7937        container="array" hwlevel="full">
7938          <array>
7939            <size>n</size>
7940            <size>2</size>
7941          </array>
7942          <description>Tonemapping / contrast / gamma curve for the red
7943          channel, to use when android.tonemap.mode is
7944          CONTRAST_CURVE.</description>
7945          <range>0-1 on both input and output coordinates, normalized
7946          as a floating-point value such that 0 == black and 1 == white.
7947          </range>
7948          <details>
7949          Each channel's curve is defined by an array of control points:
7950
7951              android.tonemap.curveRed =
7952                [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
7953              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
7954
7955          These are sorted in order of increasing `Pin`; it is
7956          required that input values 0.0 and 1.0 are included in the list to
7957          define a complete mapping. For input values between control points,
7958          the camera device must linearly interpolate between the control
7959          points.
7960
7961          Each curve can have an independent number of points, and the number
7962          of points can be less than max (that is, the request doesn't have to
7963          always provide a curve with number of points equivalent to
7964          android.tonemap.maxCurvePoints).
7965
7966          A few examples, and their corresponding graphical mappings; these
7967          only specify the red channel and the precision is limited to 4
7968          digits, for conciseness.
7969
7970          Linear mapping:
7971
7972              android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
7973
7974          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
7975
7976          Invert mapping:
7977
7978              android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
7979
7980          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
7981
7982          Gamma 1/2.2 mapping, with 16 control points:
7983
7984              android.tonemap.curveRed = [
7985                0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
7986                0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
7987                0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
7988                0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
7989
7990          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
7991
7992          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
7993
7994              android.tonemap.curveRed = [
7995                0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
7996                0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
7997                0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
7998                0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
7999
8000          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8001        </details>
8002        <hal_details>
8003          For good quality of mapping, at least 128 control points are
8004          preferred.
8005
          A typical use case of this would be a gamma-1/2.2 curve, using as many
          control points as are available.
8008        </hal_details>
8009        </entry>
8010        <entry name="curve" type="float" visibility="java_public" synthetic="true"
8011               typedef="tonemapCurve"
8012               hwlevel="full">
8013          <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
8014          is CONTRAST_CURVE.</description>
8015          <details>
8016          The tonemapCurve consist of three curves for each of red, green, and blue
8017          channels respectively. The following example uses the red channel as an
8018          example. The same logic applies to green and blue channel.
8019          Each channel's curve is defined by an array of control points:
8020
8021              curveRed =
8022                [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
8023              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
8024
8025          These are sorted in order of increasing `Pin`; it is always
8026          guaranteed that input values 0.0 and 1.0 are included in the list to
8027          define a complete mapping. For input values between control points,
8028          the camera device must linearly interpolate between the control
8029          points.
8030
8031          Each curve can have an independent number of points, and the number
8032          of points can be less than max (that is, the request doesn't have to
8033          always provide a curve with number of points equivalent to
8034          android.tonemap.maxCurvePoints).
8035
8036          A few examples, and their corresponding graphical mappings; these
8037          only specify the red channel and the precision is limited to 4
8038          digits, for conciseness.
8039
8040          Linear mapping:
8041
8042              curveRed = [ (0, 0), (1.0, 1.0) ]
8043
8044          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
8045
8046          Invert mapping:
8047
8048              curveRed = [ (0, 1.0), (1.0, 0) ]
8049
8050          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
8051
8052          Gamma 1/2.2 mapping, with 16 control points:
8053
8054              curveRed = [
8055                (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
8056                (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
8057                (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
8058                (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
8059
8060          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
8061
8062          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
8063
8064              curveRed = [
8065                (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
8066                (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
8067                (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
8068                (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
8069
8070          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
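
          A minimal sketch of building and applying a gamma-1/2.2 curve through the Java API;
          the `builder` variable is assumed to be an existing CaptureRequest.Builder:

              // Sketch only: build a 16-point gamma 1/2.2 curve and apply it to all channels.
              int points = 16;
              float[] curve = new float[points * 2];
              for (int i = 0; i &lt; points; i++) {
                  float in = i / (float) (points - 1);
                  curve[2 * i] = in;
                  curve[2 * i + 1] = (float) Math.pow(in, 1.0 / 2.2);
              }
              TonemapCurve tonemapCurve = new TonemapCurve(curve, curve, curve);
              builder.set(CaptureRequest.TONEMAP_MODE,
                  CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE);
              builder.set(CaptureRequest.TONEMAP_CURVE, tonemapCurve);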
8071        </details>
8072        <hal_details>
8073            This entry is created by the framework from the curveRed, curveGreen and
8074            curveBlue entries.
8075        </hal_details>
8076        </entry>
8077        <entry name="mode" type="byte" visibility="public" enum="true"
8078               hwlevel="full">
8079          <enum>
8080            <value>CONTRAST_CURVE
8081              <notes>Use the tone mapping curve specified in
8082              the android.tonemap.curve* entries.
8083
8084              All color enhancement and tonemapping must be disabled, except
8085              for applying the tonemapping curve specified by
8086              android.tonemap.curve.
8087
8088              Must not slow down frame rate relative to raw
8089              sensor output.
8090              </notes>
8091            </value>
8092            <value>FAST
8093              <notes>
8094              Advanced gamma mapping and color enhancement may be applied, without
8095              reducing frame rate compared to raw sensor output.
8096              </notes>
8097            </value>
8098            <value>HIGH_QUALITY
8099              <notes>
8100              High-quality gamma mapping and color enhancement will be applied, at
8101              the cost of possibly reduced frame rate compared to raw sensor output.
8102              </notes>
8103            </value>
8104            <value>GAMMA_VALUE
8105              <notes>
              Use the gamma value specified in android.tonemap.gamma to perform
              tonemapping.
8108
8109              All color enhancement and tonemapping must be disabled, except
8110              for applying the tonemapping curve specified by android.tonemap.gamma.
8111
8112              Must not slow down frame rate relative to raw sensor output.
8113              </notes>
8114            </value>
8115            <value>PRESET_CURVE
8116              <notes>
              Use the preset tonemapping curve specified in
              android.tonemap.presetCurve to perform tonemapping.
8119
8120              All color enhancement and tonemapping must be disabled, except
8121              for applying the tonemapping curve specified by
8122              android.tonemap.presetCurve.
8123
8124              Must not slow down frame rate relative to raw sensor output.
8125              </notes>
8126            </value>
8127          </enum>
8128          <description>High-level global contrast/gamma/tonemapping control.
8129          </description>
8130          <range>android.tonemap.availableToneMapModes</range>
8131          <details>
8132          When switching to an application-defined contrast curve by setting
8133          android.tonemap.mode to CONTRAST_CURVE, the curve is defined
8134          per-channel with a set of `(in, out)` points that specify the
8135          mapping from input high-bit-depth pixel value to the output
8136          low-bit-depth value.  Since the actual pixel ranges of both input
8137          and output may change depending on the camera pipeline, the values
8138          are specified by normalized floating-point numbers.
8139
8140          More-complex color mapping operations such as 3D color look-up
8141          tables, selective chroma enhancement, or other non-linear color
8142          transforms will be disabled when android.tonemap.mode is
8143          CONTRAST_CURVE.
8144
8145          When using either FAST or HIGH_QUALITY, the camera device will
8146          emit its own tonemap curve in android.tonemap.curve.
8147          These values are always available, and as close as possible to the
8148          actually used nonlinear/nonglobal transforms.
8149
8150          If a request is sent with CONTRAST_CURVE with the camera device's
8151          provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
8152          roughly the same.</details>
8153        </entry>
8154      </controls>
8155      <static>
8156        <entry name="maxCurvePoints" type="int32" visibility="public"
8157               hwlevel="full">
8158          <description>Maximum number of supported points in the
8159            tonemap curve that can be used for android.tonemap.curve.
8160          </description>
8161          <details>
8162          If the actual number of points provided by the application (in android.tonemap.curve*) is
8163          less than this maximum, the camera device will resample the curve to its internal
8164          representation, using linear interpolation.
8165
8166          The output curves in the result metadata may have a different number
8167          of points than the input curves, and will represent the actual
8168          hardware curves used as closely as possible when linearly interpolated.
8169          </details>
8170          <hal_details>
          This value must be at least 64; a value of at least 128 is recommended.
8172          </hal_details>
8173        </entry>
8174        <entry name="availableToneMapModes" type="byte" visibility="public"
8175        type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
8176          <array>
8177            <size>n</size>
8178          </array>
8179          <description>
8180          List of tonemapping modes for android.tonemap.mode that are supported by this camera
8181          device.
8182          </description>
8183          <range>Any value listed in android.tonemap.mode</range>
8184          <details>
          Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
          at least one of the following mode combinations:
8187
8188          * CONTRAST_CURVE, FAST and HIGH_QUALITY
8189          * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
8190
8191          This includes all FULL level devices.
8192          </details>
8193          <hal_details>
8194            HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
8195            on the camera device, but the underlying implementation can be the same for both modes.
8196            That is, if the highest quality implementation on the camera device does not slow down
8197            capture rate, then FAST and HIGH_QUALITY will generate the same output.
8198          </hal_details>
8199        </entry>
8200      </static>
8201      <dynamic>
8202        <clone entry="android.tonemap.curveBlue" kind="controls">
8203        </clone>
8204        <clone entry="android.tonemap.curveGreen" kind="controls">
8205        </clone>
8206        <clone entry="android.tonemap.curveRed" kind="controls">
8207        </clone>
8208        <clone entry="android.tonemap.curve" kind="controls">
8209        </clone>
8210        <clone entry="android.tonemap.mode" kind="controls">
8211        </clone>
8212      </dynamic>
8213      <controls>
8214        <entry name="gamma" type="float" visibility="public">
8215          <description> Tonemapping curve to use when android.tonemap.mode is
8216          GAMMA_VALUE
8217          </description>
          <details>
          The tonemap curve will be defined by the following formula:
          * OUT = pow(IN, 1.0 / gamma)
          where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0],
          pow is the power function, and gamma is the gamma value specified by this
          key.

          The same curve will be applied to all color channels. The camera device
          may clip the input gamma value to its supported range. The actual applied
          value will be returned in the capture result.
8228
8229          The valid range of gamma value varies on different devices, but values
8230          within [1.0, 5.0] are guaranteed not to be clipped.
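
          A minimal request sketch; the `builder` variable is assumed to be an existing
          CaptureRequest.Builder:

              // Sketch only: request gamma-based tonemapping with gamma = 2.2.
              builder.set(CaptureRequest.TONEMAP_MODE,
                  CaptureRequest.TONEMAP_MODE_GAMMA_VALUE);
              builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);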
8231          </details>
8232        </entry>
8233        <entry name="presetCurve" type="byte" visibility="public" enum="true">
8234          <enum>
8235            <value>SRGB
8236              <notes>Tonemapping curve is defined by sRGB</notes>
8237            </value>
8238            <value>REC709
8239              <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
8240            </value>
8241          </enum>
8242          <description> Tonemapping curve to use when android.tonemap.mode is
8243          PRESET_CURVE
8244          </description>
8245          <details>
          The tonemap curve will be defined by the specified standard.
8247
8248          sRGB (approximated by 16 control points):
8249
8250          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
8251
8252          Rec. 709 (approximated by 16 control points):
8253
8254          ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
8255
8256          Note that above figures show a 16 control points approximation of preset
8257          curves. Camera devices may apply a different approximation to the curve.
8258          </details>
8259        </entry>
8260      </controls>
8261      <dynamic>
8262        <clone entry="android.tonemap.gamma" kind="controls">
8263        </clone>
8264        <clone entry="android.tonemap.presetCurve" kind="controls">
8265        </clone>
8266      </dynamic>
8267    </section>
8268    <section name="led">
8269      <controls>
8270        <entry name="transmit" type="byte" visibility="hidden" optional="true"
8271               enum="true" typedef="boolean">
8272          <enum>
8273            <value>OFF</value>
8274            <value>ON</value>
8275          </enum>
8276          <description>This LED is nominally used to indicate to the user
8277          that the camera is powered on and may be streaming images back to the
8278          Application Processor. In certain rare circumstances, the OS may
8279          disable this when video is processed locally and not transmitted to
8280          any untrusted applications.
8281
8282          In particular, the LED *must* always be on when the data could be
8283          transmitted off the device. The LED *should* always be on whenever
8284          data is stored locally on the device.
8285
          The LED *may* be off if a trusted application is using the data in a way that
          doesn't violate the above rules.
8288          </description>
8289        </entry>
8290      </controls>
8291      <dynamic>
8292        <clone entry="android.led.transmit" kind="controls"></clone>
8293      </dynamic>
8294      <static>
8295        <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
8296               enum="true"
8297               container="array">
8298          <array>
8299            <size>n</size>
8300          </array>
8301          <enum>
8302            <value>TRANSMIT
8303              <notes>android.led.transmit control is used.</notes>
8304            </value>
8305          </enum>
8306          <description>A list of camera LEDs that are available on this system.
8307          </description>
8308        </entry>
8309      </static>
8310    </section>
8311    <section name="info">
8312      <static>
8313        <entry name="supportedHardwareLevel" type="byte" visibility="public"
8314               enum="true" hwlevel="legacy">
8315          <enum>
8316            <value>
8317              LIMITED
8318              <notes>
8319              This camera device does not have enough capabilities to qualify as a `FULL` device or
8320              better.
8321
8322              Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
8323              {@link android.hardware.camera2.CameraDevice#createCaptureSession
8324              createCaptureSession} documentation are guaranteed to be supported.
8325
              All `LIMITED` devices support the `BACKWARD_COMPATIBLE` capability, indicating basic
8327              support for color image capture. The only exception is that the device may
8328              alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth
8329              measurements and not color images.
8330
8331              `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger
8332              to lock exposure metering (and calculate flash power, for cameras with flash) before
8333              capturing a high-quality still image.
8334
              A `LIMITED` device that only lists the `BACKWARD_COMPATIBLE` capability is only
8336              required to support full-automatic operation and post-processing (`OFF` is not
8337              supported for android.control.aeMode, android.control.afMode, or
8338              android.control.awbMode)
8339
8340              Additional capabilities may optionally be supported by a `LIMITED`-level device, and
8341              can be checked for in android.request.availableCapabilities.
8342              </notes>
8343            </value>
8344            <value>
8345              FULL
8346              <notes>
8347              This camera device is capable of supporting advanced imaging applications.
8348
8349              The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
8350              {@link android.hardware.camera2.CameraDevice#createCaptureSession
8351              createCaptureSession} documentation are guaranteed to be supported.
8352
              A `FULL` device will support the following capabilities:
8354
8355              * `BURST_CAPTURE` capability (android.request.availableCapabilities contains
8356                `BURST_CAPTURE`)
8357              * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
8358              * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`)
8359              * Manual post-processing control (android.request.availableCapabilities contains
8360                `MANUAL_POST_PROCESSING`)
8361              * The required exposure time range defined in android.sensor.info.exposureTimeRange
8362              * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
8363
8364              Note:
8365              Pre-API level 23, FULL devices also supported arbitrary cropping region
8366              (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level
8367              23, and `FULL` devices may only support `CENTERED` cropping.
8368              </notes>
8369            </value>
8370            <value>
8371              LEGACY
8372              <notes>
8373              This camera device is running in backward compatibility mode.
8374
8375              Only the stream configurations listed in the `LEGACY` table in the {@link
8376              android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
8377              documentation are supported.
8378
8379              A `LEGACY` device does not support per-frame control, manual sensor control, manual
8380              post-processing, arbitrary cropping regions, and has relaxed performance constraints.
8381              No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a
8382              `LEGACY` device in android.request.availableCapabilities.
8383
8384              In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY`
8385              devices. Instead, every request that includes a JPEG-format output target is treated
8386              as triggering a still capture, internally executing a precapture trigger.  This may
8387              fire the flash for flash power metering during precapture, and then fire the flash
8388              for the final capture, if a flash is available on the device and the AE mode is set to
8389              enable the flash.
8390              </notes>
8391            </value>
8392            <value>
8393              3
8394              <notes>
8395              This camera device is capable of YUV reprocessing and RAW data capture, in addition to
8396              FULL-level capabilities.
8397
8398              The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
8399              `LIMITED` tables in the {@link
8400              android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
8401              documentation are guaranteed to be supported.
8402
8403              The following additional capabilities are guaranteed to be supported:
8404
8405              * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains
8406                `YUV_REPROCESSING`)
8407              * `RAW` capability (android.request.availableCapabilities contains
8408                `RAW`)
8409              </notes>
8410            </value>
8411          </enum>
8412          <description>
8413          Generally classifies the overall set of the camera device functionality.
8414          </description>
8415          <details>
8416          The supported hardware level is a high-level description of the camera device's
8417          capabilities, summarizing several capabilities into one field.  Each level adds additional
8418          features to the previous one, and is always a strict superset of the previous level.
8419          The ordering is `LEGACY &lt; LIMITED &lt; FULL &lt; LEVEL_3`.
8420
8421          Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing
8422          numerical value as well. To check if a given device is at least at a given hardware level,
8423          the following code snippet can be used:
8424
8425              // Returns true if the device supports the required hardware level, or better.
8426              boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
8427                  int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
8428                  if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
8429                      return requiredLevel == deviceLevel;
8430                  }
8431                  // deviceLevel is not LEGACY, can use numerical sort
8432                  return requiredLevel &lt;= deviceLevel;
8433              }
8434
8435          At a high level, the levels are:
8436
8437          * `LEGACY` devices operate in a backwards-compatibility mode for older
8438            Android devices, and have very limited capabilities.
8439          * `LIMITED` devices represent the
8440            baseline feature set, and may also include additional capabilities that are
8441            subsets of `FULL`.
8442          * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and
8443            post-processing settings, and image capture at a high rate.
8444          * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along
8445            with additional output stream configurations.
8446
8447          See the individual level enums for full descriptions of the supported capabilities.  The
8448          android.request.availableCapabilities entry describes the device's capabilities at a
          finer-grained level, if needed. In addition, many controls have their available settings or
8450          ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics} entries.
8451
8452          Some features are not part of any particular hardware level or capability and must be
8453          queried separately. These include:
8454
8455          * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
8456          * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
8457          * Face detection (android.statistics.info.availableFaceDetectModes)
8458          * Optical or electrical image stabilization
8459            (android.lens.info.availableOpticalStabilization,
8460             android.control.availableVideoStabilizationModes)
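
          For example, an application can query one of these features directly from the static
          metadata. The following minimal sketch (using only public CameraCharacteristics and
          CameraMetadata constants) checks for calibrated timestamps:

              // Returns true if the device timestamps its frames in the same time base as
              // SystemClock.elapsedRealtimeNanos() (REALTIME), rather than an arbitrary one.
              boolean hasCalibratedTimestamps(CameraCharacteristics c) {
                  Integer source = c.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
                  if (source == null) {
                      return false;
                  }
                  return source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
              }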
8461
8462          </details>
8463          <hal_details>
          The camera 3 HAL device can implement one of three possible operational modes: LIMITED,
          FULL, or LEVEL_3.
8466
8467          FULL support or better is expected from new higher-end devices. Limited
8468          mode has hardware requirements roughly in line with those for a camera HAL device v1
8469          implementation, and is expected from older or inexpensive devices. Each level is a strict
8470          superset of the previous level, and they share the same essential operational flow.
8471
8472          For full details refer to "S3. Operational Modes" in camera3.h
8473
8474          Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
8475          the `android.hardware.camera2` user-facing API only on HALv1 devices, and is implemented
8476          by the camera framework code.
8477          </hal_details>
8478        </entry>
8479      </static>
8480    </section>
8481    <section name="blackLevel">
8482      <controls>
8483        <entry name="lock" type="byte" visibility="public" enum="true"
8484               typedef="boolean" hwlevel="full">
8485          <enum>
8486            <value>OFF</value>
8487            <value>ON</value>
8488          </enum>
8489          <description> Whether black-level compensation is locked
8490          to its current values, or is free to vary.</description>
8491          <details>When set to `true` (ON), the values used for black-level
8492          compensation will not change until the lock is set to
8493          `false` (OFF).
8494
8495          Since changes to certain capture parameters (such as
8496          exposure time) may require resetting of black level
8497          compensation, the camera device must report whether setting
8498          the black level lock was successful in the output result
8499          metadata.
8500
8501          For example, if a sequence of requests is as follows:
8502
8503          * Request 1: Exposure = 10ms, Black level lock = OFF
8504          * Request 2: Exposure = 10ms, Black level lock = ON
8505          * Request 3: Exposure = 10ms, Black level lock = ON
8506          * Request 4: Exposure = 20ms, Black level lock = ON
8507          * Request 5: Exposure = 20ms, Black level lock = ON
8508          * Request 6: Exposure = 20ms, Black level lock = ON
8509
8510          And the exposure change in Request 4 requires the camera
8511          device to reset the black level offsets, then the output
8512          result metadata is expected to be:
8513
8514          * Result 1: Exposure = 10ms, Black level lock = OFF
8515          * Result 2: Exposure = 10ms, Black level lock = ON
8516          * Result 3: Exposure = 10ms, Black level lock = ON
8517          * Result 4: Exposure = 20ms, Black level lock = OFF
8518          * Result 5: Exposure = 20ms, Black level lock = ON
8519          * Result 6: Exposure = 20ms, Black level lock = ON
8520
8521          This indicates to the application that on frame 4, black
8522          levels were reset due to exposure value changes, and pixel
8523          values may not be consistent across captures.
8524
8525          The camera device will maintain the lock to the extent
8526          possible, only overriding the lock to OFF when changes to
8527          other request parameters require a black level recalculation
8528          or reset.
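
          For example, an application could detect such a reset from its capture callback. This
          is a minimal sketch; `onBlackLevelReset` is a hypothetical application handler, not a
          framework API:

              // In the application's CameraCaptureSession.CaptureCallback:
              public void onCaptureCompleted(CameraCaptureSession session,
                      CaptureRequest request, TotalCaptureResult result) {
                  Boolean requestedLock = request.get(CaptureRequest.BLACK_LEVEL_LOCK);
                  Boolean resultLock = result.get(CaptureResult.BLACK_LEVEL_LOCK);
                  if (Boolean.TRUE.equals(requestedLock)) {
                      if (Boolean.FALSE.equals(resultLock)) {
                          // The device reset black-level compensation for this frame, so pixel
                          // values may not be consistent with earlier locked frames.
                          onBlackLevelReset(result.getFrameNumber()); // hypothetical handler
                      }
                  }
              }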
8529          </details>
8530          <hal_details>
8531          If for some reason black level locking is no longer possible
8532          (for example, the analog gain has changed, which forces
8533          black level offsets to be recalculated), then the HAL must
8534          override this request (and it must report 'OFF' when this
8535          does happen) until the next capture for which locking is
8536          possible again.</hal_details>
8537          <tag id="HAL2" />
8538        </entry>
8539      </controls>
8540      <dynamic>
8541        <clone entry="android.blackLevel.lock"
8542          kind="controls">
8543          <details>
8544            Whether the black level offset was locked for this frame.  Should be
8545            ON if android.blackLevel.lock was ON in the capture request, unless
8546            a change in other capture settings forced the camera device to
8547            perform a black level reset.
8548          </details>
8549        </clone>
8550      </dynamic>
8551    </section>
8552    <section name="sync">
8553      <dynamic>
8554        <entry name="frameNumber" type="int64" visibility="ndk_public"
8555               enum="true" hwlevel="legacy">
8556          <enum>
8557            <value id="-1">CONVERGING
8558              <notes>
8559              The current result is not yet fully synchronized to any request.
8560
              Synchronization is in progress, and metadata read from this
              result may include a mix of data that has taken effect since the
              last synchronization time.
8564
8565              In some future result, within android.sync.maxLatency frames,
              this value will update to the actual frame number that
8567              the result is guaranteed to be synchronized to (as long as the
8568              request settings remain constant).
8569            </notes>
8570            </value>
8571            <value id="-2">UNKNOWN
8572              <notes>
8573              The current result's synchronization status is unknown.
8574
8575              The result may have already converged, or it may be in
8576              progress.  Reading from this result may include some mix
8577              of settings from past requests.
8578
8579              After a settings change, the new settings will eventually all
8580              take effect for the output buffers and results. However, this
8581              value will not change when that happens. Altering settings
8582              rapidly may provide outcomes using mixes of settings from recent
8583              requests.
8584
8585              This value is intended primarily for backwards compatibility with
8586              the older camera implementations (for android.hardware.Camera).
8587            </notes>
8588            </value>
8589          </enum>
8590          <description>The frame number corresponding to the last request
8591          with which the output result (metadata + buffers) has been fully
8592          synchronized.</description>
8593          <range>Either a non-negative value corresponding to a
8594          `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
8595          </range>
8596          <details>
          When a request is submitted to the camera device, there is usually a
          delay of several frames before the controls get applied. A camera
          device may either choose to account for this delay by implementing a
          pipeline and carefully submitting well-timed atomic control updates,
          or it may start streaming control changes that span several frame
          boundaries.
8603
          In the latter case, whenever a request's settings change relative to
          the previously submitted request, the full set of changes may take
          multiple frame durations to fully take effect. Some settings may
          take effect sooner (in fewer frame durations) than others.
8608
          While a set of control changes is being propagated, this value
8610          will be CONVERGING.
8611
          Once it is fully known that a set of control changes has finished
          propagating, and the resulting updated control settings have been
          read back by the camera device, this value will be set to a
          non-negative frame number (corresponding to the request to which
          the results have synchronized).
8617
8618          Older camera device implementations may not have a way to detect
8619          when all camera controls have been applied, and will always set this
8620          value to UNKNOWN.
8621
8622          FULL capability devices will always have this value set to the
8623          frame number of the request corresponding to this result.
8624
8625          _Further details_:
8626
8627          * Whenever a request differs from the last request, any future
8628          results not yet returned may have this value set to CONVERGING (this
8629          could include any in-progress captures not yet returned by the camera
8630          device, for more details see pipeline considerations below).
8631          * Submitting a series of multiple requests that differ from the
8632          previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
8633          moves the new synchronization frame to the last non-repeating
8634          request (using the smallest frame number from the contiguous list of
8635          repeating requests).
8636          * Submitting the same request repeatedly will not change this value
8637          to CONVERGING, if it was already a non-negative value.
8638          * When this value changes to non-negative, that means that all of the
8639          metadata controls from the request have been applied, all of the
8640          metadata controls from the camera device have been read to the
8641          updated values (into the result), and all of the graphics buffers
8642          corresponding to this result are also synchronized to the request.
8643
8644          _Pipeline considerations_:
8645
8646          Submitting a request with updated controls relative to the previously
8647          submitted requests may also invalidate the synchronization state
8648          of all the results corresponding to currently in-flight requests.
8649
8650          In other words, results for this current request and up to
8651          android.request.pipelineMaxDepth prior requests may have their
8652          android.sync.frameNumber change to CONVERGING.
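
          For illustration, the following minimal sketch shows how a value of this entry could be
          interpreted. The enum values above are written out as local constants, and the frame
          numbers are assumed to have been obtained by the application; the sketch also assumes
          the request settings have not changed again after `requestFrameNumber`:

              static final long CONVERGING = -1;
              static final long UNKNOWN = -2;

              // Returns true if a result whose android.sync.frameNumber equals syncFrameNumber
              // is guaranteed to reflect the settings of the request with requestFrameNumber.
              boolean isSynchronizedTo(long syncFrameNumber, long requestFrameNumber) {
                  if (syncFrameNumber == CONVERGING) {
                      return false; // Still converging; settings may be partially applied.
                  }
                  if (syncFrameNumber == UNKNOWN) {
                      return false; // Cannot be determined on this device.
                  }
                  return syncFrameNumber &gt;= requestFrameNumber;
              }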
8653          </details>
8654          <hal_details>
8655          Using UNKNOWN here is illegal unless android.sync.maxLatency
8656          is also UNKNOWN.
8657
8658          FULL capability devices should simply set this value to the
8659          `frame_number` of the request this result corresponds to.
8660          </hal_details>
8661          <tag id="V1" />
8662        </entry>
8663      </dynamic>
8664      <static>
8665        <entry name="maxLatency" type="int32" visibility="public" enum="true"
8666               hwlevel="legacy">
8667          <enum>
8668            <value id="0">PER_FRAME_CONTROL
8669              <notes>
8670              Every frame has the requests immediately applied.
8671
8672              Changing controls over multiple requests one after another will
8673              produce results that have those controls applied atomically
8674              each frame.
8675
8676              All FULL capability devices will have this as their maxLatency.
8677              </notes>
8678            </value>
8679            <value id="-1">UNKNOWN
8680              <notes>
8681              Each new frame has some subset (potentially the entire set)
8682              of the past requests applied to the camera settings.
8683
8684              By submitting a series of identical requests, the camera device
8685              will eventually have the camera settings applied, but it is
8686              unknown when that exact point will be.
8687
8688              All LEGACY capability devices will have this as their maxLatency.
8689              </notes>
8690            </value>
8691          </enum>
8692          <description>
          The maximum number of frames that can occur after a request
          (different from the previous one) has been submitted, and before the
          result's state becomes synchronized.
8696          </description>
8697          <units>Frame counts</units>
8698          <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
8699          <details>
          This defines the maximum distance (in number of metadata results)
8701          between the frame number of the request that has new controls to apply
8702          and the frame number of the result that has all the controls applied.
8703
          In other words, this acts as an upper bound on how many frames
          must occur before the camera device knows for a fact that the newly
          submitted camera settings have been applied in outgoing frames.
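
          For example, an application can use the public key for this entry to check whether
          per-frame control is available before relying on settings being applied immediately (a
          minimal sketch):

              // Returns true if every request's settings are applied to exactly one frame.
              boolean hasPerFrameControl(CameraCharacteristics c) {
                  Integer latency = c.get(CameraCharacteristics.SYNC_MAX_LATENCY);
                  if (latency == null) {
                      return false;
                  }
                  return latency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
              }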
8707          </details>
8708          <hal_details>
          For example, if maxLatency was 2,
8710
8711              initial request = X (repeating)
8712              request1 = X
8713              request2 = Y
8714              request3 = Y
8715              request4 = Y
8716
              where requestN has frameNumber N, and the first of the repeating
              initial requests has frameNumber F (and F &lt; 1).
8719
8720              initial result = X' + { android.sync.frameNumber == F }
8721              result1 = X' + { android.sync.frameNumber == F }
8722              result2 = X' + { android.sync.frameNumber == CONVERGING }
8723              result3 = X' + { android.sync.frameNumber == CONVERGING }
8724              result4 = X' + { android.sync.frameNumber == 2 }
8725
8726              where resultN has frameNumber N.
8727
8728          Since `result4` has a `frameNumber == 4` and
8729          `android.sync.frameNumber == 2`, the distance is clearly
8730          `4 - 2 = 2`.
8731
8732          Use `frame_count` from camera3_request_t instead of
8733          android.request.frameCount or
8734          `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`.
8735
8736          LIMITED devices are strongly encouraged to use a non-negative
          value. If UNKNOWN is used here, then app developers do not have a way
8738          to know when sensor settings have been applied.
8739          </hal_details>
8740          <tag id="V1" />
8741        </entry>
8742      </static>
8743    </section>
8744    <section name="reprocess">
8745      <controls>
8746        <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited">
8747            <description>
            The exposure time increase factor applied to the original output
            frame by the application's processing before it is sent for reprocessing.
8750            </description>
8751            <units>Relative exposure time increase factor.</units>
8752            <range> &amp;gt;= 1.0</range>
8753            <details>
            This is optional, and will be supported if the camera device supports the YUV_REPROCESSING
8755            capability (android.request.availableCapabilities contains YUV_REPROCESSING).
8756
8757            For some YUV reprocessing use cases, the application may choose to filter the original
8758            output frames to effectively reduce the noise to the same level as a frame that was
8759            captured with longer exposure time. To be more specific, assuming the original captured
8760            images were captured with a sensitivity of S and an exposure time of T, the model in
8761            the camera device is that the amount of noise in the image would be approximately what
8762            would be expected if the original capture parameters had been a sensitivity of
8763            S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
8764            than S and T respectively. If the captured images were processed by the application
8765            before being sent for reprocessing, then the application may have used image processing
8766            algorithms and/or multi-frame image fusion to reduce the noise in the
8767            application-processed images (input images). By using the effectiveExposureFactor
8768            control, the application can communicate to the camera device the actual noise level
8769            improvement in the application-processed image. With this information, the camera
8770            device can select appropriate noise reduction and edge enhancement parameters to avoid
8771            excessive noise reduction (android.noiseReduction.mode) and insufficient edge
8772            enhancement (android.edge.mode) being applied to the reprocessed frames.
8773
            For example, in a multi-frame image fusion use case, the application may fuse
            multiple output frames together into a final frame for reprocessing. When N images are
            fused into 1 image for reprocessing, the exposure time increase factor could be up to
            the square root of N (based on a simple photon shot noise model). The camera device
            will adjust the reprocessing noise reduction and edge enhancement parameters
            accordingly to produce the best quality images.
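
            For example, an application that fuses N output frames before reprocessing might set
            this control as follows. This is a minimal sketch; `reprocessBuilder` is assumed to
            be a reprocess CaptureRequest.Builder created by the application, and
            `numFusedFrames` is the number of frames that were fused:

                // Simple photon shot noise model: fusing N frames reduces noise roughly as if
                // the exposure time had been sqrt(N) times longer.
                float exposureFactor = (float) Math.sqrt(numFusedFrames);
                reprocessBuilder.set(
                        CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, exposureFactor);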
8780
            This is a relative factor; 1.0 indicates that the application hasn't processed the
            input buffer in a way that affects its effective exposure time.
8783
            This control is only effective for YUV reprocessing capture requests. For noise
8785            reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
8786            Similarly, for edge enhancement reprocessing, it is only effective when
8787            `android.edge.mode != OFF`.
8788            </details>
8789          <tag id="REPROC" />
8790        </entry>
8791      </controls>
8792      <dynamic>
8793      <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
8794      </clone>
8795      </dynamic>
8796      <static>
8797        <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited">
8798          <description>
          The maximal camera capture pipeline stall (in units of frame count) introduced by a
8800          reprocess capture request.
8801          </description>
8802          <units>Number of frames.</units>
8803          <range> &amp;lt;= 4</range>
8804          <details>
          This key describes the maximal interference that one reprocess (input) request
          can introduce to the camera device's simultaneous streaming of regular (output)
          capture requests, including repeating requests.
8808
          When a reprocessing capture request is submitted while a camera output repeating request
          (e.g. preview) is being served by the camera device, it may preempt the camera capture
          pipeline for at least one frame duration so that the camera device is unable to process
          the following capture request in time for the next sensor start-of-exposure boundary.
          When this happens, the application may observe a capture time gap (longer than one frame
          duration) between adjacent capture output frames, which usually shows up as a preview
          glitch if the repeating request's output targets include a preview surface. This key
          gives the worst-case number of frames of stall introduced by one reprocess request with
          any format/size combination.
8818
          If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
          ongoing camera repeating request outputs, as if the reprocess request were never issued.
8821
          This key is supported if the camera device supports PRIVATE or YUV reprocessing
          (i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
          YUV_REPROCESSING).
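
          For example, an application can read the corresponding public key to decide whether
          interleaving reprocess requests with a preview repeating request may cause visible
          glitches. A minimal sketch, where `characteristics` is the device's
          CameraCharacteristics (treating zero stall as "glitch free" is an application policy,
          not a requirement of this key):

              Integer maxCaptureStall = characteristics.get(
                      CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
              // Zero means a reprocess request adds no stall to the repeating request outputs.
              boolean reprocessIsGlitchFree = Integer.valueOf(0).equals(maxCaptureStall);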
8825          </details>
8826          <tag id="REPROC" />
8827        </entry>
8828      </static>
8829    </section>
8830    <section name="depth">
8831      <static>
8832        <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
8833          <description>Maximum number of points that a depth point cloud may contain.
8834          </description>
8835          <details>
8836            If a camera device supports outputting depth range data in the form of a depth point
8837            cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
8838            number of points an output buffer may contain.
8839
8840            Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
8841            If output in the depth point cloud format is not supported, this entry will
8842            not be defined.
8843          </details>
8844          <tag id="DEPTH" />
8845        </entry>
8846        <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public"
8847               enum="true" container="array" typedef="streamConfiguration" hwlevel="limited">
8848          <array>
8849            <size>n</size>
8850            <size>4</size>
8851          </array>
8852          <enum>
8853            <value>OUTPUT</value>
8854            <value>INPUT</value>
8855          </enum>
8856          <description>The available depth dataspace stream
8857          configurations that this camera device supports
8858          (i.e. format, width, height, output/input stream).
8859          </description>
8860          <details>
8861            These are output stream configurations for use with
8862            dataSpace HAL_DATASPACE_DEPTH. The configurations are
8863            listed as `(format, width, height, input?)` tuples.
8864
8865            Only devices that support depth output for at least
8866            the HAL_PIXEL_FORMAT_Y16 dense depth map may include
8867            this entry.
8868
8869            A device that also supports the HAL_PIXEL_FORMAT_BLOB
8870            sparse depth point cloud must report a single entry for
8871            the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
8872            android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
8873            the entries for HAL_PIXEL_FORMAT_Y16.
8874          </details>
8875          <tag id="DEPTH" />
8876        </entry>
8877        <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public"
8878               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
8879          <array>
8880            <size>4</size>
8881            <size>n</size>
8882          </array>
8883          <description>This lists the minimum frame duration for each
8884          format/size combination for depth output formats.
8885          </description>
8886          <units>(format, width, height, ns) x n</units>
8887          <details>
8888          This should correspond to the frame duration when only that
8889          stream is active, with all processing (typically in android.*.mode)
8890          set to either OFF or FAST.
8891
8892          When multiple streams are used in a request, the minimum frame
8893          duration will be max(individual stream min durations).
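
          For example, under this rule a request that targets a depth stream with a 33.3 ms
          minimum frame duration and a YUV stream with a 16.6 ms minimum frame duration has an
          overall minimum frame duration of 33.3 ms. A minimal sketch of the computation, with
          durations in nanoseconds as reported by this entry:

              // The overall minimum frame duration of a request is the max over its streams.
              long requestMinFrameDuration(long[] streamMinDurations) {
                  long minFrameDuration = 0;
                  for (long duration : streamMinDurations) {
                      minFrameDuration = Math.max(minFrameDuration, duration);
                  }
                  return minFrameDuration;
              }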
8894
8895          The minimum frame duration of a stream (of a particular format, size)
8896          is the same regardless of whether the stream is input or output.
8897
8898          See android.sensor.frameDuration and
8899          android.scaler.availableStallDurations for more details about
8900          calculating the max frame rate.
8901
8902          (Keep in sync with {@link
8903          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
8904          </details>
8905          <tag id="DEPTH" />
8906        </entry>
8907        <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public"
8908               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
8909          <array>
8910            <size>4</size>
8911            <size>n</size>
8912          </array>
8913          <description>This lists the maximum stall duration for each
8914          output format/size combination for depth streams.
8915          </description>
8916          <units>(format, width, height, ns) x n</units>
8917          <details>
8918          A stall duration is how much extra time would get added
8919          to the normal minimum frame duration for a repeating request
8920          that has streams with non-zero stall.
8921
8922          This functions similarly to
8923          android.scaler.availableStallDurations for depth
8924          streams.
8925
8926          All depth output stream formats may have a nonzero stall
8927          duration.
8928          </details>
8929          <tag id="DEPTH" />
8930        </entry>
8931        <entry name="depthIsExclusive" type="byte" visibility="public"
8932               enum="true" typedef="boolean" hwlevel="limited">
8933          <enum>
8934            <value>FALSE</value>
8935            <value>TRUE</value>
8936          </enum>
8937          <description>Indicates whether a capture request may target both a
8938          DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
8939          YUV_420_888, JPEG, or RAW) simultaneously.
8940          </description>
8941          <details>
8942          If TRUE, including both depth and color outputs in a single
8943          capture request is not supported. An application must interleave color
8944          and depth requests.  If FALSE, a single request can target both types
8945          of output.
8946
8947          Typically, this restriction exists on camera devices that
8948          need to emit a specific pattern or wavelength of light to
8949          measure depth values, which causes the color image to be
8950          corrupted during depth measurement.
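
          For example, an application can check the corresponding public key before adding both
          depth and color targets to a single request. A minimal sketch, where `characteristics`
          is the device's CameraCharacteristics:

              Boolean depthIsExclusive = characteristics.get(
                      CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
              // Only combine depth and color outputs in one request when this reports FALSE.
              boolean canCombineDepthAndColor = Boolean.FALSE.equals(depthIsExclusive);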
8951          </details>
8952        </entry>
8953      </static>
8954    </section>
8955  </namespace>
8956</metadata>
8957