StreamConfigurationMap.java revision 310f381eac558bce069b52fbda9a8aeb83608858
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.hardware.camera2.params;
18
19import android.graphics.ImageFormat;
20import android.graphics.PixelFormat;
21import android.hardware.camera2.CameraCharacteristics;
22import android.hardware.camera2.CameraDevice;
23import android.hardware.camera2.CaptureRequest;
24import android.hardware.camera2.utils.HashCodeHelpers;
25import android.hardware.camera2.legacy.LegacyCameraDevice;
26import android.hardware.camera2.legacy.LegacyMetadataMapper;
27import android.hardware.camera2.legacy.LegacyExceptionUtils.BufferQueueAbandonedException;
28import android.view.Surface;
29import android.util.Range;
30import android.util.Size;
31
32import java.util.Arrays;
33import java.util.HashMap;
34import java.util.Objects;
35import java.util.Set;
36
37import static com.android.internal.util.Preconditions.*;
38
39/**
40 * Immutable class to store the available stream
41 * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
42 * {@link android.view.Surface Surfaces} for creating a
43 * {@link android.hardware.camera2.CameraCaptureSession capture session} with
44 * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
45 * <!-- TODO: link to input stream configuration -->
46 *
 * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
 * for each format) that are supported by a camera device.</p>
49 *
50 * <p>This also contains the minimum frame durations and stall durations for each format/size
51 * combination that can be used to calculate effective frame rate when submitting multiple captures.
52 * </p>
53 *
54 * <p>An instance of this object is available from {@link CameraCharacteristics} using
55 * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
56 * {@link CameraCharacteristics#get} method.</p>
57 *
58 * <pre><code>{@code
59 * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
60 * StreamConfigurationMap configs = characteristics.get(
61 *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
62 * }</code></pre>
63 *
64 * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
65 * @see CameraDevice#createCaptureSession
66 */
67public final class StreamConfigurationMap {
68
69    private static final String TAG = "StreamConfigurationMap";
70
71    /**
72     * Create a new {@link StreamConfigurationMap}.
73     *
     * <p>Ownership of the array parameters is passed to this object after creation; do not
     * write to them after this constructor is invoked.</p>
76     *
     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
     * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param depthConfigurations an array of depth {@link StreamConfiguration}, or {@code null} if
     *        the camera device does not support depth outputs
     * @param depthMinFrameDurations an array of {@link StreamConfigurationDuration} for depth
     *        streams; ignored when {@code depthConfigurations} is {@code null}
     * @param depthStallDurations an array of {@link StreamConfigurationDuration} for depth
     *        streams; ignored when {@code depthConfigurations} is {@code null}
     * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration},
     *        {@code null} if the camera device does not support high speed video recording
     * @param inputOutputFormatsMap the map of valid output formats for each input format, or
     *        {@code null} if reprocessing is not supported
     *
     * @throws NullPointerException if any of the arguments except the depth arrays,
     *         {@code highSpeedVideoConfigurations}, and {@code inputOutputFormatsMap} were
     *         {@code null}, or if any subelements were {@code null}
85     *
86     * @hide
87     */
88    public StreamConfigurationMap(
89            StreamConfiguration[] configurations,
90            StreamConfigurationDuration[] minFrameDurations,
91            StreamConfigurationDuration[] stallDurations,
92            StreamConfiguration[] depthConfigurations,
93            StreamConfigurationDuration[] depthMinFrameDurations,
94            StreamConfigurationDuration[] depthStallDurations,
95            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
96            ReprocessFormatsMap inputOutputFormatsMap) {
97        mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
98        mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
99        mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
100
101        if (depthConfigurations == null) {
102            mDepthConfigurations = new StreamConfiguration[0];
103            mDepthMinFrameDurations = new StreamConfigurationDuration[0];
104            mDepthStallDurations = new StreamConfigurationDuration[0];
105        } else {
106            mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
107                    "depthConfigurations");
108            mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
109                    "depthMinFrameDurations");
110            mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
111                    "depthStallDurations");
112        }
113
114        if (highSpeedVideoConfigurations == null) {
115            mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
116        } else {
117            mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
118                    highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
119        }
120
121        // For each format, track how many sizes there are available to configure
122        for (StreamConfiguration config : configurations) {
123            HashMap<Integer, Integer> map = config.isOutput() ? mOutputFormats : mInputFormats;
124
125            Integer count = map.get(config.getFormat());
126
127            if (count == null) {
128                count = 0;
129            }
130
131            map.put(config.getFormat(), count + 1);
132        }
133
134        // For each depth format, track how many sizes there are available to configure
135        for (StreamConfiguration config : mDepthConfigurations) {
136            if (!config.isOutput()) {
137                // Ignoring input depth configs
138                continue;
139            }
140
141            Integer count = mDepthOutputFormats.get(config.getFormat());
142
143            if (count == null) {
144                count = 0;
145            }
146
147            mDepthOutputFormats.put(config.getFormat(), count + 1);
148        }
149
150        if (!mOutputFormats.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
151            throw new AssertionError(
152                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
153        }
154
        // For each size, count the available FPS ranges; for each FPS range, count the available sizes
156        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
157            Size size = config.getSize();
158            Range<Integer> fpsRange = config.getFpsRange();
159            Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
160            if (fpsRangeCount == null) {
161                fpsRangeCount = 0;
162            }
163            mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
164            Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
165            if (sizeCount == null) {
166                sizeCount = 0;
167            }
168            mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
169        }
170
171        mInputOutputFormatsMap = inputOutputFormatsMap;
172    }
173
174    /**
175     * Get the image {@code format} output formats in this stream configuration.
176     *
177     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
178     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
179     *
180     * <p>Formats listed in this array are guaranteed to return true if queried with
181     * {@link #isOutputSupportedFor(int)}.</p>
182     *
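     * <p>A minimal usage sketch ({@code configs} refers to this map, obtained as in the class
     * documentation; the loop body is illustrative only):</p>
     *
     * <pre><code>{@code
     * for (int format : configs.getOutputFormats()) {
     *     // Every format listed here is reported as supported by isOutputSupportedFor(int).
     *     Size[] sizes = configs.getOutputSizes(format);
     * }
     * }</code></pre>
     *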
     * @return an array of integer formats
184     *
185     * @see ImageFormat
186     * @see PixelFormat
187     */
188    public final int[] getOutputFormats() {
189        return getPublicFormats(/*output*/true);
190    }
191
192    /**
193     * Get the image {@code format} output formats for a reprocessing input format.
194     *
195     * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
196     * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
197     * listed in the return value of this method. Including any other output Surface as a target
198     * will throw an IllegalArgumentException. If no output format is supported given the input
199     * format, an empty int[] will be returned.</p>
200     *
201     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
202     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
203     *
204     * <p>Formats listed in this array are guaranteed to return true if queried with
205     * {@link #isOutputSupportedFor(int)}.</p>
206     *
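     * <p>An illustrative sketch ({@code configs} refers to this map; the input format should be
     * one returned by {@link #getInputFormats}):</p>
     *
     * <pre><code>{@code
     * for (int inputFormat : configs.getInputFormats()) {
     *     // Only these output formats may be targeted by a reprocess request using inputFormat.
     *     int[] outputFormats = configs.getValidOutputFormatsForInput(inputFormat);
     * }
     * }</code></pre>
     *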
     * @return an array of integer formats
208     *
209     * @see ImageFormat
210     * @see PixelFormat
211     */
212    public final int[] getValidOutputFormatsForInput(int inputFormat) {
213        if (mInputOutputFormatsMap == null) {
214            return new int[0];
215        }
216        return mInputOutputFormatsMap.getOutputs(inputFormat);
217    }
218
219    /**
220     * Get the image {@code format} input formats in this stream configuration.
221     *
222     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
223     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
224     *
     * @return an array of integer formats
226     *
227     * @see ImageFormat
228     * @see PixelFormat
229     */
230    public final int[] getInputFormats() {
231        return getPublicFormats(/*output*/false);
232    }
233
234    /**
235     * Get the supported input sizes for this input format.
236     *
237     * <p>The format must have come from {@link #getInputFormats}; otherwise
238     * {@code null} is returned.</p>
239     *
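     * <p>Illustrative only ({@code configs} refers to this map):</p>
     *
     * <pre><code>{@code
     * for (int inputFormat : configs.getInputFormats()) {
     *     // Non-null and non-empty for every format reported by getInputFormats().
     *     Size[] inputSizes = configs.getInputSizes(inputFormat);
     * }
     * }</code></pre>
     *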
240     * @param format a format from {@link #getInputFormats}
241     * @return a non-empty array of sizes, or {@code null} if the format was not available.
242     */
243    public Size[] getInputSizes(final int format) {
244        return getPublicFormatSizes(format, /*output*/false);
245    }
246
247    /**
     * Determine whether or not output surfaces with a particular user-defined format can be passed to
     * {@link CameraDevice#createCaptureSession createCaptureSession}.
250     *
     * <p>This method determines whether the output {@code format} is supported by the camera device;
252     * each output {@code surface} target may or may not itself support that {@code format}.
253     * Refer to the class which provides the surface for additional documentation.</p>
254     *
255     * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
256     * returned by {@link #getOutputSizes}.</p>
257     *
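     * <p>A short sketch ({@code configs} refers to this map; JPEG is just an example format):</p>
     *
     * <pre><code>{@code
     * if (configs.isOutputSupportedFor(ImageFormat.JPEG)) {
     *     // Guaranteed to be non-null, since the format is a supported output.
     *     Size[] jpegSizes = configs.getOutputSizes(ImageFormat.JPEG);
     * }
     * }</code></pre>
     *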
258     * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
259     * @return
260     *          {@code true} iff using a {@code surface} with this {@code format} will be
261     *          supported with {@link CameraDevice#createCaptureSession}
262     *
263     * @throws IllegalArgumentException
264     *          if the image format was not a defined named constant
265     *          from either {@link ImageFormat} or {@link PixelFormat}
266     *
267     * @see ImageFormat
268     * @see PixelFormat
269     * @see CameraDevice#createCaptureSession
270     */
271    public boolean isOutputSupportedFor(int format) {
272        checkArgumentFormat(format);
273
274        int internalFormat = imageFormatToInternal(format);
275        int dataspace = imageFormatToDataspace(format);
276        if (dataspace == HAL_DATASPACE_DEPTH) {
277            return mDepthOutputFormats.containsKey(internalFormat);
278        } else {
279            return getFormatsMap(/*output*/true).containsKey(internalFormat);
280        }
281    }
282
283    /**
284     * Determine whether or not output streams can be configured with a particular class
285     * as a consumer.
286     *
287     * <p>The following list is generally usable for outputs:
288     * <ul>
289     * <li>{@link android.media.ImageReader} -
290     * Recommended for image processing or streaming to external resources (such as a file or
291     * network)
292     * <li>{@link android.media.MediaRecorder} -
293     * Recommended for recording video (simple to use)
294     * <li>{@link android.media.MediaCodec} -
295     * Recommended for recording video (more complicated to use, with more flexibility)
296     * <li>{@link android.renderscript.Allocation} -
297     * Recommended for image processing with {@link android.renderscript RenderScript}
298     * <li>{@link android.view.SurfaceHolder} -
299     * Recommended for low-power camera preview with {@link android.view.SurfaceView}
300     * <li>{@link android.graphics.SurfaceTexture} -
301     * Recommended for OpenGL-accelerated preview processing or compositing with
302     * {@link android.view.TextureView}
303     * </ul>
304     * </p>
305     *
306     * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
307     * provide a producer endpoint that is suitable to be used with
308     * {@link CameraDevice#createCaptureSession}.</p>
309     *
310     * <p>Since not all of the above classes support output of all format and size combinations,
311     * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
312     *
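     * <p>For example (a sketch; {@link android.graphics.SurfaceTexture} is just one of the
     * classes listed above):</p>
     *
     * <pre><code>{@code
     * boolean supported =
     *         StreamConfigurationMap.isOutputSupportedFor(android.graphics.SurfaceTexture.class);
     * }</code></pre>
     *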
313     * @param klass a non-{@code null} {@link Class} object reference
314     * @return {@code true} if this class is supported as an output, {@code false} otherwise
315     *
316     * @throws NullPointerException if {@code klass} was {@code null}
317     *
318     * @see CameraDevice#createCaptureSession
319     * @see #isOutputSupportedFor(Surface)
320     */
321    public static <T> boolean isOutputSupportedFor(Class<T> klass) {
322        checkNotNull(klass, "klass must not be null");
323
324        if (klass == android.media.ImageReader.class) {
325            return true;
326        } else if (klass == android.media.MediaRecorder.class) {
327            return true;
328        } else if (klass == android.media.MediaCodec.class) {
329            return true;
330        } else if (klass == android.renderscript.Allocation.class) {
331            return true;
332        } else if (klass == android.view.SurfaceHolder.class) {
333            return true;
334        } else if (klass == android.graphics.SurfaceTexture.class) {
335            return true;
336        }
337
338        return false;
339    }
340
341    /**
342     * Determine whether or not the {@code surface} in its current state is suitable to be included
343     * in a {@link CameraDevice#createCaptureSession capture session} as an output.
344     *
345     * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
346     * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
347     * compatible with the {@link CameraDevice} in general
     * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
349     * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
350     *
351     * <p>Reasons for a {@code surface} being specifically incompatible might be:
352     * <ul>
353     * <li>Using a format that's not listed by {@link #getOutputFormats}
354     * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
     * <li>The {@code surface} itself is not in a state where it can service a new producer.
     * </li>
     * </ul>
358     *
359     * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
360     * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
     * Surface is used to create a capture session, it will have its size rounded to the nearest
363     * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
364     * and ImageReader.</p>
365     *
366     * <p>This is not an exhaustive list; see the particular class's documentation for further
367     * possible reasons of incompatibility.</p>
368     *
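     * <p>A hedged sketch of validating a {@link Surface} before session creation (assumes
     * {@code previewSurface} was created elsewhere, for example from a
     * {@link android.graphics.SurfaceTexture}):</p>
     *
     * <pre><code>{@code
     * if (!configs.isOutputSupportedFor(previewSurface)) {
     *     // Reconfigure the producer's size or format before passing the Surface to
     *     // createCaptureSession.
     * }
     * }</code></pre>
     *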
369     * @param surface a non-{@code null} {@link Surface} object reference
370     * @return {@code true} if this is supported, {@code false} otherwise
371     *
372     * @throws NullPointerException if {@code surface} was {@code null}
373     * @throws IllegalArgumentException if the Surface endpoint is no longer valid
374     *
375     * @see CameraDevice#createCaptureSession
376     * @see #isOutputSupportedFor(Class)
377     */
378    public boolean isOutputSupportedFor(Surface surface) {
379        checkNotNull(surface, "surface must not be null");
380
381        Size surfaceSize;
382        int surfaceFormat = -1;
383        try {
384            surfaceSize = LegacyCameraDevice.getSurfaceSize(surface);
385            surfaceFormat = LegacyCameraDevice.detectSurfaceType(surface);
386        } catch(BufferQueueAbandonedException e) {
387            throw new IllegalArgumentException("Abandoned surface", e);
388        }
389
390        // See if consumer is flexible.
391        boolean isFlexible = LegacyCameraDevice.isFlexibleConsumer(surface);
392
393        // Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
394        if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
395                        surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
396            surfaceFormat = LegacyMetadataMapper.HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
397        }
398
399        for (StreamConfiguration config : mConfigurations) {
400            if (config.getFormat() == surfaceFormat && config.isOutput()) {
                // Matching format; either an exact size match is needed, or a flexible consumer
402                // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
403                if (config.getSize().equals(surfaceSize)) {
404                    return true;
405                } else if (isFlexible &&
406                        (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
407                    return true;
408                }
409            }
410        }
411        return false;
412    }
413
414    /**
415     * Get a list of sizes compatible with {@code klass} to use as an output.
416     *
     * <p>Since some of the supported classes may support additional formats beyond
     * {@link ImageFormat#PRIVATE}, this function only returns
     * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
     * supports both {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, but this
     * method will only return the sizes for {@link ImageFormat#PRIVATE} for the
     * {@link android.media.ImageReader} class.</p>
423     *
424     * <p>If a well-defined format such as {@code NV21} is required, use
425     * {@link #getOutputSizes(int)} instead.</p>
426     *
     * <p>The {@code klass} should be a supported output, that is, one for which
     * {@link #isOutputSupportedFor(Class)} returns {@code true}.</p>
429     *
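     * <p>An illustrative sketch for a {@link android.graphics.SurfaceTexture}-backed preview
     * ({@code configs} refers to this map; the class is just one supported example):</p>
     *
     * <pre><code>{@code
     * Size[] textureSizes = configs.getOutputSizes(android.graphics.SurfaceTexture.class);
     * if (textureSizes != null) {
     *     // Pick one size and apply it with SurfaceTexture#setDefaultBufferSize(width, height).
     * }
     * }</code></pre>
     *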
430     * @param klass
431     *          a non-{@code null} {@link Class} object reference
432     * @return
433     *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
434     *          or {@code null} iff the {@code klass} is not a supported output.
435     *
436     *
437     * @throws NullPointerException if {@code klass} was {@code null}
438     *
439     * @see #isOutputSupportedFor(Class)
440     */
441    public <T> Size[] getOutputSizes(Class<T> klass) {
        if (!isOutputSupportedFor(klass)) {
443            return null;
444        }
445
446        return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
447                HAL_DATASPACE_UNKNOWN,/*output*/true);
448    }
449
450    /**
451     * Get a list of sizes compatible with the requested image {@code format}.
452     *
453     * <p>The {@code format} should be a supported format (one of the formats returned by
454     * {@link #getOutputFormats}).</p>
455     *
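     * <p>For example, picking the largest JPEG output size (a sketch; it assumes JPEG is a
     * supported output, and the selection policy is up to the caller):</p>
     *
     * <pre><code>{@code
     * Size[] jpegSizes = configs.getOutputSizes(ImageFormat.JPEG);
     * Size largest = jpegSizes[0];
     * for (Size s : jpegSizes) {
     *     if ((long) s.getWidth() * s.getHeight()
     *             > (long) largest.getWidth() * largest.getHeight()) {
     *         largest = s;
     *     }
     * }
     * }</code></pre>
     *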
456     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
457     * @return
458     *          an array of supported sizes,
459     *          or {@code null} if the {@code format} is not a supported output
460     *
461     * @see ImageFormat
462     * @see PixelFormat
463     * @see #getOutputFormats
464     */
465    public Size[] getOutputSizes(int format) {
466        return getPublicFormatSizes(format, /*output*/true);
467    }
468
469    /**
470     * Get a list of supported high speed video recording sizes.
471     *
472     * <p> When HIGH_SPEED_VIDEO is supported in
473     * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES available scene modes}, this
474     * method will list the supported high speed video size configurations. All the sizes listed
475     * will be a subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling
     * formats (typically {@link ImageFormat#YUV_420_888}, {@link ImageFormat#NV21},
     * {@link ImageFormat#YV12}).</p>
477     *
478     * <p> To enable high speed video recording, application must set
479     * {@link CaptureRequest#CONTROL_SCENE_MODE} to
480     * {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
481     * requests and select the video size from this method and
482     * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
483     * {@link #getHighSpeedVideoFpsRangesFor} to configure the recording and preview streams and
484     * setup the recording requests. For example, if the application intends to do high speed
485     * recording, it can select the maximum size reported by this method to configure output
486     * streams. Note that for the use case of multiple output streams, application must select one
487     * unique size from this method to use. Otherwise a request error might occur. Once the size is
488     * selected, application can get the supported FPS ranges by
489     * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
490     * requests.</p>
491     *
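     * <p>A sketch of the selection flow described above ({@code configs} refers to this map; the
     * choice of the first size is illustrative only):</p>
     *
     * <pre><code>{@code
     * Size[] hsSizes = configs.getHighSpeedVideoSizes();
     * Size videoSize = hsSizes[0];
     * Range<Integer>[] fpsRanges = configs.getHighSpeedVideoFpsRangesFor(videoSize);
     * // Use videoSize for every output stream, set CONTROL_SCENE_MODE to HIGH_SPEED_VIDEO and
     * // CONTROL_AE_TARGET_FPS_RANGE to one of fpsRanges in the recording requests.
     * }</code></pre>
     *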
492     * @return
493     *          an array of supported high speed video recording sizes
494     *
495     * @see #getHighSpeedVideoFpsRangesFor(Size)
496     */
497    public Size[] getHighSpeedVideoSizes() {
498        Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
499        return keySet.toArray(new Size[keySet.size()]);
500    }
501
502    /**
     * Get the frames per second ranges (fpsMin, fpsMax) for the input high speed video size.
504     *
505     * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
506     *
     * <p> For the normal video recording use case, where the application will NOT set
     * {@link CaptureRequest#CONTROL_SCENE_MODE} to
     * {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
     * requests, the {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported by
     * this method must not be used to set up capture requests, or a request error will occur.</p>
512     *
513     * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
514     * @return
515     *          An array of FPS range to use with
516     *          {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE TARGET_FPS_RANGE} when using
517     *          {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} scene
518     *          mode.
     *          The upper bound of the returned ranges is guaranteed to be greater than or equal to 60.
520     *
521     * @throws IllegalArgumentException if input size does not exist in the return value of
522     *         getHighSpeedVideoSizes
523     * @see #getHighSpeedVideoSizes()
524     */
525    public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
526        Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
527        if (fpsRangeCount == null || fpsRangeCount == 0) {
528            throw new IllegalArgumentException(String.format(
529                    "Size %s does not support high speed video recording", size));
530        }
531
532        @SuppressWarnings("unchecked")
533        Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
534        int i = 0;
535        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
536            if (size.equals(config.getSize())) {
537                fpsRanges[i++] = config.getFpsRange();
538            }
539        }
540        return fpsRanges;
541    }
542
543    /**
544     * Get a list of supported high speed video recording FPS ranges.
545     *
546     * <p> When HIGH_SPEED_VIDEO is supported in
547     * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES available scene modes}, this
548     * method will list the supported high speed video FPS range configurations. Application can
549     * then use {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned
550     * FPS range.</p>
551     *
552     * <p> To enable high speed video recording, application must set
553     * {@link CaptureRequest#CONTROL_SCENE_MODE} to
554     * {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
555     * requests and select the video size from {@link #getHighSpeedVideoSizesFor} and
556     * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
557     * this method to configure the recording and preview streams and setup the recording requests.
558     * For example, if the application intends to do high speed recording, it can select one FPS
     * range reported by this method, query the video sizes corresponding to this FPS range by
     * {@link #getHighSpeedVideoSizesFor} and select one of the reported sizes to configure output
561     * streams. Note that for the use case of multiple output streams, application must select one
562     * unique size from {@link #getHighSpeedVideoSizesFor}, and use it for all output streams.
563     * Otherwise a request error might occur when attempting to enable
564     * {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}.
565     * Once the stream is configured, application can set the FPS range in the recording requests.
566     * </p>
567     *
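     * <p>A sketch of the FPS-range-first flow described above ({@code configs} refers to this
     * map; the choices are illustrative only):</p>
     *
     * <pre><code>{@code
     * Range<Integer>[] ranges = configs.getHighSpeedVideoFpsRanges();
     * Range<Integer> fpsRange = ranges[0];
     * Size[] sizesForRange = configs.getHighSpeedVideoSizesFor(fpsRange);
     * }</code></pre>
     *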
568     * @return
569     *          an array of supported high speed video recording FPS ranges
     *          The upper bound of the returned ranges is guaranteed to be greater than or equal to 60.
571     *
572     * @see #getHighSpeedVideoSizesFor
573     */
574    @SuppressWarnings("unchecked")
575    public Range<Integer>[] getHighSpeedVideoFpsRanges() {
576        Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
577        return keySet.toArray(new Range[keySet.size()]);
578    }
579
580    /**
     * Get the supported video sizes for an input FPS range.
582     *
583     * <p> See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.</p>
584     *
     * <p> For the normal video recording use case, where the application will NOT set
     * {@link CaptureRequest#CONTROL_SCENE_MODE} to
     * {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
     * requests, the high speed {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges}
     * must not be used to set up capture requests, or a request error will occur.</p>
590     *
     * @param fpsRange one of the FPS ranges returned by {@link #getHighSpeedVideoFpsRanges()}
592     * @return
593     *          An array of video sizes to configure output stream when using
594     *          {@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} scene
595     *          mode.
596     *
597     * @throws IllegalArgumentException if input FPS range does not exist in the return value of
598     *         getHighSpeedVideoFpsRanges
599     * @see #getHighSpeedVideoFpsRanges()
600     */
601    public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
602        Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
603        if (sizeCount == null || sizeCount == 0) {
604            throw new IllegalArgumentException(String.format(
605                    "FpsRange %s does not support high speed video recording", fpsRange));
606        }
607
608        Size[] sizes = new Size[sizeCount];
609        int i = 0;
610        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
611            if (fpsRange.equals(config.getFpsRange())) {
612                sizes[i++] = config.getSize();
613            }
614        }
615        return sizes;
616    }
617
618    /**
619     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
620     * for the format/size combination (in nanoseconds).
621     *
622     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
623     * <p>{@code size} should be one of the ones returned by
624     * {@link #getOutputSizes(int)}.</p>
625     *
626     * <p>This should correspond to the frame duration when only that stream is active, with all
627     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
628     * </p>
629     *
630     * <p>When multiple streams are used in a request, the minimum frame duration will be
631     * {@code max(individual stream min durations)}.</p>
632     *
633     * <p>For devices that do not support manual sensor control
634     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
635     * this function may return 0.</p>
636     *
637     * <!--
638     * TODO: uncomment after adding input stream support
639     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
640     * regardless of whether the stream is input or output.</p>
641     * -->
642     *
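     * <p>For example (a sketch; {@code previewSize} and {@code jpegSize} are assumed to have been
     * chosen from {@link #getOutputSizes(int)}): the effective minimum frame duration of a request
     * targeting both streams is the larger of the two per-stream values.</p>
     *
     * <pre><code>{@code
     * long yuvMin = configs.getOutputMinFrameDuration(ImageFormat.YUV_420_888, previewSize);
     * long jpegMin = configs.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
     * long minFrameDuration = Math.max(yuvMin, jpegMin); // nanoseconds; 0 if unavailable
     * double maxFps = (minFrameDuration > 0) ? 1e9 / minFrameDuration : 0;
     * }</code></pre>
     *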
643     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
644     * @param size an output-compatible size
645     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
646     *          0 if the minimum frame duration is not available.
647     *
648     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
649     * @throws NullPointerException if {@code size} was {@code null}
650     *
651     * @see CaptureRequest#SENSOR_FRAME_DURATION
652     * @see #getOutputStallDuration(int, Size)
653     * @see ImageFormat
654     * @see PixelFormat
655     */
656    public long getOutputMinFrameDuration(int format, Size size) {
657        checkNotNull(size, "size must not be null");
658        checkArgumentFormatSupported(format, /*output*/true);
659
660        return getInternalFormatDuration(imageFormatToInternal(format),
661                imageFormatToDataspace(format),
662                size,
663                DURATION_MIN_FRAME);
664    }
665
666    /**
667     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
668     * for the class/size combination (in nanoseconds).
669     *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
671     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
672     *
673     * <p>{@code klass} should be one of the ones which is supported by
674     * {@link #isOutputSupportedFor(Class)}.</p>
675     *
676     * <p>{@code size} should be one of the ones returned by
677     * {@link #getOutputSizes(int)}.</p>
678     *
679     * <p>This should correspond to the frame duration when only that stream is active, with all
680     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
681     * </p>
682     *
683     * <p>When multiple streams are used in a request, the minimum frame duration will be
684     * {@code max(individual stream min durations)}.</p>
685     *
686     * <p>For devices that do not support manual sensor control
687     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
688     * this function may return 0.</p>
689     *
690     * <!--
691     * TODO: uncomment after adding input stream support
692     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
693     * regardless of whether the stream is input or output.</p>
694     * -->
695     *
696     * @param klass
697     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
698     *          non-empty array returned by {@link #getOutputSizes(Class)}
699     * @param size an output-compatible size
700     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
701     *          0 if the minimum frame duration is not available.
702     *
703     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
704     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
705     *
706     * @see CaptureRequest#SENSOR_FRAME_DURATION
707     * @see ImageFormat
708     * @see PixelFormat
709     */
710    public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
711        if (!isOutputSupportedFor(klass)) {
712            throw new IllegalArgumentException("klass was not supported");
713        }
714
715        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
716                HAL_DATASPACE_UNKNOWN,
717                size, DURATION_MIN_FRAME);
718    }
719
720    /**
721     * Get the stall duration for the format/size combination (in nanoseconds).
722     *
723     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
724     * <p>{@code size} should be one of the ones returned by
725     * {@link #getOutputSizes(int)}.</p>
726     *
727     * <p>
728     * A stall duration is how much extra time would get added to the normal minimum frame duration
729     * for a repeating request that has streams with non-zero stall.
730     *
731     * <p>For example, consider JPEG captures which have the following characteristics:
732     *
733     * <ul>
734     * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
735     * in requests in which they are directly referenced, they act as JPEG streams.
736     * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
737     * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
738     * requests that actually reference a JPEG stream.
739     * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
740     * process more than 1 capture at a time.
741     * </ul>
742     *
743     * <p>In other words, using a repeating YUV request would result in a steady frame rate
744     * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
745     * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
746     * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
747     * 30 FPS.</p>
748     *
749     * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
750     * frame rate drop unless there are still outstanding buffers for that stream from previous
751     * requests.</p>
752     *
     * <p>Submitting a repeating request with a set of streams (call this {@code S}) is the same as
     * setting the minimum frame duration to the normal minimum frame duration corresponding to
     * {@code S}, plus the maximum stall duration for {@code S}.</p>
756     *
     * <p>When interleaving requests with and without a stall duration, a request will stall by the
     * maximum of the remaining stall times of each stream that can stall and still has outstanding
     * buffers.</p>
759     *
760     * <p>This means that a stalling request will not have an exposure start until the stall has
761     * completed.</p>
762     *
763     * <p>This should correspond to the stall duration when only that stream is active, with all
764     * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
765     * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
766     * indeterminate stall duration for all streams in a request (the regular stall calculation
767     * rules are ignored).</p>
768     *
769     * <p>The following formats may always have a stall duration:
770     * <ul>
771     * <li>{@link ImageFormat#JPEG JPEG}
772     * <li>{@link ImageFormat#RAW_SENSOR RAW16}
773     * </ul>
774     * </p>
775     *
776     * <p>The following formats will never have a stall duration:
777     * <ul>
778     * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
779     * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
780     * </ul></p>
781     *
782     * <p>
783     * All other formats may or may not have an allowed stall duration on a per-capability basis;
784     * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
785     * android.request.availableCapabilities} for more details.</p>
786     * </p>
787     *
788     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
789     * for more information about calculating the max frame rate (absent stalls).</p>
790     *
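     * <p>An illustrative calculation (a sketch; {@code jpegSize} is assumed to have been chosen
     * from {@link #getOutputSizes(int)}): the stall of a JPEG stream is added on top of the
     * minimum frame duration whenever a request references that stream.</p>
     *
     * <pre><code>{@code
     * long minDuration = configs.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
     * long stall = configs.getOutputStallDuration(ImageFormat.JPEG, jpegSize);
     * long worstCaseFrameDuration = minDuration + stall; // in nanoseconds
     * }</code></pre>
     *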
791     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
792     * @param size an output-compatible size
793     * @return a stall duration {@code >=} 0 in nanoseconds
794     *
795     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
796     * @throws NullPointerException if {@code size} was {@code null}
797     *
798     * @see CaptureRequest#SENSOR_FRAME_DURATION
799     * @see ImageFormat
800     * @see PixelFormat
801     */
802    public long getOutputStallDuration(int format, Size size) {
803        checkArgumentFormatSupported(format, /*output*/true);
804
805        return getInternalFormatDuration(imageFormatToInternal(format),
806                imageFormatToDataspace(format),
807                size,
808                DURATION_STALL);
809    }
810
811    /**
812     * Get the stall duration for the class/size combination (in nanoseconds).
813     *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
816     *
817     * <p>{@code klass} should be one of the ones with a non-empty array returned by
818     * {@link #getOutputSizes(Class)}.</p>
819     *
820     * <p>{@code size} should be one of the ones returned by
821     * {@link #getOutputSizes(Class)}.</p>
822     *
823     * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
824     * <em>stall duration</em>.</p>
825     *
826     * @param klass
827     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
828     *          non-empty array returned by {@link #getOutputSizes(Class)}
829     * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
831     *
832     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
833     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
834     *
835     * @see CaptureRequest#SENSOR_FRAME_DURATION
836     * @see ImageFormat
837     * @see PixelFormat
838     */
839    public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
840        if (!isOutputSupportedFor(klass)) {
841            throw new IllegalArgumentException("klass was not supported");
842        }
843
844        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
845                HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
846    }
847
848    /**
849     * Check if this {@link StreamConfigurationMap} is equal to another
850     * {@link StreamConfigurationMap}.
851     *
     * <p>Two maps are equal if and only if all of their respective internal arrays are equal.</p>
853     *
854     * @return {@code true} if the objects were equal, {@code false} otherwise
855     */
856    @Override
857    public boolean equals(final Object obj) {
858        if (obj == null) {
859            return false;
860        }
861        if (this == obj) {
862            return true;
863        }
864        if (obj instanceof StreamConfigurationMap) {
865            final StreamConfigurationMap other = (StreamConfigurationMap) obj;
866            // XX: do we care about order?
867            return Arrays.equals(mConfigurations, other.mConfigurations) &&
868                    Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
869                    Arrays.equals(mStallDurations, other.mStallDurations) &&
870                    Arrays.equals(mHighSpeedVideoConfigurations,
871                            other.mHighSpeedVideoConfigurations);
872        }
873        return false;
874    }
875
876    /**
877     * {@inheritDoc}
878     */
879    @Override
880    public int hashCode() {
881        // XX: do we care about order?
882        return HashCodeHelpers.hashCode(
883                mConfigurations, mMinFrameDurations,
884                mStallDurations, mHighSpeedVideoConfigurations);
885    }
886
887    // Check that the argument is supported by #getOutputFormats or #getInputFormats
888    private int checkArgumentFormatSupported(int format, boolean output) {
889        checkArgumentFormat(format);
890
891        int[] formats = output ? getOutputFormats() : getInputFormats();
892        for (int i = 0; i < formats.length; ++i) {
893            if (format == formats[i]) {
894                return format;
895            }
896        }
897
898        throw new IllegalArgumentException(String.format(
899                "format %x is not supported by this stream configuration map", format));
900    }
901
902    /**
903     * Ensures that the format is either user-defined or implementation defined.
904     *
905     * <p>If a format has a different internal representation than the public representation,
906     * passing in the public representation here will fail.</p>
907     *
     * <p>For example, trying to use {@link ImageFormat#JPEG} will fail this check, because it
     * has a different public representation than its internal representation,
     * {@code HAL_PIXEL_FORMAT_BLOB}.</p>
911     *
912     * <p>Any invalid/undefined formats will raise an exception.</p>
913     *
914     * @param format image format
915     * @return the format
916     *
917     * @throws IllegalArgumentException if the format was invalid
918     */
919    static int checkArgumentFormatInternal(int format) {
920        switch (format) {
921            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
922            case HAL_PIXEL_FORMAT_BLOB:
923            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
924            case HAL_PIXEL_FORMAT_Y16:
925                return format;
926            case ImageFormat.JPEG:
927                throw new IllegalArgumentException(
928                        "ImageFormat.JPEG is an unknown internal format");
929            default:
930                return checkArgumentFormat(format);
931        }
932    }
933
934    /**
935     * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
936     *
937     * <p>If a format has a different public representation than the internal representation,
938     * passing in the internal representation here will fail.</p>
939     *
     * <p>For example, trying to use {@code HAL_PIXEL_FORMAT_BLOB} will fail this check, because
     * it has a different internal representation than its public representation,
     * {@link ImageFormat#JPEG}.</p>
943     *
944     * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
945     * </p>
946     *
947     * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
948     *
949     * @param format image format
950     * @return the format
951     *
952     * @throws IllegalArgumentException if the format was not user-defined
953     */
954    static int checkArgumentFormat(int format) {
955        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
956            throw new IllegalArgumentException(String.format(
957                    "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
958        }
959
960        return format;
961    }
962
963    /**
964     * Convert an internal format compatible with {@code graphics.h} into public-visible
965     * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
966     *
967     * <p>In particular these formats are converted:
968     * <ul>
969     * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
970     * </ul>
971     * </p>
972     *
973     * <p>Passing in a format which has no public equivalent will fail;
974     * as will passing in a public format which has a different internal format equivalent.
975     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
976     *
977     * <p>All other formats are returned as-is, no further invalid check is performed.</p>
978     *
979     * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
980     * HAL_DATASPACE_DEPTH.</p>
981     *
982     * @param format image format from {@link ImageFormat} or {@link PixelFormat}
     * @return the converted image format
984     *
985     * @throws IllegalArgumentException
986     *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
987     *          {@link ImageFormat#JPEG}
988     *
989     * @see ImageFormat
990     * @see PixelFormat
991     * @see #checkArgumentFormat
992     */
993    static int imageFormatToPublic(int format) {
994        switch (format) {
995            case HAL_PIXEL_FORMAT_BLOB:
996                return ImageFormat.JPEG;
997            case ImageFormat.JPEG:
998                throw new IllegalArgumentException(
999                        "ImageFormat.JPEG is an unknown internal format");
1000            default:
1001                return format;
1002        }
1003    }
1004
1005    /**
1006     * Convert an internal format compatible with {@code graphics.h} into public-visible
1007     * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1008     *
1009     * <p>In particular these formats are converted:
1010     * <ul>
1011     * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1012     * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1013     * </ul>
1014     * </p>
1015     *
1016     * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1017     * as will passing in a public format which has a different internal format equivalent.
1018     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1019     *
1020     * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1021     *
1022     * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1023     * HAL_DATASPACE_DEPTH.</p>
1024     *
1025     * @param format image format from {@link ImageFormat} or {@link PixelFormat}
     * @return the converted image format
1027     *
1028     * @throws IllegalArgumentException
1029     *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1030     *          {@link ImageFormat#JPEG}
1031     *
1032     * @see ImageFormat
1033     * @see PixelFormat
1034     * @see #checkArgumentFormat
1035     */
1036    static int depthFormatToPublic(int format) {
1037        switch (format) {
1038            case HAL_PIXEL_FORMAT_BLOB:
1039                return ImageFormat.DEPTH_POINT_CLOUD;
1040            case HAL_PIXEL_FORMAT_Y16:
1041                return ImageFormat.DEPTH16;
1042            case ImageFormat.JPEG:
1043                throw new IllegalArgumentException(
1044                        "ImageFormat.JPEG is an unknown internal format");
1045            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1046                throw new IllegalArgumentException(
1047                        "IMPLEMENTATION_DEFINED must not leak to public API");
1048            default:
1049                throw new IllegalArgumentException(
1050                        "Unknown DATASPACE_DEPTH format " + format);
1051        }
1052    }
1053
1054    /**
1055     * Convert image formats from internal to public formats (in-place).
1056     *
1057     * @param formats an array of image formats
1058     * @return {@code formats}
1059     *
1060     * @see #imageFormatToPublic
1061     */
1062    static int[] imageFormatToPublic(int[] formats) {
1063        if (formats == null) {
1064            return null;
1065        }
1066
1067        for (int i = 0; i < formats.length; ++i) {
1068            formats[i] = imageFormatToPublic(formats[i]);
1069        }
1070
1071        return formats;
1072    }
1073
1074    /**
1075     * Convert a public format compatible with {@code ImageFormat} to an internal format
1076     * from {@code graphics.h}.
1077     *
1078     * <p>In particular these formats are converted:
1079     * <ul>
1080     * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1081     * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1082     * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1083     * </ul>
1084     * </p>
1085     *
1086     * <p>Passing in an internal format which has a different public format equivalent will fail.
1087     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1088     *
1089     * <p>All other formats are returned as-is, no invalid check is performed.</p>
1090     *
1091     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1092     *
1093     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
     * @return the converted image format
1095     *
1096     * @see ImageFormat
1097     * @see PixelFormat
1098     *
1099     * @throws IllegalArgumentException
1100     *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1101     */
1102    static int imageFormatToInternal(int format) {
1103        switch (format) {
1104            case ImageFormat.JPEG:
1105            case ImageFormat.DEPTH_POINT_CLOUD:
1106                return HAL_PIXEL_FORMAT_BLOB;
1107            case ImageFormat.DEPTH16:
1108                return HAL_PIXEL_FORMAT_Y16;
1109            default:
1110                return format;
1111        }
1112    }
1113
1114    /**
1115     * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1116     * from {@code graphics.h}.
1117     *
1118     * <p>In particular these formats are converted:
1119     * <ul>
1120     * <li>ImageFormat.JPEG => HAL_DATASPACE_JFIF
1121     * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1122     * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1123     * <li>others => HAL_DATASPACE_UNKNOWN
1124     * </ul>
1125     * </p>
1126     *
1127     * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1128     * as will passing in an internal format which has a different public format equivalent.
1129     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1130     *
1131     * <p>All other formats are returned as-is, no invalid check is performed.</p>
1132     *
1133     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1134     *
1135     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
     * @return the internal dataspace corresponding to {@code format}
1137     *
1138     * @see ImageFormat
1139     * @see PixelFormat
1140     *
1141     * @throws IllegalArgumentException
1142     *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1143     */
1144    static int imageFormatToDataspace(int format) {
1145        switch (format) {
1146            case ImageFormat.JPEG:
1147                return HAL_DATASPACE_JFIF;
1148            case ImageFormat.DEPTH_POINT_CLOUD:
1149            case ImageFormat.DEPTH16:
1150                return HAL_DATASPACE_DEPTH;
1151            default:
1152                return HAL_DATASPACE_UNKNOWN;
1153        }
1154    }
1155
1156    /**
1157     * Convert image formats from public to internal formats (in-place).
1158     *
1159     * @param formats an array of image formats
1160     * @return {@code formats}
1161     *
1162     * @see #imageFormatToInternal
1163     *
1164     * @hide
1165     */
1166    public static int[] imageFormatToInternal(int[] formats) {
1167        if (formats == null) {
1168            return null;
1169        }
1170
1171        for (int i = 0; i < formats.length; ++i) {
1172            formats[i] = imageFormatToInternal(formats[i]);
1173        }
1174
1175        return formats;
1176    }
1177
1178    private Size[] getPublicFormatSizes(int format, boolean output) {
1179        try {
1180            checkArgumentFormatSupported(format, output);
1181        } catch (IllegalArgumentException e) {
1182            return null;
1183        }
1184
1185        int internalFormat = imageFormatToInternal(format);
1186        int dataspace = imageFormatToDataspace(format);
1187
1188        return getInternalFormatSizes(internalFormat, dataspace, output);
1189    }
1190
1191    private Size[] getInternalFormatSizes(int format, int dataspace, boolean output) {
1192
1193        HashMap<Integer, Integer> formatsMap =
1194                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthOutputFormats : getFormatsMap(output);
1195
1196        Integer sizesCount = formatsMap.get(format);
1197        if (sizesCount == null) {
1198            throw new IllegalArgumentException("format not available");
1199        }
1200
1201        int len = sizesCount;
1202        Size[] sizes = new Size[len];
1203        int sizeIndex = 0;
1204
1205        StreamConfiguration[] configurations =
1206                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
1207
1208
1209        for (StreamConfiguration config : configurations) {
1210            if (config.getFormat() == format && config.isOutput() == output) {
1211                sizes[sizeIndex++] = config.getSize();
1212            }
1213        }
1214
1215        if (sizeIndex != len) {
1216            throw new AssertionError(
1217                    "Too few sizes (expected " + len + ", actual " + sizeIndex + ")");
1218        }
1219
1220        return sizes;
1221    }
1222
    /** Get the list of publicly visible output formats; does not include IMPL_DEFINED */
1224    private int[] getPublicFormats(boolean output) {
1225        int[] formats = new int[getPublicFormatCount(output)];
1226
1227        int i = 0;
1228
1229        for (int format : getFormatsMap(output).keySet()) {
1230            if (format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
1231                formats[i++] = imageFormatToPublic(format);
1232            }
1233        }
1234        if (output) {
1235            for (int format : mDepthOutputFormats.keySet()) {
1236                formats[i++] = depthFormatToPublic(format);
1237            }
1238        }
1239        if (formats.length != i) {
1240            throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1241        }
1242
1243        return formats;
1244    }
1245
1246    /** Get the format -> size count map for either output or input formats */
1247    private HashMap<Integer, Integer> getFormatsMap(boolean output) {
1248        return output ? mOutputFormats : mInputFormats;
1249    }
1250
1251    private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1252        // Assume the format has already been checked, since it's an internal format
1253
1254        if (!arrayContains(getInternalFormatSizes(format, dataspace, /*output*/true), size)) {
1255            throw new IllegalArgumentException("size was not supported");
1256        }
1257
1258        StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1259
1260        for (StreamConfigurationDuration configurationDuration : durations) {
1261            if (configurationDuration.getFormat() == format &&
1262                    configurationDuration.getWidth() == size.getWidth() &&
1263                    configurationDuration.getHeight() == size.getHeight()) {
1264                return configurationDuration.getDuration();
1265            }
1266        }
1267        // Default duration is '0' (unsupported/no extra stall)
1268        return 0;
1269    }
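
    // A hedged sketch of how a per-size duration lookup flows through this helper for a JPEG
    // output, assuming JPEG is carried in BLOB buffers as is conventional for the camera HAL
    // ('size' is hypothetical and must be one of the supported JPEG output sizes, otherwise
    // this throws):
    //
    //     long minFrame = getInternalFormatDuration(HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_JFIF,
    //             size, DURATION_MIN_FRAME);
    //     long stall = getInternalFormatDuration(HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_JFIF,
    //             size, DURATION_STALL);
    //
    // A supported size with no matching duration entry yields 0 (no minimum constraint and no
    // extra stall), per the default return above.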
1270
1271    /**
1272     * Get the durations array for the requested duration kind and dataspace.
1273     *
1274     * @see #DURATION_MIN_FRAME
1275     * @see #DURATION_STALL
1276     */
1277    private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1278        switch (duration) {
1279            case DURATION_MIN_FRAME:
1280                return (dataspace == HAL_DATASPACE_DEPTH) ?
1281                        mDepthMinFrameDurations : mMinFrameDurations;
1282            case DURATION_STALL:
1283                return (dataspace == HAL_DATASPACE_DEPTH) ?
1284                        mDepthStallDurations : mStallDurations;
1285            default:
1286                throw new IllegalArgumentException("duration was invalid");
1287        }
1288    }
1289
1290    /** Count the number of publicly-visible output or input formats */
1291    private int getPublicFormatCount(boolean output) {
1292        HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
1293
1294        int size = formatsMap.size();
1295        if (formatsMap.containsKey(HAL_PIXEL_FORMAT_RAW_OPAQUE)) {
1296            size -= 1;
1297        }
1298        if (output) {
1299            size += mDepthOutputFormats.size();
1300        }
1301
1302        return size;
1303    }
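
    // A worked (hypothetical) example of the count arithmetic above for the output side:
    // if mOutputFormats held three internal formats including HAL_PIXEL_FORMAT_RAW_OPAQUE and
    // mDepthOutputFormats held one depth format, then:
    //
    //     int publicCount = 3   // formats advertised by the HAL
    //                     - 1   // RAW_OPAQUE is hidden from the public list
    //                     + 1;  // depth output formats are appended
    //     // publicCount == 3, matching the array length allocated in getPublicFormats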
1304
1305    private static <T> boolean arrayContains(T[] array, T element) {
1306        if (array == null) {
1307            return false;
1308        }
1309
1310        for (T el : array) {
1311            if (Objects.equals(el, element)) {
1312                return true;
1313            }
1314        }
1315
1316        return false;
1317    }
1318
1319    /**
1320     * Return this {@link StreamConfigurationMap} as a string representation.
1321     *
1322     * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1323     * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1324     * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1325     * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1326     * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1327     *
1328     * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1329     * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1330     * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1331     * configuration's width, height, format, minimum frame duration in nanoseconds, and stall
1332     * duration in nanoseconds.</p>
1333     *
1334     * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1335     * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1336     * format.</p>
1337     *
1338     * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1339     * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1340     * represents an input format and its valid output formats.</p>
1341     *
1342     * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1343     * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1344     * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1345     * configuration's width, height, minimum frame rate, and maximum frame rate.</p>
1346     *
1347     * @return string representation of {@link StreamConfigurationMap}
1348     */
1349    @Override
1350    public String toString() {
1351        StringBuilder sb = new StringBuilder("StreamConfigurationMap(");
1352        appendOutputsString(sb);
1353        sb.append(", ");
1354        appendInputsString(sb);
1355        sb.append(", ");
1356        appendValidOutputFormatsForInputString(sb);
1357        sb.append(", ");
1358        appendHighSpeedVideoConfigurationsString(sb);
1359        sb.append(")");
1360
1361        return sb.toString();
1362    }
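
    // Calling toString() on a hypothetical map might produce something like the following
    // (truncated, all values made up), matching the format documented above:
    //
    //     StreamConfigurationMap(Outputs([w:1920, h:1080, format:YUV_420_888(35),
    //     min_duration:33333333, stall:0], ...), Inputs([w:1920, h:1080, format:PRIVATE(34)]),
    //     ValidOutputFormatsForInput([in:PRIVATE(34), out:YUV_420_888(35), JPEG(256)]),
    //     HighSpeedVideoConfigurations([w:1280, h:720, min_fps:30, max_fps:120]))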
1363
1364    private void appendOutputsString(StringBuilder sb) {
1365        sb.append("Outputs(");
1366        int[] formats = getOutputFormats();
1367        for (int format : formats) {
1368            Size[] sizes = getOutputSizes(format);
1369            for (Size size : sizes) {
1370                long minFrameDuration = getOutputMinFrameDuration(format, size);
1371                long stallDuration = getOutputStallDuration(format, size);
1372                sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1373                        "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1374                        format, minFrameDuration, stallDuration));
1375            }
1376        }
1377        // Remove the pending ", "
1378        if (sb.charAt(sb.length() - 1) == ' ') {
1379            sb.delete(sb.length() - 2, sb.length());
1380        }
1381        sb.append(")");
1382    }
1383
1384    private void appendInputsString(StringBuilder sb) {
1385        sb.append("Inputs(");
1386        int[] formats = getInputFormats();
1387        for (int format : formats) {
1388            Size[] sizes = getInputSizes(format);
1389            for (Size size : sizes) {
1390                sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1391                        size.getHeight(), formatToString(format), format));
1392            }
1393        }
1394        // Remove the pending ", "
1395        if (sb.charAt(sb.length() - 1) == ' ') {
1396            sb.delete(sb.length() - 2, sb.length());
1397        }
1398        sb.append(")");
1399    }
1400
1401    private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1402        sb.append("ValidOutputFormatsForInput(");
1403        int[] inputFormats = getInputFormats();
1404        for (int inputFormat : inputFormats) {
1405            sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1406            int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1407            for (int i = 0; i < outputFormats.length; i++) {
1408                sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1409                        outputFormats[i]));
1410                if (i < outputFormats.length - 1) {
1411                    sb.append(", ");
1412                }
1413            }
1414            sb.append("], ");
1415        }
1416        // Remove the pending ", "
1417        if (sb.charAt(sb.length() - 1) == ' ') {
1418            sb.delete(sb.length() - 2, sb.length());
1419        }
1420        sb.append(")");
1421    }
1422
1423    private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1424        sb.append("HighSpeedVideoConfigurations(");
1425        Size[] sizes = getHighSpeedVideoSizes();
1426        for (Size size : sizes) {
1427            Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1428            for (Range<Integer> range : ranges) {
1429                sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1430                        size.getHeight(), range.getLower(), range.getUpper()));
1431            }
1432        }
1433        // Remove the pending ", "
1434        if (sb.charAt(sb.length() - 1) == ' ') {
1435            sb.delete(sb.length() - 2, sb.length());
1436        }
1437        sb.append(")");
1438    }
1439
1440    private String formatToString(int format) {
1441        switch (format) {
1442            case ImageFormat.YV12:
1443                return "YV12";
1444            case ImageFormat.YUV_420_888:
1445                return "YUV_420_888";
1446            case ImageFormat.NV21:
1447                return "NV21";
1448            case ImageFormat.NV16:
1449                return "NV16";
1450            case PixelFormat.RGB_565:
1451                return "RGB_565";
1452            case PixelFormat.RGBA_8888:
1453                return "RGBA_8888";
1454            case PixelFormat.RGBX_8888:
1455                return "RGBX_8888";
1456            case PixelFormat.RGB_888:
1457                return "RGB_888";
1458            case ImageFormat.JPEG:
1459                return "JPEG";
1460            case ImageFormat.YUY2:
1461                return "YUY2";
1462            case ImageFormat.Y8:
1463                return "Y8";
1464            case ImageFormat.Y16:
1465                return "Y16";
1466            case ImageFormat.RAW_SENSOR:
1467                return "RAW_SENSOR";
1468            case ImageFormat.RAW10:
1469                return "RAW10";
1470            case ImageFormat.DEPTH16:
1471                return "DEPTH16";
1472            case ImageFormat.DEPTH_POINT_CLOUD:
1473                return "DEPTH_POINT_CLOUD";
1474            case ImageFormat.PRIVATE:
1475                return "PRIVATE";
1476            default:
1477                return "UNKNOWN";
1478        }
1479    }
1480
1481    // from system/core/include/system/graphics.h
1482    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
1483    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
1484    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
1485    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;
1486
1487    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
1488    private static final int HAL_DATASPACE_JFIF = 0x101;
1489    private static final int HAL_DATASPACE_DEPTH = 0x1000;
1490
1491    /**
1492     * @see #getDurations(int, int)
1493     */
1494    private static final int DURATION_MIN_FRAME = 0;
1495    private static final int DURATION_STALL = 1;
1496
1497    private final StreamConfiguration[] mConfigurations;
1498    private final StreamConfigurationDuration[] mMinFrameDurations;
1499    private final StreamConfigurationDuration[] mStallDurations;
1500
1501    private final StreamConfiguration[] mDepthConfigurations;
1502    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
1503    private final StreamConfigurationDuration[] mDepthStallDurations;
1504
1505    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
1506    private final ReprocessFormatsMap mInputOutputFormatsMap;
1507
1508    /** ImageFormat -> num output sizes mapping */
1509    private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mOutputFormats =
1510            new HashMap<Integer, Integer>();
1511    /** ImageFormat -> num input sizes mapping */
1512    private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mInputFormats =
1513            new HashMap<Integer, Integer>();
1514    /** ImageFormat -> num depth output sizes mapping */
1515    private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mDepthOutputFormats =
1516            new HashMap<Integer, Integer>();
1517    /** High speed video Size -> FPS range count mapping */
1518    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
1519            new HashMap<Size, Integer>();
1520    /** High speed video FPS range -> Size count mapping */
1521    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
1522            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
1523
1524}
1525
1526