StreamConfigurationMap.java revision 639fffee624302ec5b175503d7bd8a441340a629
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.hardware.camera2.params;
18
19import android.graphics.ImageFormat;
20import android.graphics.PixelFormat;
21import android.hardware.camera2.CameraCharacteristics;
22import android.hardware.camera2.CameraDevice;
23import android.hardware.camera2.CameraMetadata;
24import android.hardware.camera2.CaptureRequest;
25import android.hardware.camera2.utils.HashCodeHelpers;
26import android.hardware.camera2.utils.SurfaceUtils;
27import android.hardware.camera2.legacy.LegacyCameraDevice;
28import android.hardware.camera2.legacy.LegacyMetadataMapper;
29import android.view.Surface;
30import android.util.Range;
31import android.util.Size;
32import android.util.SparseIntArray;
33
34import java.util.Arrays;
35import java.util.HashMap;
36import java.util.Objects;
37import java.util.Set;
38
39import static com.android.internal.util.Preconditions.*;
40
41/**
42 * Immutable class to store the available stream
43 * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
44 * {@link android.view.Surface Surfaces} for creating a
45 * {@link android.hardware.camera2.CameraCaptureSession capture session} with
46 * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
47 * <!-- TODO: link to input stream configuration -->
48 *
49 * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
50 * for that format) that are supported by a camera device.</p>
51 *
52 * <p>This also contains the minimum frame durations and stall durations for each format/size
53 * combination that can be used to calculate effective frame rate when submitting multiple captures.
54 * </p>
55 *
56 * <p>An instance of this object is available from {@link CameraCharacteristics} using
57 * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
58 * {@link CameraCharacteristics#get} method.</p>
59 *
60 * <pre><code>{@code
61 * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
62 * StreamConfigurationMap configs = characteristics.get(
63 *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
64 * }</code></pre>
65 *
66 * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
67 * @see CameraDevice#createCaptureSession
68 */
69public final class StreamConfigurationMap {
70
71    private static final String TAG = "StreamConfigurationMap";
72
73    /**
74     * Create a new {@link StreamConfigurationMap}.
75     *
76     * <p>The array parameters ownership is passed to this object after creation; do not
77     * write to them after this constructor is invoked.</p>
78     *
79     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
80     * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
81     * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
82     * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
83     *        camera device does not support high speed video recording
84     * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
85     *        and thus needs a separate list of slow high-resolution output sizes
86     * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
87     *         were {@code null} or any subelements were {@code null}
88     *
89     * @hide
90     */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution) {
        // Required arrays: reject null arrays and null elements up front.
        mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
        mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
        mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
        mListHighResolution = listHighResolution;

        // Depth outputs are optional; normalize a null set to empty arrays so
        // the rest of the class never needs a null check.
        if (depthConfigurations == null) {
            mDepthConfigurations = new StreamConfiguration[0];
            mDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
                    "depthConfigurations");
            mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
                    "depthMinFrameDurations");
            mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
                    "depthStallDurations");
        }

        // High speed video support is likewise optional.
        if (highSpeedVideoConfigurations == null) {
            mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
        } else {
            mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
                    highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
        }

        // For each format, track how many sizes there are available to configure.
        // SparseIntArray.get(key) returns 0 for a missing key, so the get/put
        // pair below acts as an increment-or-insert counter.
        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            SparseIntArray map = null;
            if (config.isOutput()) {
                mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
                long duration = 0;
                if (mListHighResolution) {
                    // Devices that keep a separate high-resolution list (per the
                    // listHighResolution/BURST_CAPTURE contract above) classify
                    // each output size by its minimum frame duration.
                    for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
                        if (configurationDuration.getFormat() == fmt &&
                                configurationDuration.getWidth() == config.getSize().getWidth() &&
                                configurationDuration.getHeight() == config.getSize().getHeight()) {
                            duration = configurationDuration.getDuration();
                            break;
                        }
                    }
                }
                // Sizes that can sustain at least 20fps count as regular outputs;
                // slower sizes count as high-resolution outputs. When
                // mListHighResolution is false, duration stays 0 and everything
                // lands in mOutputFormats.
                map = duration <= DURATION_20FPS_NS ?
                        mOutputFormats : mHighResOutputFormats;
            } else {
                map = mInputFormats;
            }
            map.put(fmt, map.get(fmt) + 1);
        }

        // For each depth format, track how many sizes there are available to configure
        for (StreamConfiguration config : mDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input depth configs
                continue;
            }

            mDepthOutputFormats.put(config.getFormat(),
                    mDepthOutputFormats.get(config.getFormat()) + 1);
        }

        // Sanity check: at least one IMPLEMENTATION_DEFINED output size must have
        // been counted into the regular (<= 20fps min frame duration) output map.
        if (mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
            throw new AssertionError(
                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
        }

        // For each Size/FPS range, track how many FPS range/Size there are available.
        // These maps double as per-key counters, consumed later by
        // getHighSpeedVideoFpsRangesFor / getHighSpeedVideoSizesFor.
        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
            Size size = config.getSize();
            Range<Integer> fpsRange = config.getFpsRange();
            Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
            if (fpsRangeCount == null) {
                fpsRangeCount = 0;
            }
            mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
            Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
            if (sizeCount == null) {
                sizeCount = 0;
            }
            mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
        }

        // May be null when the device does not support reprocessing; see
        // getValidOutputFormatsForInput.
        mInputOutputFormatsMap = inputOutputFormatsMap;
    }
185
186    /**
187     * Get the image {@code format} output formats in this stream configuration.
188     *
189     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
190     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
191     *
192     * <p>Formats listed in this array are guaranteed to return true if queried with
193     * {@link #isOutputSupportedFor(int)}.</p>
194     *
195     * @return an array of integer format
196     *
197     * @see ImageFormat
198     * @see PixelFormat
199     */
200    public final int[] getOutputFormats() {
201        return getPublicFormats(/*output*/true);
202    }
203
204    /**
205     * Get the image {@code format} output formats for a reprocessing input format.
206     *
207     * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
208     * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
209     * listed in the return value of this method. Including any other output Surface as a target
210     * will throw an IllegalArgumentException. If no output format is supported given the input
211     * format, an empty int[] will be returned.</p>
212     *
213     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
214     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
215     *
216     * <p>Formats listed in this array are guaranteed to return true if queried with
217     * {@link #isOutputSupportedFor(int)}.</p>
218     *
219     * @return an array of integer format
220     *
221     * @see ImageFormat
222     * @see PixelFormat
223     */
224    public final int[] getValidOutputFormatsForInput(int inputFormat) {
225        if (mInputOutputFormatsMap == null) {
226            return new int[0];
227        }
228        return mInputOutputFormatsMap.getOutputs(inputFormat);
229    }
230
231    /**
232     * Get the image {@code format} input formats in this stream configuration.
233     *
234     * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
235     * or in {@link PixelFormat} (and there is no possibility of collision).</p>
236     *
237     * @return an array of integer format
238     *
239     * @see ImageFormat
240     * @see PixelFormat
241     */
242    public final int[] getInputFormats() {
243        return getPublicFormats(/*output*/false);
244    }
245
246    /**
247     * Get the supported input sizes for this input format.
248     *
249     * <p>The format must have come from {@link #getInputFormats}; otherwise
250     * {@code null} is returned.</p>
251     *
252     * @param format a format from {@link #getInputFormats}
253     * @return a non-empty array of sizes, or {@code null} if the format was not available.
254     */
255    public Size[] getInputSizes(final int format) {
256        return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
257    }
258
259    /**
260     * Determine whether or not output surfaces with a particular user-defined format can be passed
261     * {@link CameraDevice#createCaptureSession createCaptureSession}.
262     *
263     * <p>This method determines that the output {@code format} is supported by the camera device;
264     * each output {@code surface} target may or may not itself support that {@code format}.
265     * Refer to the class which provides the surface for additional documentation.</p>
266     *
267     * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
268     * returned by {@link #getOutputSizes}.</p>
269     *
270     * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
271     * @return
272     *          {@code true} iff using a {@code surface} with this {@code format} will be
273     *          supported with {@link CameraDevice#createCaptureSession}
274     *
275     * @throws IllegalArgumentException
276     *          if the image format was not a defined named constant
277     *          from either {@link ImageFormat} or {@link PixelFormat}
278     *
279     * @see ImageFormat
280     * @see PixelFormat
281     * @see CameraDevice#createCaptureSession
282     */
283    public boolean isOutputSupportedFor(int format) {
284        checkArgumentFormat(format);
285
286        int internalFormat = imageFormatToInternal(format);
287        int dataspace = imageFormatToDataspace(format);
288        if (dataspace == HAL_DATASPACE_DEPTH) {
289            return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
290        } else {
291            return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
292        }
293    }
294
295    /**
296     * Determine whether or not output streams can be configured with a particular class
297     * as a consumer.
298     *
299     * <p>The following list is generally usable for outputs:
300     * <ul>
301     * <li>{@link android.media.ImageReader} -
302     * Recommended for image processing or streaming to external resources (such as a file or
303     * network)
304     * <li>{@link android.media.MediaRecorder} -
305     * Recommended for recording video (simple to use)
306     * <li>{@link android.media.MediaCodec} -
307     * Recommended for recording video (more complicated to use, with more flexibility)
308     * <li>{@link android.renderscript.Allocation} -
309     * Recommended for image processing with {@link android.renderscript RenderScript}
310     * <li>{@link android.view.SurfaceHolder} -
311     * Recommended for low-power camera preview with {@link android.view.SurfaceView}
312     * <li>{@link android.graphics.SurfaceTexture} -
313     * Recommended for OpenGL-accelerated preview processing or compositing with
314     * {@link android.view.TextureView}
315     * </ul>
316     * </p>
317     *
318     * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
319     * provide a producer endpoint that is suitable to be used with
320     * {@link CameraDevice#createCaptureSession}.</p>
321     *
322     * <p>Since not all of the above classes support output of all format and size combinations,
323     * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
324     *
325     * @param klass a non-{@code null} {@link Class} object reference
326     * @return {@code true} if this class is supported as an output, {@code false} otherwise
327     *
328     * @throws NullPointerException if {@code klass} was {@code null}
329     *
330     * @see CameraDevice#createCaptureSession
331     * @see #isOutputSupportedFor(Surface)
332     */
333    public static <T> boolean isOutputSupportedFor(Class<T> klass) {
334        checkNotNull(klass, "klass must not be null");
335
336        if (klass == android.media.ImageReader.class) {
337            return true;
338        } else if (klass == android.media.MediaRecorder.class) {
339            return true;
340        } else if (klass == android.media.MediaCodec.class) {
341            return true;
342        } else if (klass == android.renderscript.Allocation.class) {
343            return true;
344        } else if (klass == android.view.SurfaceHolder.class) {
345            return true;
346        } else if (klass == android.graphics.SurfaceTexture.class) {
347            return true;
348        }
349
350        return false;
351    }
352
353    /**
354     * Determine whether or not the {@code surface} in its current state is suitable to be included
355     * in a {@link CameraDevice#createCaptureSession capture session} as an output.
356     *
357     * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
358     * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
359     * compatible with the {@link CameraDevice} in general
     * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
361     * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
362     *
363     * <p>Reasons for a {@code surface} being specifically incompatible might be:
364     * <ul>
     * <li>Using a format that's not listed by {@link #getOutputFormats}
     * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
     * <li>The {@code surface} itself is not in a state where it can service a new producer.</li>
     * </ul>
     * </p>
370     *
371     * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
372     * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
374     * Surface is used to create a capture session, it will have its size rounded to the nearest
375     * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
376     * and ImageReader.</p>
377     *
378     * <p>This is not an exhaustive list; see the particular class's documentation for further
379     * possible reasons of incompatibility.</p>
380     *
381     * @param surface a non-{@code null} {@link Surface} object reference
382     * @return {@code true} if this is supported, {@code false} otherwise
383     *
384     * @throws NullPointerException if {@code surface} was {@code null}
385     * @throws IllegalArgumentException if the Surface endpoint is no longer valid
386     *
387     * @see CameraDevice#createCaptureSession
388     * @see #isOutputSupportedFor(Class)
389     */
390    public boolean isOutputSupportedFor(Surface surface) {
391        checkNotNull(surface, "surface must not be null");
392
393        Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
394        int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
395        int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
396
397        // See if consumer is flexible.
398        boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
399
400        // Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
401        if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
402                        surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
403            surfaceFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
404        }
405
406        StreamConfiguration[] configs =
407                surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
408        for (StreamConfiguration config : configs) {
409            if (config.getFormat() == surfaceFormat && config.isOutput()) {
410                // Matching format, either need exact size match, or a flexible consumer
411                // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
412                if (config.getSize().equals(surfaceSize)) {
413                    return true;
414                } else if (isFlexible &&
415                        (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
416                    return true;
417                }
418            }
419        }
420        return false;
421    }
422
423    /**
424     * Get a list of sizes compatible with {@code klass} to use as an output.
425     *
426     * <p>Some of the supported classes may support additional formats beyond
427     * {@link ImageFormat#PRIVATE}; this function only returns
428     * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
429     * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
430     * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
431     * class.</p>
432     *
433     * <p>If a well-defined format such as {@code NV21} is required, use
434     * {@link #getOutputSizes(int)} instead.</p>
435     *
436     * <p>The {@code klass} should be a supported output, that querying
437     * {@code #isOutputSupportedFor(Class)} should return {@code true}.</p>
438     *
439     * @param klass
440     *          a non-{@code null} {@link Class} object reference
441     * @return
442     *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
443     *          or {@code null} iff the {@code klass} is not a supported output.
444     *
445     *
446     * @throws NullPointerException if {@code klass} was {@code null}
447     *
448     * @see #isOutputSupportedFor(Class)
449     */
450    public <T> Size[] getOutputSizes(Class<T> klass) {
451        if (isOutputSupportedFor(klass) == false) {
452            return null;
453        }
454
455        return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
456                HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
457    }
458
459    /**
460     * Get a list of sizes compatible with the requested image {@code format}.
461     *
462     * <p>The {@code format} should be a supported format (one of the formats returned by
463     * {@link #getOutputFormats}).</p>
464     *
465     * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
466     * that support the
467     * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
468     * capability to get a list of high-resolution output sizes that cannot operate at the preferred
469     * 20fps rate. This means that for some supported formats, this method will return an empty
470     * list, if all the supported resolutions operate at below 20fps.  For devices that do not
471     * support the BURST_CAPTURE capability, all output resolutions are listed through this method.
472     *
473     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
474     * @return
475     *          an array of supported sizes,
476     *          or {@code null} if the {@code format} is not a supported output
477     *
478     * @see ImageFormat
479     * @see PixelFormat
480     * @see #getOutputFormats
481     */
482    public Size[] getOutputSizes(int format) {
483        return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
484    }
485
486    /**
487     * Get a list of supported high speed video recording sizes.
488     * <p>
489     * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
490     * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
491     * list the supported high speed video size configurations. All the sizes listed will be a
492     * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
493     * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.)
494     * </p>
495     * <p>
     * To enable high speed video recording, application must create a constrained high speed
497     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
498     * a CaptureRequest list created by
499     * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
500     * to this session. The application must select the video size from this method and
501     * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
502     * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
503     * generate the high speed request list. For example, if the application intends to do high
504     * speed recording, it can select the maximum size reported by this method to create high speed
505     * capture session. Note that for the use case of multiple output streams, application must
506     * select one unique size from this method to use (e.g., preview and recording streams must have
507     * the same size). Otherwise, the high speed session creation will fail. Once the size is
508     * selected, application can get the supported FPS ranges by
509     * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
510     * request lists via
511     * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
512     * </p>
513     *
514     * @return an array of supported high speed video recording sizes
515     * @see #getHighSpeedVideoFpsRangesFor(Size)
516     * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
517     * @see CameraDevice#createConstrainedHighSpeedCaptureSession
518     * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
519     */
520    public Size[] getHighSpeedVideoSizes() {
521        Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
522        return keySet.toArray(new Size[keySet.size()]);
523    }
524
525    /**
526     * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
527     * <p>
528     * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
529     * </p>
530     * <p>
531     * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
532     * must not be used to setup capture requests that are submitted to unconstrained capture
533     * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
534     * </p>
535     * <p>
536     * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
537     * </p>
538     *
539     * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
540     * @return an array of supported high speed video recording FPS ranges The upper bound of
541     *         returned ranges is guaranteed to be greater than or equal to 120.
542     * @throws IllegalArgumentException if input size does not exist in the return value of
543     *             getHighSpeedVideoSizes
544     * @see #getHighSpeedVideoSizes()
545     * @see #getHighSpeedVideoFpsRanges()
546     */
547    public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
548        Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
549        if (fpsRangeCount == null || fpsRangeCount == 0) {
550            throw new IllegalArgumentException(String.format(
551                    "Size %s does not support high speed video recording", size));
552        }
553
554        @SuppressWarnings("unchecked")
555        Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
556        int i = 0;
557        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
558            if (size.equals(config.getSize())) {
559                fpsRanges[i++] = config.getFpsRange();
560            }
561        }
562        return fpsRanges;
563    }
564
565    /**
566     * Get a list of supported high speed video recording FPS ranges.
567     * <p>
568     * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
569     * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
570     * list the supported high speed video FPS range configurations. Application can then use
571     * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
572     * </p>
573     * <p>
     * To enable high speed video recording, application must create a constrained high speed
575     * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
576     * a CaptureRequest list created by
577     * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
578     * to this session. The application must select the video size from this method and
579     * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
580     * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
581     * generate the high speed request list. For example, if the application intends to do high
582     * speed recording, it can select one FPS range reported by this method, query the video sizes
583     * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
584     * sizes to create a high speed capture session. Note that for the use case of multiple output
585     * streams, application must select one unique size from this method to use (e.g., preview and
586     * recording streams must have the same size). Otherwise, the high speed session creation will
587     * fail. Once the high speed capture session is created, the application can set the FPS range
588     * in the recording request lists via
589     * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
590     * </p>
591     * <p>
592     * The FPS ranges reported by this method will have below characteristics:
     * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
594     * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li>
595     * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
596     * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30,
597     * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the
598     * application doesn't want the camera device always produce higher frame rate than the display
599     * refresh rate.</li>
600     * </p>
601     *
602     * @return an array of supported high speed video recording FPS ranges The upper bound of
603     *         returned ranges is guaranteed to be larger or equal to 120.
604     * @see #getHighSpeedVideoSizesFor
605     * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
606     * @see CameraDevice#createConstrainedHighSpeedCaptureSession
     * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
608     */
609    @SuppressWarnings("unchecked")
610    public Range<Integer>[] getHighSpeedVideoFpsRanges() {
611        Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
612        return keySet.toArray(new Range[keySet.size()]);
613    }
614
615    /**
616     * Get the supported video sizes for an input high speed FPS range.
617     *
618     * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
619     *
620     * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
621     * @return An array of video sizes to create high speed capture sessions for high speed streaming
622     *         use cases.
623     *
624     * @throws IllegalArgumentException if input FPS range does not exist in the return value of
625     *         getHighSpeedVideoFpsRanges
626     * @see #getHighSpeedVideoFpsRanges()
627     */
628    public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
629        Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
630        if (sizeCount == null || sizeCount == 0) {
631            throw new IllegalArgumentException(String.format(
632                    "FpsRange %s does not support high speed video recording", fpsRange));
633        }
634
635        Size[] sizes = new Size[sizeCount];
636        int i = 0;
637        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
638            if (fpsRange.equals(config.getFpsRange())) {
639                sizes[i++] = config.getSize();
640            }
641        }
642        return sizes;
643    }
644
645    /**
646     * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
647     * rate.
648     *
649     * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
650     * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
651     * capability.  This does not include the stall duration, so for example, a JPEG or RAW16 output
652     * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
653     * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are
654     * still guaranteed to operate at a rate of at least 10 fps, not including stall duration.</p>
655     *
656     * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
657     * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
658     * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
659     * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
660     * fps requirement.</p>
661     *
662     * @return an array of supported slower high-resolution sizes, or {@code null} if the
663     *         BURST_CAPTURE capability is not supported
664     */
665    public Size[] getHighResolutionOutputSizes(int format) {
666        if (!mListHighResolution) return null;
667
668        return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
669    }
670
671    /**
672     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
673     * for the format/size combination (in nanoseconds).
674     *
675     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
676     * <p>{@code size} should be one of the ones returned by
677     * {@link #getOutputSizes(int)}.</p>
678     *
679     * <p>This should correspond to the frame duration when only that stream is active, with all
680     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
681     * </p>
682     *
683     * <p>When multiple streams are used in a request, the minimum frame duration will be
684     * {@code max(individual stream min durations)}.</p>
685     *
686     * <p>For devices that do not support manual sensor control
687     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
688     * this function may return 0.</p>
689     *
690     * <!--
691     * TODO: uncomment after adding input stream support
692     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
693     * regardless of whether the stream is input or output.</p>
694     * -->
695     *
696     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
697     * @param size an output-compatible size
698     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
699     *          0 if the minimum frame duration is not available.
700     *
701     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
702     * @throws NullPointerException if {@code size} was {@code null}
703     *
704     * @see CaptureRequest#SENSOR_FRAME_DURATION
705     * @see #getOutputStallDuration(int, Size)
706     * @see ImageFormat
707     * @see PixelFormat
708     */
709    public long getOutputMinFrameDuration(int format, Size size) {
710        checkNotNull(size, "size must not be null");
711        checkArgumentFormatSupported(format, /*output*/true);
712
713        return getInternalFormatDuration(imageFormatToInternal(format),
714                imageFormatToDataspace(format),
715                size,
716                DURATION_MIN_FRAME);
717    }
718
719    /**
720     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
721     * for the class/size combination (in nanoseconds).
722     *
723     * <p>This assumes a the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
724     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
725     *
726     * <p>{@code klass} should be one of the ones which is supported by
727     * {@link #isOutputSupportedFor(Class)}.</p>
728     *
729     * <p>{@code size} should be one of the ones returned by
730     * {@link #getOutputSizes(int)}.</p>
731     *
732     * <p>This should correspond to the frame duration when only that stream is active, with all
733     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
734     * </p>
735     *
736     * <p>When multiple streams are used in a request, the minimum frame duration will be
737     * {@code max(individual stream min durations)}.</p>
738     *
739     * <p>For devices that do not support manual sensor control
740     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
741     * this function may return 0.</p>
742     *
743     * <!--
744     * TODO: uncomment after adding input stream support
745     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
746     * regardless of whether the stream is input or output.</p>
747     * -->
748     *
749     * @param klass
750     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
751     *          non-empty array returned by {@link #getOutputSizes(Class)}
752     * @param size an output-compatible size
753     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
754     *          0 if the minimum frame duration is not available.
755     *
756     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
757     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
758     *
759     * @see CaptureRequest#SENSOR_FRAME_DURATION
760     * @see ImageFormat
761     * @see PixelFormat
762     */
763    public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
764        if (!isOutputSupportedFor(klass)) {
765            throw new IllegalArgumentException("klass was not supported");
766        }
767
768        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
769                HAL_DATASPACE_UNKNOWN,
770                size, DURATION_MIN_FRAME);
771    }
772
773    /**
774     * Get the stall duration for the format/size combination (in nanoseconds).
775     *
776     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
777     * <p>{@code size} should be one of the ones returned by
778     * {@link #getOutputSizes(int)}.</p>
779     *
780     * <p>
781     * A stall duration is how much extra time would get added to the normal minimum frame duration
782     * for a repeating request that has streams with non-zero stall.
783     *
784     * <p>For example, consider JPEG captures which have the following characteristics:
785     *
786     * <ul>
787     * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
788     * in requests in which they are directly referenced, they act as JPEG streams.
789     * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
790     * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
791     * requests that actually reference a JPEG stream.
792     * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
793     * process more than 1 capture at a time.
794     * </ul>
795     *
796     * <p>In other words, using a repeating YUV request would result in a steady frame rate
797     * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
798     * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
799     * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
800     * 30 FPS.</p>
801     *
802     * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
803     * frame rate drop unless there are still outstanding buffers for that stream from previous
804     * requests.</p>
805     *
806     * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting
807     * the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
808     * added with the maximum stall duration for {@code S}.</p>
809     *
810     * <p>If interleaving requests with and without a stall duration, a request will stall by the
811     * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
812     *
813     * <p>This means that a stalling request will not have an exposure start until the stall has
814     * completed.</p>
815     *
816     * <p>This should correspond to the stall duration when only that stream is active, with all
817     * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
818     * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
819     * indeterminate stall duration for all streams in a request (the regular stall calculation
820     * rules are ignored).</p>
821     *
822     * <p>The following formats may always have a stall duration:
823     * <ul>
824     * <li>{@link ImageFormat#JPEG JPEG}
825     * <li>{@link ImageFormat#RAW_SENSOR RAW16}
826     * </ul>
827     * </p>
828     *
829     * <p>The following formats will never have a stall duration:
830     * <ul>
831     * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
832     * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
833     * </ul></p>
834     *
835     * <p>
836     * All other formats may or may not have an allowed stall duration on a per-capability basis;
837     * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
838     * android.request.availableCapabilities} for more details.</p>
839     * </p>
840     *
841     * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
842     * for more information about calculating the max frame rate (absent stalls).</p>
843     *
844     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
845     * @param size an output-compatible size
846     * @return a stall duration {@code >=} 0 in nanoseconds
847     *
848     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
849     * @throws NullPointerException if {@code size} was {@code null}
850     *
851     * @see CaptureRequest#SENSOR_FRAME_DURATION
852     * @see ImageFormat
853     * @see PixelFormat
854     */
855    public long getOutputStallDuration(int format, Size size) {
856        checkArgumentFormatSupported(format, /*output*/true);
857
858        return getInternalFormatDuration(imageFormatToInternal(format),
859                imageFormatToDataspace(format),
860                size,
861                DURATION_STALL);
862    }
863
864    /**
865     * Get the stall duration for the class/size combination (in nanoseconds).
866     *
867     * <p>This assumes a the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
868     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
869     *
870     * <p>{@code klass} should be one of the ones with a non-empty array returned by
871     * {@link #getOutputSizes(Class)}.</p>
872     *
873     * <p>{@code size} should be one of the ones returned by
874     * {@link #getOutputSizes(Class)}.</p>
875     *
876     * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
877     * <em>stall duration</em>.</p>
878     *
879     * @param klass
880     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
881     *          non-empty array returned by {@link #getOutputSizes(Class)}
882     * @param size an output-compatible size
883     * @return a minimum frame duration {@code >=} 0 in nanoseconds
884     *
885     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
886     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
887     *
888     * @see CaptureRequest#SENSOR_FRAME_DURATION
889     * @see ImageFormat
890     * @see PixelFormat
891     */
892    public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
893        if (!isOutputSupportedFor(klass)) {
894            throw new IllegalArgumentException("klass was not supported");
895        }
896
897        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
898                HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
899    }
900
901    /**
902     * Check if this {@link StreamConfigurationMap} is equal to another
903     * {@link StreamConfigurationMap}.
904     *
905     * <p>Two vectors are only equal if and only if each of the respective elements is equal.</p>
906     *
907     * @return {@code true} if the objects were equal, {@code false} otherwise
908     */
909    @Override
910    public boolean equals(final Object obj) {
911        if (obj == null) {
912            return false;
913        }
914        if (this == obj) {
915            return true;
916        }
917        if (obj instanceof StreamConfigurationMap) {
918            final StreamConfigurationMap other = (StreamConfigurationMap) obj;
919            // XX: do we care about order?
920            return Arrays.equals(mConfigurations, other.mConfigurations) &&
921                    Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
922                    Arrays.equals(mStallDurations, other.mStallDurations) &&
923                    Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
924                    Arrays.equals(mHighSpeedVideoConfigurations,
925                            other.mHighSpeedVideoConfigurations);
926        }
927        return false;
928    }
929
930    /**
931     * {@inheritDoc}
932     */
933    @Override
934    public int hashCode() {
935        // XX: do we care about order?
936        return HashCodeHelpers.hashCodeGeneric(
937                mConfigurations, mMinFrameDurations,
938                mStallDurations,
939                mDepthConfigurations, mHighSpeedVideoConfigurations);
940    }
941
942    // Check that the argument is supported by #getOutputFormats or #getInputFormats
943    private int checkArgumentFormatSupported(int format, boolean output) {
944        checkArgumentFormat(format);
945
946        int internalFormat = imageFormatToInternal(format);
947        int internalDataspace = imageFormatToDataspace(format);
948
949        if (output) {
950            if (internalDataspace == HAL_DATASPACE_DEPTH) {
951                if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
952                    return format;
953                }
954            } else {
955                if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
956                    return format;
957                }
958            }
959        } else {
960            if (mInputFormats.indexOfKey(internalFormat) >= 0) {
961                return format;
962            }
963        }
964
965        throw new IllegalArgumentException(String.format(
966                "format %x is not supported by this stream configuration map", format));
967    }
968
969    /**
970     * Ensures that the format is either user-defined or implementation defined.
971     *
972     * <p>If a format has a different internal representation than the public representation,
973     * passing in the public representation here will fail.</p>
974     *
975     * <p>For example if trying to use {@link ImageFormat#JPEG}:
976     * it has a different public representation than the internal representation
977     * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
978     *
979     * <p>Any invalid/undefined formats will raise an exception.</p>
980     *
981     * @param format image format
982     * @return the format
983     *
984     * @throws IllegalArgumentException if the format was invalid
985     */
986    static int checkArgumentFormatInternal(int format) {
987        switch (format) {
988            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
989            case HAL_PIXEL_FORMAT_BLOB:
990            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
991            case HAL_PIXEL_FORMAT_Y16:
992                return format;
993            case ImageFormat.JPEG:
994                throw new IllegalArgumentException(
995                        "ImageFormat.JPEG is an unknown internal format");
996            default:
997                return checkArgumentFormat(format);
998        }
999    }
1000
1001    /**
1002     * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
1003     *
1004     * <p>If a format has a different public representation than the internal representation,
1005     * passing in the internal representation here will fail.</p>
1006     *
1007     * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
1008     * it has a different internal representation than the public representation
1009     * {@link ImageFormat#JPEG}, this check will fail.</p>
1010     *
1011     * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
1012     * </p>
1013     *
1014     * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
1015     *
1016     * @param format image format
1017     * @return the format
1018     *
1019     * @throws IllegalArgumentException if the format was not user-defined
1020     */
1021    static int checkArgumentFormat(int format) {
1022        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
1023            throw new IllegalArgumentException(String.format(
1024                    "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
1025        }
1026
1027        return format;
1028    }
1029
1030    /**
1031     * Convert an internal format compatible with {@code graphics.h} into public-visible
1032     * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
1033     *
1034     * <p>In particular these formats are converted:
1035     * <ul>
1036     * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
1037     * </ul>
1038     * </p>
1039     *
1040     * <p>Passing in a format which has no public equivalent will fail;
1041     * as will passing in a public format which has a different internal format equivalent.
1042     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1043     *
1044     * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1045     *
1046     * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
1047     * HAL_DATASPACE_DEPTH.</p>
1048     *
1049     * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1050     * @return the converted image formats
1051     *
1052     * @throws IllegalArgumentException
1053     *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1054     *          {@link ImageFormat#JPEG}
1055     *
1056     * @see ImageFormat
1057     * @see PixelFormat
1058     * @see #checkArgumentFormat
1059     */
1060    static int imageFormatToPublic(int format) {
1061        switch (format) {
1062            case HAL_PIXEL_FORMAT_BLOB:
1063                return ImageFormat.JPEG;
1064            case ImageFormat.JPEG:
1065                throw new IllegalArgumentException(
1066                        "ImageFormat.JPEG is an unknown internal format");
1067            default:
1068                return format;
1069        }
1070    }
1071
1072    /**
1073     * Convert an internal format compatible with {@code graphics.h} into public-visible
1074     * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
1075     *
1076     * <p>In particular these formats are converted:
1077     * <ul>
1078     * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
1079     * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
1080     * </ul>
1081     * </p>
1082     *
1083     * <p>Passing in an implementation-defined format which has no public equivalent will fail;
1084     * as will passing in a public format which has a different internal format equivalent.
1085     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1086     *
1087     * <p>All other formats are returned as-is, no further invalid check is performed.</p>
1088     *
1089     * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
1090     * HAL_DATASPACE_DEPTH.</p>
1091     *
1092     * @param format image format from {@link ImageFormat} or {@link PixelFormat}
1093     * @return the converted image formats
1094     *
1095     * @throws IllegalArgumentException
1096     *          if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
1097     *          {@link ImageFormat#JPEG}
1098     *
1099     * @see ImageFormat
1100     * @see PixelFormat
1101     * @see #checkArgumentFormat
1102     */
1103    static int depthFormatToPublic(int format) {
1104        switch (format) {
1105            case HAL_PIXEL_FORMAT_BLOB:
1106                return ImageFormat.DEPTH_POINT_CLOUD;
1107            case HAL_PIXEL_FORMAT_Y16:
1108                return ImageFormat.DEPTH16;
1109            case ImageFormat.JPEG:
1110                throw new IllegalArgumentException(
1111                        "ImageFormat.JPEG is an unknown internal format");
1112            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1113                throw new IllegalArgumentException(
1114                        "IMPLEMENTATION_DEFINED must not leak to public API");
1115            default:
1116                throw new IllegalArgumentException(
1117                        "Unknown DATASPACE_DEPTH format " + format);
1118        }
1119    }
1120
1121    /**
1122     * Convert image formats from internal to public formats (in-place).
1123     *
1124     * @param formats an array of image formats
1125     * @return {@code formats}
1126     *
1127     * @see #imageFormatToPublic
1128     */
1129    static int[] imageFormatToPublic(int[] formats) {
1130        if (formats == null) {
1131            return null;
1132        }
1133
1134        for (int i = 0; i < formats.length; ++i) {
1135            formats[i] = imageFormatToPublic(formats[i]);
1136        }
1137
1138        return formats;
1139    }
1140
1141    /**
1142     * Convert a public format compatible with {@code ImageFormat} to an internal format
1143     * from {@code graphics.h}.
1144     *
1145     * <p>In particular these formats are converted:
1146     * <ul>
1147     * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
1148     * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
1149     * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
1150     * </ul>
1151     * </p>
1152     *
1153     * <p>Passing in an internal format which has a different public format equivalent will fail.
1154     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1155     *
1156     * <p>All other formats are returned as-is, no invalid check is performed.</p>
1157     *
1158     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1159     *
1160     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1161     * @return the converted image formats
1162     *
1163     * @see ImageFormat
1164     * @see PixelFormat
1165     *
1166     * @throws IllegalArgumentException
1167     *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1168     */
1169    static int imageFormatToInternal(int format) {
1170        switch (format) {
1171            case ImageFormat.JPEG:
1172            case ImageFormat.DEPTH_POINT_CLOUD:
1173                return HAL_PIXEL_FORMAT_BLOB;
1174            case ImageFormat.DEPTH16:
1175                return HAL_PIXEL_FORMAT_Y16;
1176            default:
1177                return format;
1178        }
1179    }
1180
1181    /**
1182     * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
1183     * from {@code graphics.h}.
1184     *
1185     * <p>In particular these formats are converted:
1186     * <ul>
1187     * <li>ImageFormat.JPEG => HAL_DATASPACE_JFIF
1188     * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
1189     * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
1190     * <li>others => HAL_DATASPACE_UNKNOWN
1191     * </ul>
1192     * </p>
1193     *
1194     * <p>Passing in an implementation-defined format here will fail (it's not a public format);
1195     * as will passing in an internal format which has a different public format equivalent.
1196     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
1197     *
1198     * <p>All other formats are returned as-is, no invalid check is performed.</p>
1199     *
1200     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
1201     *
1202     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
1203     * @return the converted image formats
1204     *
1205     * @see ImageFormat
1206     * @see PixelFormat
1207     *
1208     * @throws IllegalArgumentException
1209     *              if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
1210     */
1211    static int imageFormatToDataspace(int format) {
1212        switch (format) {
1213            case ImageFormat.JPEG:
1214                return HAL_DATASPACE_JFIF;
1215            case ImageFormat.DEPTH_POINT_CLOUD:
1216            case ImageFormat.DEPTH16:
1217                return HAL_DATASPACE_DEPTH;
1218            default:
1219                return HAL_DATASPACE_UNKNOWN;
1220        }
1221    }
1222
1223    /**
1224     * Convert image formats from public to internal formats (in-place).
1225     *
1226     * @param formats an array of image formats
1227     * @return {@code formats}
1228     *
1229     * @see #imageFormatToInternal
1230     *
1231     * @hide
1232     */
1233    public static int[] imageFormatToInternal(int[] formats) {
1234        if (formats == null) {
1235            return null;
1236        }
1237
1238        for (int i = 0; i < formats.length; ++i) {
1239            formats[i] = imageFormatToInternal(formats[i]);
1240        }
1241
1242        return formats;
1243    }
1244
1245    private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
1246        try {
1247            checkArgumentFormatSupported(format, output);
1248        } catch (IllegalArgumentException e) {
1249            return null;
1250        }
1251
1252        int internalFormat = imageFormatToInternal(format);
1253        int dataspace = imageFormatToDataspace(format);
1254
1255        return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
1256    }
1257
    /**
     * Look up all sizes available for an internal format in the given direction/dataspace,
     * split between the slow high-resolution list and the regular list.
     *
     * @throws IllegalArgumentException if the format is not available at all for this
     *         direction/dataspace
     * @throws AssertionError if the configuration list disagrees with the per-format size count
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // Choose the format -> size-count table: input formats, depth outputs,
        // slow high-res outputs, or regular outputs, in that priority order.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;

        int sizesCount = formatsMap.get(format);
        // A zero count in the chosen table is acceptable for non-depth outputs as long as
        // the format exists in the combined output table (the other of highRes/regular may
        // hold all of its sizes); otherwise the format is simply unsupported.
        if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0) ||
                (output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
            // Only throw if this is really not supported at all
            throw new IllegalArgumentException("format not available");
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        // Depth streams live in their own configuration list.
        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    // Find this configuration's minimum frame duration (0 if unlisted).
                    for (int i = 0; i < mMinFrameDurations.length; i++) {
                        StreamConfigurationDuration d = mMinFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // A duration above the 20fps threshold marks a slow high-res size;
                    // keep only the sizes matching the requested highRes flag.
                    if (highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Sanity check: the configuration scan must fill exactly the advertised count.
        if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }
1310
1311    /** Get the list of publically visible output formats; does not include IMPL_DEFINED */
1312    private int[] getPublicFormats(boolean output) {
1313        int[] formats = new int[getPublicFormatCount(output)];
1314
1315        int i = 0;
1316
1317        SparseIntArray map = getFormatsMap(output);
1318        for (int j = 0; j < map.size(); j++) {
1319            int format = map.keyAt(j);
1320            if (format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
1321                formats[i++] = imageFormatToPublic(format);
1322            }
1323        }
1324        if (output) {
1325            for (int j = 0; j < mDepthOutputFormats.size(); j++) {
1326                formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
1327            }
1328        }
1329        if (formats.length != i) {
1330            throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
1331        }
1332
1333        return formats;
1334    }
1335
1336    /** Get the format -> size count map for either output or input formats */
1337    private SparseIntArray getFormatsMap(boolean output) {
1338        return output ? mAllOutputFormats : mInputFormats;
1339    }
1340
1341    private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
1342        // assume format is already checked, since its internal
1343
1344        if (!isSupportedInternalConfiguration(format, dataspace, size)) {
1345            throw new IllegalArgumentException("size was not supported");
1346        }
1347
1348        StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
1349
1350        for (StreamConfigurationDuration configurationDuration : durations) {
1351            if (configurationDuration.getFormat() == format &&
1352                    configurationDuration.getWidth() == size.getWidth() &&
1353                    configurationDuration.getHeight() == size.getHeight()) {
1354                return configurationDuration.getDuration();
1355            }
1356        }
1357        // Default duration is '0' (unsupported/no extra stall)
1358        return 0;
1359    }
1360
1361    /**
1362     * Get the durations array for the kind of duration
1363     *
1364     * @see #DURATION_MIN_FRAME
1365     * @see #DURATION_STALL
1366     * */
1367    private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
1368        switch (duration) {
1369            case DURATION_MIN_FRAME:
1370                return (dataspace == HAL_DATASPACE_DEPTH) ?
1371                        mDepthMinFrameDurations : mMinFrameDurations;
1372            case DURATION_STALL:
1373                return (dataspace == HAL_DATASPACE_DEPTH) ?
1374                        mDepthStallDurations : mStallDurations;
1375            default:
1376                throw new IllegalArgumentException("duration was invalid");
1377        }
1378    }
1379
1380    /** Count the number of publicly-visible output formats */
1381    private int getPublicFormatCount(boolean output) {
1382        SparseIntArray formatsMap = getFormatsMap(output);
1383        int size = formatsMap.size();
1384        if (formatsMap.indexOfKey(HAL_PIXEL_FORMAT_RAW_OPAQUE) >= 0) {
1385            size -= 1;
1386        }
1387        if (output) {
1388            size += mDepthOutputFormats.size();
1389        }
1390
1391        return size;
1392    }
1393
1394    private static <T> boolean arrayContains(T[] array, T element) {
1395        if (array == null) {
1396            return false;
1397        }
1398
1399        for (T el : array) {
1400            if (Objects.equals(el, element)) {
1401                return true;
1402            }
1403        }
1404
1405        return false;
1406    }
1407
1408    private boolean isSupportedInternalConfiguration(int format, int dataspace,
1409            Size size) {
1410        StreamConfiguration[] configurations =
1411                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
1412
1413        for (int i = 0; i < configurations.length; i++) {
1414            if (configurations[i].getFormat() == format &&
1415                    configurations[i].getSize().equals(size)) {
1416                return true;
1417            }
1418        }
1419
1420        return false;
1421    }
1422
1423    /**
1424     * Return this {@link StreamConfigurationMap} as a string representation.
1425     *
1426     * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
1427     * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
1428     * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
1429     * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
1430     * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
1431     *
1432     * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
1433     * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
1434     * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
1435     * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
1436     * duration in nanoseconds.</p>
1437     *
1438     * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
1439     * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
1440     * format.</p>
1441     *
1442     * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
1443     * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
1444     * represents an input fomat and its valid output formats.</p>
1445     *
1446     * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
1447     * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
1448     * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
1449     * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
1450     *
1451     * @return string representation of {@link StreamConfigurationMap}
1452     */
1453    @Override
1454    public String toString() {
1455        StringBuilder sb = new StringBuilder("StreamConfiguration(");
1456        appendOutputsString(sb);
1457        sb.append(", ");
1458        appendHighResOutputsString(sb);
1459        sb.append(", ");
1460        appendInputsString(sb);
1461        sb.append(", ");
1462        appendValidOutputFormatsForInputString(sb);
1463        sb.append(", ");
1464        appendHighSpeedVideoConfigurationsString(sb);
1465        sb.append(")");
1466
1467        return sb.toString();
1468    }
1469
1470    private void appendOutputsString(StringBuilder sb) {
1471        sb.append("Outputs(");
1472        int[] formats = getOutputFormats();
1473        for (int format : formats) {
1474            Size[] sizes = getOutputSizes(format);
1475            for (Size size : sizes) {
1476                long minFrameDuration = getOutputMinFrameDuration(format, size);
1477                long stallDuration = getOutputStallDuration(format, size);
1478                sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1479                        "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1480                        format, minFrameDuration, stallDuration));
1481            }
1482        }
1483        // Remove the pending ", "
1484        if (sb.charAt(sb.length() - 1) == ' ') {
1485            sb.delete(sb.length() - 2, sb.length());
1486        }
1487        sb.append(")");
1488    }
1489
1490    private void appendHighResOutputsString(StringBuilder sb) {
1491        sb.append("HighResolutionOutputs(");
1492        int[] formats = getOutputFormats();
1493        for (int format : formats) {
1494            Size[] sizes = getHighResolutionOutputSizes(format);
1495            if (sizes == null) continue;
1496            for (Size size : sizes) {
1497                long minFrameDuration = getOutputMinFrameDuration(format, size);
1498                long stallDuration = getOutputStallDuration(format, size);
1499                sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
1500                        "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
1501                        format, minFrameDuration, stallDuration));
1502            }
1503        }
1504        // Remove the pending ", "
1505        if (sb.charAt(sb.length() - 1) == ' ') {
1506            sb.delete(sb.length() - 2, sb.length());
1507        }
1508        sb.append(")");
1509    }
1510
1511    private void appendInputsString(StringBuilder sb) {
1512        sb.append("Inputs(");
1513        int[] formats = getInputFormats();
1514        for (int format : formats) {
1515            Size[] sizes = getInputSizes(format);
1516            for (Size size : sizes) {
1517                sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
1518                        size.getHeight(), formatToString(format), format));
1519            }
1520        }
1521        // Remove the pending ", "
1522        if (sb.charAt(sb.length() - 1) == ' ') {
1523            sb.delete(sb.length() - 2, sb.length());
1524        }
1525        sb.append(")");
1526    }
1527
1528    private void appendValidOutputFormatsForInputString(StringBuilder sb) {
1529        sb.append("ValidOutputFormatsForInput(");
1530        int[] inputFormats = getInputFormats();
1531        for (int inputFormat : inputFormats) {
1532            sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
1533            int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
1534            for (int i = 0; i < outputFormats.length; i++) {
1535                sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
1536                        outputFormats[i]));
1537                if (i < outputFormats.length - 1) {
1538                    sb.append(", ");
1539                }
1540            }
1541            sb.append("], ");
1542        }
1543        // Remove the pending ", "
1544        if (sb.charAt(sb.length() - 1) == ' ') {
1545            sb.delete(sb.length() - 2, sb.length());
1546        }
1547        sb.append(")");
1548    }
1549
1550    private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
1551        sb.append("HighSpeedVideoConfigurations(");
1552        Size[] sizes = getHighSpeedVideoSizes();
1553        for (Size size : sizes) {
1554            Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
1555            for (Range<Integer> range : ranges) {
1556                sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
1557                        size.getHeight(), range.getLower(), range.getUpper()));
1558            }
1559        }
1560        // Remove the pending ", "
1561        if (sb.charAt(sb.length() - 1) == ' ') {
1562            sb.delete(sb.length() - 2, sb.length());
1563        }
1564        sb.append(")");
1565    }
1566
1567    private String formatToString(int format) {
1568        switch (format) {
1569            case ImageFormat.YV12:
1570                return "YV12";
1571            case ImageFormat.YUV_420_888:
1572                return "YUV_420_888";
1573            case ImageFormat.NV21:
1574                return "NV21";
1575            case ImageFormat.NV16:
1576                return "NV16";
1577            case PixelFormat.RGB_565:
1578                return "RGB_565";
1579            case PixelFormat.RGBA_8888:
1580                return "RGBA_8888";
1581            case PixelFormat.RGBX_8888:
1582                return "RGBX_8888";
1583            case PixelFormat.RGB_888:
1584                return "RGB_888";
1585            case ImageFormat.JPEG:
1586                return "JPEG";
1587            case ImageFormat.YUY2:
1588                return "YUY2";
1589            case ImageFormat.Y8:
1590                return "Y8";
1591            case ImageFormat.Y16:
1592                return "Y16";
1593            case ImageFormat.RAW_SENSOR:
1594                return "RAW_SENSOR";
1595            case ImageFormat.RAW10:
1596                return "RAW10";
1597            case ImageFormat.DEPTH16:
1598                return "DEPTH16";
1599            case ImageFormat.DEPTH_POINT_CLOUD:
1600                return "DEPTH_POINT_CLOUD";
1601            case ImageFormat.PRIVATE:
1602                return "PRIVATE";
1603            default:
1604                return "UNKNOWN";
1605        }
1606    }
1607
    // from system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    // Internal-only; filtered out of the public format lists (see getPublicFormats)
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // HAL dataspace values; HAL_DATASPACE_DEPTH selects the depth-specific
    // configuration/duration tables throughout this class
    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    private static final int HAL_DATASPACE_JFIF = 0x101;
    private static final int HAL_DATASPACE_DEPTH = 0x1000;

    // 50 ms frame duration (== 20 fps); durations above this mark a size as a
    // "slow" high-resolution output
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * Duration-kind selectors for looking up the matching duration table.
     *
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    // Raw stream configurations and their per-size durations for non-depth dataspaces
    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    // Raw stream configurations and their per-size durations for HAL_DATASPACE_DEPTH
    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    // Maps each reprocessing input format to its valid output formats
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    // Whether slow high-resolution output sizes are listed (BURST_CAPTURE-style devices)
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
1661
1662}
1663