StreamConfigurationMap.java revision 7966d446cddf92f814792dca3cfb0dfbbc1bef3e
1e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy/* 2e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * Copyright (C) 2014 The Android Open Source Project 3e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * 4e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * Licensed under the Apache License, Version 2.0 (the "License"); 5e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * you may not use this file except in compliance with the License. 6e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * You may obtain a copy of the License at 7e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * 8e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * http://www.apache.org/licenses/LICENSE-2.0 9e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * 10e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * Unless required by applicable law or agreed to in writing, software 11e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * distributed under the License is distributed on an "AS IS" BASIS, 12e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * See the License for the specific language governing permissions and 14e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * limitations under the License. 
15e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy */ 16e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy 179d5316e3f56d138504565ff311145ac01621dff4Romain Guypackage android.hardware.camera2.params; 189d5316e3f56d138504565ff311145ac01621dff4Romain Guy 199d5316e3f56d138504565ff311145ac01621dff4Romain Guyimport android.graphics.ImageFormat; 209d5316e3f56d138504565ff311145ac01621dff4Romain Guyimport android.graphics.PixelFormat; 219d5316e3f56d138504565ff311145ac01621dff4Romain Guyimport android.hardware.camera2.CameraCharacteristics; 2285bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guyimport android.hardware.camera2.CameraDevice; 23ce0537b80087a6225273040a987414b1dd081aa0Romain Guyimport android.hardware.camera2.CameraMetadata; 24f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guyimport android.hardware.camera2.CaptureRequest; 25ce0537b80087a6225273040a987414b1dd081aa0Romain Guyimport android.hardware.camera2.utils.HashCodeHelpers; 26079ba2c85b15e882629b8d188f5fbdb42f7f8eeaRomain Guyimport android.hardware.camera2.utils.SurfaceUtils; 27d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guyimport android.hardware.camera2.legacy.LegacyCameraDevice; 2885bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guyimport android.hardware.camera2.legacy.LegacyMetadataMapper; 29e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guyimport android.view.Surface; 30bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guyimport android.util.Range; 31deba785f122a47915756ffd991f5540d952cf937Romain Guyimport android.util.Size; 32bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guyimport android.util.SparseIntArray; 33f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy 345cbbce535744b89df5ecea95de21ee3733298260Romain Guyimport java.util.Arrays; 35bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guyimport java.util.HashMap; 365cbbce535744b89df5ecea95de21ee3733298260Romain Guyimport java.util.Objects; 37ce0537b80087a6225273040a987414b1dd081aa0Romain Guyimport java.util.Set; 
38dda570201ac851dd85af3861f7e575721d3345daRomain Guy 39c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guyimport static com.android.internal.util.Preconditions.*; 40f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy 41f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy/** 42694b519ac647fe998fd396fe0784cc8e179aadc4Romain Guy * Immutable class to store the available stream 43bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up 44e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * {@link android.view.Surface Surfaces} for creating a 459d5316e3f56d138504565ff311145ac01621dff4Romain Guy * {@link android.hardware.camera2.CameraCaptureSession capture session} with 46e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * {@link android.hardware.camera2.CameraDevice#createCaptureSession}. 47f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * <!-- TODO: link to input stream configuration --> 48f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * 49f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively 50f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * for that format) that are supported by a camera device.</p> 515cbbce535744b89df5ecea95de21ee3733298260Romain Guy * 52026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * <p>This also contains the minimum frame durations and stall durations for each format/size 53026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * combination that can be used to calculate effective frame rate when submitting multiple captures. 
54026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * </p> 55026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * 56026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * <p>An instance of this object is available from {@link CameraCharacteristics} using 57026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the 58026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * {@link CameraCharacteristics#get} method.</p> 59026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * 60f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * <pre><code>{@code 61f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId); 62f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * StreamConfigurationMap configs = characteristics.get( 63f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 645cbbce535744b89df5ecea95de21ee3733298260Romain Guy * }</code></pre> 655cbbce535744b89df5ecea95de21ee3733298260Romain Guy * 665cbbce535744b89df5ecea95de21ee3733298260Romain Guy * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP 675cbbce535744b89df5ecea95de21ee3733298260Romain Guy * @see CameraDevice#createCaptureSession 6885bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guy */ 69e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guypublic final class StreamConfigurationMap { 7085bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guy 7185bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guy private static final String TAG = "StreamConfigurationMap"; 72e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy 73e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy /** 74e4d011201cea40d46cb2b2eef401db8fddc5c9c6Romain Guy * Create a new {@link StreamConfigurationMap}. 
    /**
     * Create a new {@link StreamConfigurationMap}.
     *
     * <p>The array parameters ownership is passed to this object after creation; do not
     * write to them after this constructor is invoked.</p>
     *
     * @param configurations a non-{@code null} array of {@link StreamConfiguration}
     * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
     * @param depthConfigurations an array of depth {@link StreamConfiguration}; must be
     *            non-{@code null} (with non-{@code null} elements) when {@code configurations}
     *            is {@code null}
     * @param depthMinFrameDurations minimum frame durations for the depth configurations
     * @param depthStallDurations stall durations for the depth configurations
     * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, null if
     *            camera device does not support high speed video recording
     * @param inputOutputFormatsMap the input/output format mapping used for reprocessing; may be
     *            {@code null} if reprocessing is unsupported
     * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
     *            and thus needs a separate list of slow high-resolution output sizes
     * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
     *             were {@code null} or any subelements were {@code null}
     *
     * @hide
     */
    public StreamConfigurationMap(
            StreamConfiguration[] configurations,
            StreamConfigurationDuration[] minFrameDurations,
            StreamConfigurationDuration[] stallDurations,
            StreamConfiguration[] depthConfigurations,
            StreamConfigurationDuration[] depthMinFrameDurations,
            StreamConfigurationDuration[] depthStallDurations,
            HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
            ReprocessFormatsMap inputOutputFormatsMap,
            boolean listHighResolution) {

        if (configurations == null) {
            // If no color configurations exist, ensure depth ones do
            checkArrayElementsNotNull(depthConfigurations, "depthConfigurations");
            mConfigurations = new StreamConfiguration[0];
            mMinFrameDurations = new StreamConfigurationDuration[0];
            mStallDurations = new StreamConfigurationDuration[0];
        } else {
            mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
            mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
            mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
        }

        mListHighResolution = listHighResolution;

        // Depth arrays may legitimately be absent; normalize to empty arrays so later
        // loops need no null checks.
        if (depthConfigurations == null) {
            mDepthConfigurations = new StreamConfiguration[0];
            mDepthMinFrameDurations = new StreamConfigurationDuration[0];
            mDepthStallDurations = new StreamConfigurationDuration[0];
        } else {
            mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
                    "depthConfigurations");
            mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
                    "depthMinFrameDurations");
            mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
                    "depthStallDurations");
        }

        if (highSpeedVideoConfigurations == null) {
            mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
        } else {
            mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
                    highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
        }

        // For each format, track how many sizes there are available to configure
        for (StreamConfiguration config : mConfigurations) {
            int fmt = config.getFormat();
            SparseIntArray map = null;
            if (config.isOutput()) {
                mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
                long duration = 0;
                if (mListHighResolution) {
                    // Look up this configuration's min frame duration so slow (high-res)
                    // sizes can be counted separately from regular output sizes.
                    for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
                        if (configurationDuration.getFormat() == fmt &&
                                configurationDuration.getWidth() == config.getSize().getWidth() &&
                                configurationDuration.getHeight() == config.getSize().getHeight()) {
                            duration = configurationDuration.getDuration();
                            break;
                        }
                    }
                }
                // Sizes whose min frame duration exceeds the 20fps threshold are tracked
                // in the high-resolution map instead of the regular output map.
                map = duration <= DURATION_20FPS_NS ?
                        mOutputFormats : mHighResOutputFormats;
            } else {
                map = mInputFormats;
            }
            map.put(fmt, map.get(fmt) + 1);
        }

        // For each depth format, track how many sizes there are available to configure
        for (StreamConfiguration config : mDepthConfigurations) {
            if (!config.isOutput()) {
                // Ignoring input depth configs
                continue;
            }

            mDepthOutputFormats.put(config.getFormat(),
                    mDepthOutputFormats.get(config.getFormat()) + 1);
        }

        // Any device that reports color configurations must expose at least one
        // IMPLEMENTATION_DEFINED output stream; a missing one indicates a broken HAL.
        if (configurations != null &&
                mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
            throw new AssertionError(
                    "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
        }

        // For each Size/FPS range, track how many FPS range/Size there are available
        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
            Size size = config.getSize();
            Range<Integer> fpsRange = config.getFpsRange();
            Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
            if (fpsRangeCount == null) {
                fpsRangeCount = 0;
            }
            mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
            Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
            if (sizeCount == null) {
                sizeCount = 0;
            }
            mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
        }

        mInputOutputFormatsMap = inputOutputFormatsMap;
    }
1995cbbce535744b89df5ecea95de21ee3733298260Romain Guy * 2005cbbce535744b89df5ecea95de21ee3733298260Romain Guy * <p>All image formats returned by this function will be defined in either {@link ImageFormat} 2015cbbce535744b89df5ecea95de21ee3733298260Romain Guy * or in {@link PixelFormat} (and there is no possibility of collision).</p> 202d55a86120dd1e8ebcc6906c9ffd463f7460348daRomain Guy * 203c1396e93b6a5286a5183c00c781b62e940a12c1fRomain Guy * <p>Formats listed in this array are guaranteed to return true if queried with 2045cbbce535744b89df5ecea95de21ee3733298260Romain Guy * {@link #isOutputSupportedFor(int)}.</p> 205bd6b79b40247aea7bfe13d0831c6c0472df6c636Romain Guy * 206a979474f15b454c8e2963f239a3770e200bb227cRomain Guy * @return an array of integer format 207c7d53494f1fbd9f9d74af89053ff9fdb1ccbac6cRomain Guy * 208026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * @see ImageFormat 20982ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * @see PixelFormat 21082ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy */ 21182ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy public final int[] getOutputFormats() { 21282ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy return getPublicFormats(/*output*/true); 21382ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy } 21482ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy 21582ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy /** 21682ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * Get the image {@code format} output formats for a reprocessing input format. 21782ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * 21882ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format, 219a979474f15b454c8e2963f239a3770e200bb227cRomain Guy * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format 220a979474f15b454c8e2963f239a3770e200bb227cRomain Guy * listed in the return value of this method. 
Including any other output Surface as a target 22182ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * will throw an IllegalArgumentException. If no output format is supported given the input 22282ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * format, an empty int[] will be returned.</p> 22382ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * 22482ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * <p>All image formats returned by this function will be defined in either {@link ImageFormat} 22582ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * or in {@link PixelFormat} (and there is no possibility of collision).</p> 22682ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * 22782ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * <p>Formats listed in this array are guaranteed to return true if queried with 22882ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * {@link #isOutputSupportedFor(int)}.</p> 22982ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * 23082ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * @return an array of integer format 23182ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * 23282ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * @see ImageFormat 23382ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy * @see PixelFormat 23482ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy */ 23582ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy public final int[] getValidOutputFormatsForInput(int inputFormat) { 23682ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy if (mInputOutputFormatsMap == null) { 23782ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy return new int[0]; 238f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy } 239f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy return mInputOutputFormatsMap.getOutputs(inputFormat); 240a979474f15b454c8e2963f239a3770e200bb227cRomain Guy } 241f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy 242f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy /** 243f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy * Get 
the image {@code format} input formats in this stream configuration. 244d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * 245d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * <p>All image formats returned by this function will be defined in either {@link ImageFormat} 246d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * or in {@link PixelFormat} (and there is no possibility of collision).</p> 247d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * 248d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * @return an array of integer format 249d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * 250d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * @see ImageFormat 251d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * @see PixelFormat 252d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy */ 253d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy public final int[] getInputFormats() { 254d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy return getPublicFormats(/*output*/false); 255d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy } 256d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy 257c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy /** 258c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * Get the supported input sizes for this input format. 259c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * 260c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * <p>The format must have come from {@link #getInputFormats}; otherwise 261c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * {@code null} is returned.</p> 262c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * 263c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * @param format a format from {@link #getInputFormats} 264c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy * @return a non-empty array of sizes, or {@code null} if the format was not available. 
265c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy */ 266c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy public Size[] getInputSizes(final int format) { 267c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy return getPublicFormatSizes(format, /*output*/false, /*highRes*/false); 268c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy } 269c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy 270026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy /** 271026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * Determine whether or not output surfaces with a particular user-defined format can be passed 272026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * {@link CameraDevice#createCaptureSession createCaptureSession}. 2738ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * 274026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * <p>This method determines that the output {@code format} is supported by the camera device; 275026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * each output {@code surface} target may or may not itself support that {@code format}. 
276026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * Refer to the class which provides the surface for additional documentation.</p> 277026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * 278026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * <p>Formats for which this returns {@code true} are guaranteed to exist in the result 279026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * returned by {@link #getOutputSizes}.</p> 280026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * 281026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * @param format an image format from either {@link ImageFormat} or {@link PixelFormat} 2828ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * @return 2838ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * {@code true} iff using a {@code surface} with this {@code format} will be 2848ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * supported with {@link CameraDevice#createCaptureSession} 2858ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * 2868ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * @throws IllegalArgumentException 2878ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * if the image format was not a defined named constant 2888ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * from either {@link ImageFormat} or {@link PixelFormat} 2898ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * 2908ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * @see ImageFormat 2918ba548f81d1ab5f1750cbf86098c4a14e0b8beadRomain Guy * @see PixelFormat 292f7f93556c8fcc640ab5adef79d021a80a72a645aRomain Guy * @see CameraDevice#createCaptureSession 293a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy */ 294a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy public boolean isOutputSupportedFor(int format) { 295a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy checkArgumentFormat(format); 296a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy 297a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy int internalFormat = imageFormatToInternal(format); 
29882ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy int dataspace = imageFormatToDataspace(format); 29982ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy if (dataspace == HAL_DATASPACE_DEPTH) { 30082ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy return mDepthOutputFormats.indexOfKey(internalFormat) >= 0; 301a979474f15b454c8e2963f239a3770e200bb227cRomain Guy } else { 30282ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0; 303260e102162322958cf17dbd895cd6bd30dc87e32Romain Guy } 304d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy } 305d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy 306d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy /** 307d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * Determine whether or not output streams can be configured with a particular class 3086926c72e25b8dec3dd4b84af0819fa1937ae7296Romain Guy * as a consumer. 309d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * 310d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * <p>The following list is generally usable for outputs: 311d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * <ul> 312260e102162322958cf17dbd895cd6bd30dc87e32Romain Guy * <li>{@link android.media.ImageReader} - 313d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * Recommended for image processing or streaming to external resources (such as a file or 314260e102162322958cf17dbd895cd6bd30dc87e32Romain Guy * network) 315bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * <li>{@link android.media.MediaRecorder} - 316bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * Recommended for recording video (simple to use) 317bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * <li>{@link android.media.MediaCodec} - 31885bf02fc16784d935fb9eebfa9cb20fe46ff7951Romain Guy * Recommended for recording video (more complicated to use, with more flexibility) 319260e102162322958cf17dbd895cd6bd30dc87e32Romain Guy * <li>{@link android.renderscript.Allocation} - 
320bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * Recommended for image processing with {@link android.renderscript RenderScript} 321c7d53494f1fbd9f9d74af89053ff9fdb1ccbac6cRomain Guy * <li>{@link android.view.SurfaceHolder} - 322c7d53494f1fbd9f9d74af89053ff9fdb1ccbac6cRomain Guy * Recommended for low-power camera preview with {@link android.view.SurfaceView} 323c7d53494f1fbd9f9d74af89053ff9fdb1ccbac6cRomain Guy * <li>{@link android.graphics.SurfaceTexture} - 324bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * Recommended for OpenGL-accelerated preview processing or compositing with 325bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * {@link android.view.TextureView} 326f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * </ul> 327f6a11b8a9e25ff9861bbba19251bea84d8a5daf2Romain Guy * </p> 328bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * 329bb9524b6bdddc7ac77d8628daa8b366b8a7be4a4Romain Guy * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i> 3309d5316e3f56d138504565ff311145ac01621dff4Romain Guy * provide a producer endpoint that is suitable to be used with 3319d5316e3f56d138504565ff311145ac01621dff4Romain Guy * {@link CameraDevice#createCaptureSession}.</p> 332d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * 333d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * <p>Since not all of the above classes support output of all format and size combinations, 334d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p> 335694b519ac647fe998fd396fe0784cc8e179aadc4Romain Guy * 336f9764a4f532561f6e2e985ff3b25112f1132ce44Romain Guy * @param klass a non-{@code null} {@link Class} object reference 337026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * @return {@code true} if this class is supported as an output, {@code false} otherwise 338026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * 
339026c5e16704e817cac7d9c382914c947e34f87e0Romain Guy * @throws NullPointerException if {@code klass} was {@code null} 340ce0537b80087a6225273040a987414b1dd081aa0Romain Guy * 3411e79386ba34f0db38c1b35b22cdf122632534354Romain Guy * @see CameraDevice#createCaptureSession 3421e79386ba34f0db38c1b35b22cdf122632534354Romain Guy * @see #isOutputSupportedFor(Surface) 3431e79386ba34f0db38c1b35b22cdf122632534354Romain Guy */ 34482ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy public static <T> boolean isOutputSupportedFor(Class<T> klass) { 34582ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy checkNotNull(klass, "klass must not be null"); 34682ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy 34782ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy if (klass == android.media.ImageReader.class) { 34882ba814ca0dea659be2cc6523bc0137679d961ceRomain Guy return true; 3497fac2e18339f765320d759e8d4c090f92431959eRomain Guy } else if (klass == android.media.MediaRecorder.class) { 350d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy return true; 351f9764a4f532561f6e2e985ff3b25112f1132ce44Romain Guy } else if (klass == android.media.MediaCodec.class) { 352d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy return true; 353a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy } else if (klass == android.renderscript.Allocation.class) { 354a1db574036c9bc2d397b69f8200594027e1fff16Romain Guy return true; 355d27977d1a91d5a6b3cc9fa7664ac7e835e7bd895Romain Guy } else if (klass == android.view.SurfaceHolder.class) { 3567fac2e18339f765320d759e8d4c090f92431959eRomain Guy return true; 3577fac2e18339f765320d759e8d4c090f92431959eRomain Guy } else if (klass == android.graphics.SurfaceTexture.class) { 3587fac2e18339f765320d759e8d4c090f92431959eRomain Guy return true; 3597fac2e18339f765320d759e8d4c090f92431959eRomain Guy } 3607fac2e18339f765320d759e8d4c090f92431959eRomain Guy 3617fac2e18339f765320d759e8d4c090f92431959eRomain Guy return false; 362c0ac193b9415680f0a69e20a3f5f22d16f8053beRomain Guy } 

    /**
     * Determine whether or not the {@code surface} in its current state is suitable to be included
     * in a {@link CameraDevice#createCaptureSession capture session} as an output.
     *
     * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
     * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
     * compatible with the {@link CameraDevice} in general
     * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
     * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
     *
     * <p>Reasons for a {@code surface} being specifically incompatible might be:
     * <ul>
     * <li>Using a format that's not listed by {@link #getOutputFormats}
     * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
     * <li>The {@code surface} itself is not in a state where it can service a new producer.</li>
     * </ul>
     * </p>
     *
     * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
     * not match a camera-supported size, as long as the format (or class) is supported and the
     * camera device supports a size that is equal to or less than 1080p in that format. If such a
     * Surface is used to create a capture session, it will have its size rounded to the nearest
     * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
     * and ImageReader.</p>
     *
     * <p>This is not an exhaustive list; see the particular class's documentation for further
     * possible reasons of incompatibility.</p>
     *
     * @param surface a non-{@code null} {@link Surface} object reference
     * @return {@code true} if this is supported, {@code false} otherwise
     *
     * @throws NullPointerException if {@code surface} was {@code null}
     * @throws IllegalArgumentException if the Surface endpoint is no longer valid
     *
     * @see CameraDevice#createCaptureSession
     * @see #isOutputSupportedFor(Class)
     */
    public boolean isOutputSupportedFor(Surface surface) {
        checkNotNull(surface, "surface must not be null");

        Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
        int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
        int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);

        // See if consumer is flexible.
        boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);

        // Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
        if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
                surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
            surfaceFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
        }

        // Depth-dataspace surfaces are matched against the depth configuration list; all other
        // surfaces are matched against the regular output configurations.
        StreamConfiguration[] configs =
                surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
        for (StreamConfiguration config : configs) {
            if (config.getFormat() == surfaceFormat && config.isOutput()) {
                // Matching format, either need exact size match, or a flexible consumer
                // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
                if (config.getSize().equals(surfaceSize)) {
                    return true;
                } else if (isFlexible &&
                        (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Get a list of sizes compatible with {@code klass} to use as an output.
     *
     * <p>Some of the supported classes may support additional formats beyond
     * {@link ImageFormat#PRIVATE}; this function only returns
     * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
     * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, this method will
     * only return the sizes for {@link ImageFormat#PRIVATE} for {@link android.media.ImageReader}
     * class.</p>
     *
     * <p>If a well-defined format such as {@code NV21} is required, use
     * {@link #getOutputSizes(int)} instead.</p>
     *
     * <p>The {@code klass} should be a supported output, that querying
     * {@link #isOutputSupportedFor(Class)} should return {@code true}.</p>
     *
     * @param klass
     *          a non-{@code null} {@link Class} object reference
     * @return
     *          an array of supported sizes for {@link ImageFormat#PRIVATE} format,
     *          or {@code null} iff the {@code klass} is not a supported output.
455 * 456 * 457 * @throws NullPointerException if {@code klass} was {@code null} 458 * 459 * @see #isOutputSupportedFor(Class) 460 */ 461 public <T> Size[] getOutputSizes(Class<T> klass) { 462 if (isOutputSupportedFor(klass) == false) { 463 return null; 464 } 465 466 return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 467 HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false); 468 } 469 470 /** 471 * Get a list of sizes compatible with the requested image {@code format}. 472 * 473 * <p>The {@code format} should be a supported format (one of the formats returned by 474 * {@link #getOutputFormats}).</p> 475 * 476 * As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices 477 * that support the 478 * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE} 479 * capability to get a list of high-resolution output sizes that cannot operate at the preferred 480 * 20fps rate. This means that for some supported formats, this method will return an empty 481 * list, if all the supported resolutions operate at below 20fps. For devices that do not 482 * support the BURST_CAPTURE capability, all output resolutions are listed through this method. 483 * 484 * @param format an image format from {@link ImageFormat} or {@link PixelFormat} 485 * @return 486 * an array of supported sizes, 487 * or {@code null} if the {@code format} is not a supported output 488 * 489 * @see ImageFormat 490 * @see PixelFormat 491 * @see #getOutputFormats 492 */ 493 public Size[] getOutputSizes(int format) { 494 return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false); 495 } 496 497 /** 498 * Get a list of supported high speed video recording sizes. 
499 * <p> 500 * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is 501 * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will 502 * list the supported high speed video size configurations. All the sizes listed will be a 503 * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats 504 * (typically {@link ImageFormat#PRIVATE} {@link ImageFormat#YUV_420_888}, etc.) 505 * </p> 506 * <p> 507 * To enable high speed video recording, application must create a constrained create high speed 508 * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit 509 * a CaptureRequest list created by 510 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList} 511 * to this session. The application must select the video size from this method and 512 * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from 513 * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and 514 * generate the high speed request list. For example, if the application intends to do high 515 * speed recording, it can select the maximum size reported by this method to create high speed 516 * capture session. Note that for the use case of multiple output streams, application must 517 * select one unique size from this method to use (e.g., preview and recording streams must have 518 * the same size). Otherwise, the high speed session creation will fail. Once the size is 519 * selected, application can get the supported FPS ranges by 520 * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording 521 * request lists via 522 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}. 
523 * </p> 524 * 525 * @return an array of supported high speed video recording sizes 526 * @see #getHighSpeedVideoFpsRangesFor(Size) 527 * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO 528 * @see CameraDevice#createConstrainedHighSpeedCaptureSession 529 * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList 530 */ 531 public Size[] getHighSpeedVideoSizes() { 532 Set<Size> keySet = mHighSpeedVideoSizeMap.keySet(); 533 return keySet.toArray(new Size[keySet.size()]); 534 } 535 536 /** 537 * Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size. 538 * <p> 539 * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording. 540 * </p> 541 * <p> 542 * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method 543 * must not be used to setup capture requests that are submitted to unconstrained capture 544 * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}. 545 * </p> 546 * <p> 547 * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges. 548 * </p> 549 * 550 * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()} 551 * @return an array of supported high speed video recording FPS ranges The upper bound of 552 * returned ranges is guaranteed to be greater than or equal to 120. 
553 * @throws IllegalArgumentException if input size does not exist in the return value of 554 * getHighSpeedVideoSizes 555 * @see #getHighSpeedVideoSizes() 556 * @see #getHighSpeedVideoFpsRanges() 557 */ 558 public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) { 559 Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size); 560 if (fpsRangeCount == null || fpsRangeCount == 0) { 561 throw new IllegalArgumentException(String.format( 562 "Size %s does not support high speed video recording", size)); 563 } 564 565 @SuppressWarnings("unchecked") 566 Range<Integer>[] fpsRanges = new Range[fpsRangeCount]; 567 int i = 0; 568 for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) { 569 if (size.equals(config.getSize())) { 570 fpsRanges[i++] = config.getFpsRange(); 571 } 572 } 573 return fpsRanges; 574 } 575 576 /** 577 * Get a list of supported high speed video recording FPS ranges. 578 * <p> 579 * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is 580 * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will 581 * list the supported high speed video FPS range configurations. Application can then use 582 * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range. 583 * </p> 584 * <p> 585 * To enable high speed video recording, application must create a constrained create high speed 586 * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit 587 * a CaptureRequest list created by 588 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList} 589 * to this session. The application must select the video size from this method and 590 * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from 591 * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and 592 * generate the high speed request list. 
For example, if the application intends to do high 593 * speed recording, it can select one FPS range reported by this method, query the video sizes 594 * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported 595 * sizes to create a high speed capture session. Note that for the use case of multiple output 596 * streams, application must select one unique size from this method to use (e.g., preview and 597 * recording streams must have the same size). Otherwise, the high speed session creation will 598 * fail. Once the high speed capture session is created, the application can set the FPS range 599 * in the recording request lists via 600 * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}. 601 * </p> 602 * <p> 603 * The FPS ranges reported by this method will have below characteristics: 604 * <li>The fpsMin and fpsMax will be a multiple 30fps.</li> 605 * <li>The fpsMin will be no less than 30fps, the fpsMax will be no less than 120fps.</li> 606 * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li> 607 * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30, 608 * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the 609 * application doesn't want the camera device always produce higher frame rate than the display 610 * refresh rate.</li> 611 * </p> 612 * 613 * @return an array of supported high speed video recording FPS ranges The upper bound of 614 * returned ranges is guaranteed to be larger or equal to 120. 
     * @see #getHighSpeedVideoSizesFor
     * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
     * @see CameraDevice#createConstrainedHighSpeedCaptureSession
     * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
     */
    @SuppressWarnings("unchecked")
    public Range<Integer>[] getHighSpeedVideoFpsRanges() {
        Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
        return keySet.toArray(new Range[keySet.size()]);
    }

    /**
     * Get the supported video sizes for an input high speed FPS range.
     *
     * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
     *
     * @param fpsRange one of the FPS range returned by {@link #getHighSpeedVideoFpsRanges()}
     * @return An array of video sizes to create high speed capture sessions for high speed streaming
     *         use cases.
     *
     * @throws IllegalArgumentException if input FPS range does not exist in the return value of
     *         getHighSpeedVideoFpsRanges
     * @see #getHighSpeedVideoFpsRanges()
     */
    public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
        // An absent or zero entry in the count map means this FPS range is unsupported.
        Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
        if (sizeCount == null || sizeCount == 0) {
            throw new IllegalArgumentException(String.format(
                    "FpsRange %s does not support high speed video recording", fpsRange));
        }

        Size[] sizes = new Size[sizeCount];
        int i = 0;
        // Collect every configured size registered for this exact FPS range.
        for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
            if (fpsRange.equals(config.getFpsRange())) {
                sizes[i++] = config.getSize();
            }
        }
        return sizes;
    }

    /**
     * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
     * rate.
     *
     * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
     * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
     * capability. This does not include the stall duration, so for example, a JPEG or RAW16 output
     * resolution with a large stall duration but a minimum frame duration that's above 20 fps will
     * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are
     * still guaranteed to operate at a rate of at least 10 fps, not including stall duration.</p>
     *
     * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
     * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
     * guaranteed to meet >= 20 fps rate requirements. For a device that does support the
     * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
     * fps requirement.</p>
     *
     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
     * @return an array of supported slower high-resolution sizes, or {@code null} if the
     *         BURST_CAPTURE capability is not supported
     */
    public Size[] getHighResolutionOutputSizes(int format) {
        if (!mListHighResolution) return null;

        return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
    }

    /**
     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
     * for the format/size combination (in nanoseconds).
     *
     * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
     * <p>{@code size} should be one of the ones returned by
     * {@link #getOutputSizes(int)}.</p>
     *
     * <p>This should correspond to the frame duration when only that stream is active, with all
     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
     * </p>
     *
     * <p>When multiple streams are used in a request, the minimum frame duration will be
     * {@code max(individual stream min durations)}.</p>
     *
     * <p>For devices that do not support manual sensor control
     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
     * this function may return 0.</p>
     *
     * <!--
     * TODO: uncomment after adding input stream support
     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
     * regardless of whether the stream is input or output.</p>
     * -->
     *
     * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
     * @param size an output-compatible size
     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
     *          0 if the minimum frame duration is not available.
     *
     * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
     * @throws NullPointerException if {@code size} was {@code null}
     *
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     * @see #getOutputStallDuration(int, Size)
     * @see ImageFormat
     * @see PixelFormat
     */
    public long getOutputMinFrameDuration(int format, Size size) {
        checkNotNull(size, "size must not be null");
        checkArgumentFormatSupported(format, /*output*/true);

        // Look up the duration using the internal (graphics.h) format and its dataspace.
        return getInternalFormatDuration(imageFormatToInternal(format),
                imageFormatToDataspace(format),
                size,
                DURATION_MIN_FRAME);
    }

    /**
     * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
     * for the class/size combination (in nanoseconds).
     *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
     *
     * <p>{@code klass} should be one of the ones which is supported by
     * {@link #isOutputSupportedFor(Class)}.</p>
     *
     * <p>{@code size} should be one of the ones returned by
     * {@link #getOutputSizes(int)}.</p>
     *
     * <p>This should correspond to the frame duration when only that stream is active, with all
     * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
     * </p>
     *
     * <p>When multiple streams are used in a request, the minimum frame duration will be
     * {@code max(individual stream min durations)}.</p>
     *
     * <p>For devices that do not support manual sensor control
     * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
     * this function may return 0.</p>
     *
     * <!--
     * TODO: uncomment after adding input stream support
     * <p>The minimum frame duration of a stream (of a particular format, size) is the same
     * regardless of whether the stream is input or output.</p>
     * -->
     *
     * @param klass
     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
     *          non-empty array returned by {@link #getOutputSizes(Class)}
     * @param size an output-compatible size
     * @return a minimum frame duration {@code >} 0 in nanoseconds, or
     *          0 if the minimum frame duration is not available.
766 * 767 * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported 768 * @throws NullPointerException if {@code size} or {@code klass} was {@code null} 769 * 770 * @see CaptureRequest#SENSOR_FRAME_DURATION 771 * @see ImageFormat 772 * @see PixelFormat 773 */ 774 public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) { 775 if (!isOutputSupportedFor(klass)) { 776 throw new IllegalArgumentException("klass was not supported"); 777 } 778 779 return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 780 HAL_DATASPACE_UNKNOWN, 781 size, DURATION_MIN_FRAME); 782 } 783 784 /** 785 * Get the stall duration for the format/size combination (in nanoseconds). 786 * 787 * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p> 788 * <p>{@code size} should be one of the ones returned by 789 * {@link #getOutputSizes(int)}.</p> 790 * 791 * <p> 792 * A stall duration is how much extra time would get added to the normal minimum frame duration 793 * for a repeating request that has streams with non-zero stall. 794 * 795 * <p>For example, consider JPEG captures which have the following characteristics: 796 * 797 * <ul> 798 * <li>JPEG streams act like processed YUV streams in requests for which they are not included; 799 * in requests in which they are directly referenced, they act as JPEG streams. 800 * This is because supporting a JPEG stream requires the underlying YUV data to always be ready 801 * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on 802 * requests that actually reference a JPEG stream. 803 * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot 804 * process more than 1 capture at a time. 805 * </ul> 806 * 807 * <p>In other words, using a repeating YUV request would result in a steady frame rate 808 * (let's say it's 30 FPS). 
If a single JPEG request is submitted periodically, 809 * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each 810 * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from 811 * 30 FPS.</p> 812 * 813 * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a 814 * frame rate drop unless there are still outstanding buffers for that stream from previous 815 * requests.</p> 816 * 817 * <p>Submitting a repeating request with streams (call this {@code S}) is the same as setting 818 * the minimum frame duration from the normal minimum frame duration corresponding to {@code S}, 819 * added with the maximum stall duration for {@code S}.</p> 820 * 821 * <p>If interleaving requests with and without a stall duration, a request will stall by the 822 * maximum of the remaining times for each can-stall stream with outstanding buffers.</p> 823 * 824 * <p>This means that a stalling request will not have an exposure start until the stall has 825 * completed.</p> 826 * 827 * <p>This should correspond to the stall duration when only that stream is active, with all 828 * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}. 
829 * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an 830 * indeterminate stall duration for all streams in a request (the regular stall calculation 831 * rules are ignored).</p> 832 * 833 * <p>The following formats may always have a stall duration: 834 * <ul> 835 * <li>{@link ImageFormat#JPEG JPEG} 836 * <li>{@link ImageFormat#RAW_SENSOR RAW16} 837 * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE} 838 * </ul> 839 * </p> 840 * 841 * <p>The following formats will never have a stall duration: 842 * <ul> 843 * <li>{@link ImageFormat#YUV_420_888 YUV_420_888} 844 * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined} 845 * </ul></p> 846 * 847 * <p> 848 * All other formats may or may not have an allowed stall duration on a per-capability basis; 849 * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES 850 * android.request.availableCapabilities} for more details.</p> 851 * </p> 852 * 853 * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} 854 * for more information about calculating the max frame rate (absent stalls).</p> 855 * 856 * @param format an image format from {@link ImageFormat} or {@link PixelFormat} 857 * @param size an output-compatible size 858 * @return a stall duration {@code >=} 0 in nanoseconds 859 * 860 * @throws IllegalArgumentException if {@code format} or {@code size} was not supported 861 * @throws NullPointerException if {@code size} was {@code null} 862 * 863 * @see CaptureRequest#SENSOR_FRAME_DURATION 864 * @see ImageFormat 865 * @see PixelFormat 866 */ 867 public long getOutputStallDuration(int format, Size size) { 868 checkArgumentFormatSupported(format, /*output*/true); 869 870 return getInternalFormatDuration(imageFormatToInternal(format), 871 imageFormatToDataspace(format), 872 size, 873 DURATION_STALL); 874 } 875 876 /** 877 * Get the stall duration for the class/size combination (in nanoseconds). 
     *
     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
     *
     * <p>{@code klass} should be one of the ones with a non-empty array returned by
     * {@link #getOutputSizes(Class)}.</p>
     *
     * <p>{@code size} should be one of the ones returned by
     * {@link #getOutputSizes(Class)}.</p>
     *
     * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
     * <em>stall duration</em>.</p>
     *
     * @param klass
     *          a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
     *          non-empty array returned by {@link #getOutputSizes(Class)}
     * @param size an output-compatible size
     * @return a stall duration {@code >=} 0 in nanoseconds
     *
     * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
     * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
     *
     * @see CaptureRequest#SENSOR_FRAME_DURATION
     * @see ImageFormat
     * @see PixelFormat
     */
    public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
        if (!isOutputSupportedFor(klass)) {
            throw new IllegalArgumentException("klass was not supported");
        }

        // Class-based streams always use the implementation-defined internal format.
        return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
    }

    /**
     * Check if this {@link StreamConfigurationMap} is equal to another
     * {@link StreamConfigurationMap}.
     *
     * <p>Two maps are equal if and only if all of their respective elements are equal.</p>
     *
     * @return {@code true} if the objects were equal, {@code false} otherwise
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == null) {
            return false;
        }
        if (this == obj) {
            return true;
        }
        if (obj instanceof StreamConfigurationMap) {
            final StreamConfigurationMap other = (StreamConfigurationMap) obj;
            // XX: do we care about order?
            return Arrays.equals(mConfigurations, other.mConfigurations) &&
                    Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
                    Arrays.equals(mStallDurations, other.mStallDurations) &&
                    Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
                    Arrays.equals(mHighSpeedVideoConfigurations,
                            other.mHighSpeedVideoConfigurations);
        }
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        // XX: do we care about order?
        return HashCodeHelpers.hashCodeGeneric(
                mConfigurations, mMinFrameDurations,
                mStallDurations,
                mDepthConfigurations, mHighSpeedVideoConfigurations);
    }

    /**
     * Check that the argument is a public format supported by {@link #getOutputFormats} (when
     * {@code output} is {@code true}) or by the input format list (when {@code false}).
     *
     * @param format a public image format from {@link ImageFormat} or {@link PixelFormat}
     * @param output whether to check the output format lists or the input format list
     * @return {@code format}, unchanged, if it is supported in the requested direction
     * @throws IllegalArgumentException if the format is invalid or not supported
     */
    private int checkArgumentFormatSupported(int format, boolean output) {
        checkArgumentFormat(format);

        int internalFormat = imageFormatToInternal(format);
        int internalDataspace = imageFormatToDataspace(format);

        if (output) {
            // Depth formats are tracked in a separate output-format list.
            if (internalDataspace == HAL_DATASPACE_DEPTH) {
                if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
                    return format;
                }
            } else {
                if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
                    return format;
                }
            }
        } else {
            if (mInputFormats.indexOfKey(internalFormat) >= 0) {
                return format;
            }
        }

        throw new IllegalArgumentException(String.format(
                "format %x is not supported by this stream configuration map", format));
    }

    /**
     * Ensures that the format is either user-defined or implementation defined.
983 * 984 * <p>If a format has a different internal representation than the public representation, 985 * passing in the public representation here will fail.</p> 986 * 987 * <p>For example if trying to use {@link ImageFormat#JPEG}: 988 * it has a different public representation than the internal representation 989 * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p> 990 * 991 * <p>Any invalid/undefined formats will raise an exception.</p> 992 * 993 * @param format image format 994 * @return the format 995 * 996 * @throws IllegalArgumentException if the format was invalid 997 */ 998 static int checkArgumentFormatInternal(int format) { 999 switch (format) { 1000 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 1001 case HAL_PIXEL_FORMAT_BLOB: 1002 case HAL_PIXEL_FORMAT_RAW_OPAQUE: 1003 case HAL_PIXEL_FORMAT_Y16: 1004 return format; 1005 case ImageFormat.JPEG: 1006 throw new IllegalArgumentException( 1007 "ImageFormat.JPEG is an unknown internal format"); 1008 default: 1009 return checkArgumentFormat(format); 1010 } 1011 } 1012 1013 /** 1014 * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat. 1015 * 1016 * <p>If a format has a different public representation than the internal representation, 1017 * passing in the internal representation here will fail.</p> 1018 * 1019 * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}: 1020 * it has a different internal representation than the public representation 1021 * {@link ImageFormat#JPEG}, this check will fail.</p> 1022 * 1023 * <p>Any invalid/undefined formats will raise an exception, including implementation-defined. 
1024 * </p> 1025 * 1026 * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p> 1027 * 1028 * @param format image format 1029 * @return the format 1030 * 1031 * @throws IllegalArgumentException if the format was not user-defined 1032 */ 1033 static int checkArgumentFormat(int format) { 1034 if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) { 1035 throw new IllegalArgumentException(String.format( 1036 "format 0x%x was not defined in either ImageFormat or PixelFormat", format)); 1037 } 1038 1039 return format; 1040 } 1041 1042 /** 1043 * Convert an internal format compatible with {@code graphics.h} into public-visible 1044 * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH. 1045 * 1046 * <p>In particular these formats are converted: 1047 * <ul> 1048 * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li> 1049 * </ul> 1050 * </p> 1051 * 1052 * <p>Passing in a format which has no public equivalent will fail; 1053 * as will passing in a public format which has a different internal format equivalent. 
1054 * See {@link #checkArgumentFormat} for more details about a legal public format.</p> 1055 * 1056 * <p>All other formats are returned as-is, no further invalid check is performed.</p> 1057 * 1058 * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than 1059 * HAL_DATASPACE_DEPTH.</p> 1060 * 1061 * @param format image format from {@link ImageFormat} or {@link PixelFormat} 1062 * @return the converted image formats 1063 * 1064 * @throws IllegalArgumentException 1065 * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or 1066 * {@link ImageFormat#JPEG} 1067 * 1068 * @see ImageFormat 1069 * @see PixelFormat 1070 * @see #checkArgumentFormat 1071 */ 1072 static int imageFormatToPublic(int format) { 1073 switch (format) { 1074 case HAL_PIXEL_FORMAT_BLOB: 1075 return ImageFormat.JPEG; 1076 case ImageFormat.JPEG: 1077 throw new IllegalArgumentException( 1078 "ImageFormat.JPEG is an unknown internal format"); 1079 default: 1080 return format; 1081 } 1082 } 1083 1084 /** 1085 * Convert an internal format compatible with {@code graphics.h} into public-visible 1086 * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH. 1087 * 1088 * <p>In particular these formats are converted: 1089 * <ul> 1090 * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD 1091 * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16 1092 * </ul> 1093 * </p> 1094 * 1095 * <p>Passing in an implementation-defined format which has no public equivalent will fail; 1096 * as will passing in a public format which has a different internal format equivalent. 
1097 * See {@link #checkArgumentFormat} for more details about a legal public format.</p> 1098 * 1099 * <p>All other formats are returned as-is, no further invalid check is performed.</p> 1100 * 1101 * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with 1102 * HAL_DATASPACE_DEPTH.</p> 1103 * 1104 * @param format image format from {@link ImageFormat} or {@link PixelFormat} 1105 * @return the converted image formats 1106 * 1107 * @throws IllegalArgumentException 1108 * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or 1109 * {@link ImageFormat#JPEG} 1110 * 1111 * @see ImageFormat 1112 * @see PixelFormat 1113 * @see #checkArgumentFormat 1114 */ 1115 static int depthFormatToPublic(int format) { 1116 switch (format) { 1117 case HAL_PIXEL_FORMAT_BLOB: 1118 return ImageFormat.DEPTH_POINT_CLOUD; 1119 case HAL_PIXEL_FORMAT_Y16: 1120 return ImageFormat.DEPTH16; 1121 case ImageFormat.JPEG: 1122 throw new IllegalArgumentException( 1123 "ImageFormat.JPEG is an unknown internal format"); 1124 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 1125 throw new IllegalArgumentException( 1126 "IMPLEMENTATION_DEFINED must not leak to public API"); 1127 default: 1128 throw new IllegalArgumentException( 1129 "Unknown DATASPACE_DEPTH format " + format); 1130 } 1131 } 1132 1133 /** 1134 * Convert image formats from internal to public formats (in-place). 1135 * 1136 * @param formats an array of image formats 1137 * @return {@code formats} 1138 * 1139 * @see #imageFormatToPublic 1140 */ 1141 static int[] imageFormatToPublic(int[] formats) { 1142 if (formats == null) { 1143 return null; 1144 } 1145 1146 for (int i = 0; i < formats.length; ++i) { 1147 formats[i] = imageFormatToPublic(formats[i]); 1148 } 1149 1150 return formats; 1151 } 1152 1153 /** 1154 * Convert a public format compatible with {@code ImageFormat} to an internal format 1155 * from {@code graphics.h}. 
1156 * 1157 * <p>In particular these formats are converted: 1158 * <ul> 1159 * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB 1160 * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB 1161 * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16 1162 * </ul> 1163 * </p> 1164 * 1165 * <p>Passing in an internal format which has a different public format equivalent will fail. 1166 * See {@link #checkArgumentFormat} for more details about a legal public format.</p> 1167 * 1168 * <p>All other formats are returned as-is, no invalid check is performed.</p> 1169 * 1170 * <p>This function is the dual of {@link #imageFormatToPublic}.</p> 1171 * 1172 * @param format public image format from {@link ImageFormat} or {@link PixelFormat} 1173 * @return the converted image formats 1174 * 1175 * @see ImageFormat 1176 * @see PixelFormat 1177 * 1178 * @throws IllegalArgumentException 1179 * if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} 1180 */ 1181 static int imageFormatToInternal(int format) { 1182 switch (format) { 1183 case ImageFormat.JPEG: 1184 case ImageFormat.DEPTH_POINT_CLOUD: 1185 return HAL_PIXEL_FORMAT_BLOB; 1186 case ImageFormat.DEPTH16: 1187 return HAL_PIXEL_FORMAT_Y16; 1188 default: 1189 return format; 1190 } 1191 } 1192 1193 /** 1194 * Convert a public format compatible with {@code ImageFormat} to an internal dataspace 1195 * from {@code graphics.h}. 1196 * 1197 * <p>In particular these formats are converted: 1198 * <ul> 1199 * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF 1200 * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH 1201 * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH 1202 * <li>others => HAL_DATASPACE_UNKNOWN 1203 * </ul> 1204 * </p> 1205 * 1206 * <p>Passing in an implementation-defined format here will fail (it's not a public format); 1207 * as will passing in an internal format which has a different public format equivalent. 
     * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
     *
     * <p>All other formats are returned as-is, no invalid check is performed.</p>
     *
     * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
     *
     * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
     * @return the converted image formats
     *
     * @see ImageFormat
     * @see PixelFormat
     */
    // NOTE(review): the historical javadoc claimed an IllegalArgumentException for
    // HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, but this method never throws; unknown
    // formats (including implementation-defined) map to HAL_DATASPACE_UNKNOWN.
    static int imageFormatToDataspace(int format) {
        switch (format) {
            case ImageFormat.JPEG:
                // JPEG blobs carry JFIF-encoded data.
                return HAL_DATASPACE_V0_JFIF;
            case ImageFormat.DEPTH_POINT_CLOUD:
            case ImageFormat.DEPTH16:
                // Both depth formats live in the depth dataspace.
                return HAL_DATASPACE_DEPTH;
            default:
                return HAL_DATASPACE_UNKNOWN;
        }
    }

    /**
     * Convert image formats from public to internal formats (in-place).
     *
     * @param formats an array of image formats; mutated in place, may be {@code null}
     * @return {@code formats}
     *
     * @see #imageFormatToInternal
     *
     * @hide
     */
    public static int[] imageFormatToInternal(int[] formats) {
        if (formats == null) {
            return null;
        }

        // Replace each public format with its internal equivalent, in place.
        for (int i = 0; i < formats.length; ++i) {
            formats[i] = imageFormatToInternal(formats[i]);
        }

        return formats;
    }

    /**
     * Look up the supported sizes for a public format.
     *
     * @param format public format from {@link ImageFormat} or {@link PixelFormat}
     * @param output whether to query output sizes (vs. input sizes)
     * @param highRes whether to query only the slow high-resolution sizes
     * @return the supported sizes, or {@code null} if the format is not supported in the
     *         requested direction
     */
    private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
        try {
            checkArgumentFormatSupported(format, output);
        } catch (IllegalArgumentException e) {
            // Unsupported public format: report "no sizes" rather than propagating.
            return null;
        }

        int internalFormat = imageFormatToInternal(format);
        int dataspace = imageFormatToDataspace(format);

        return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
    }

    /**
     * Core size lookup for an internal (format, dataspace) pair.
     *
     * @param format internal format from {@code graphics.h}
     * @param dataspace internal dataspace; HAL_DATASPACE_DEPTH selects the depth tables
     * @param output whether to query output (vs. input) configurations
     * @param highRes whether to return only the slow high-resolution sizes
     * @return the matching sizes (possibly empty)
     *
     * @throws IllegalArgumentException if the format is not available at all
     */
    private Size[] getInternalFormatSizes(int format, int dataspace,
            boolean output, boolean highRes) {
        // All depth formats are non-high-res.
        if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
            return new Size[0];
        }

        // Pick the format -> size-count table that matches the query.
        SparseIntArray formatsMap =
                !output ? mInputFormats :
                dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
                highRes ? mHighResOutputFormats :
                mOutputFormats;

        int sizesCount = formatsMap.get(format);
        if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0) ||
                (output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
            // Only throw if this is really not supported at all
            throw new IllegalArgumentException("format not available");
        }

        Size[] sizes = new Size[sizesCount];
        int sizeIndex = 0;

        StreamConfiguration[] configurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
        StreamConfigurationDuration[] minFrameDurations =
                (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : mMinFrameDurations;

        for (StreamConfiguration config : configurations) {
            int fmt = config.getFormat();
            if (fmt == format && config.isOutput() == output) {
                if (output && mListHighResolution) {
                    // Filter slow high-res output formats; include for
                    // highRes, remove for !highRes
                    long duration = 0;
                    for (int i = 0; i < minFrameDurations.length; i++) {
                        StreamConfigurationDuration d = minFrameDurations[i];
                        if (d.getFormat() == fmt &&
                                d.getWidth() == config.getSize().getWidth() &&
                                d.getHeight() == config.getSize().getHeight()) {
                            duration = d.getDuration();
                            break;
                        }
                    }
                    // A size is "slow high-res" iff its min frame duration exceeds
                    // DURATION_20FPS_NS (50ms per frame, i.e. under 20fps).
                    if (dataspace != HAL_DATASPACE_DEPTH &&
                            highRes != (duration > DURATION_20FPS_NS)) {
                        continue;
                    }
                }
                sizes[sizeIndex++] = config.getSize();
            }
        }

        // Sanity check: the count table and the configuration list must agree.
        if (sizeIndex != sizesCount) {
            throw new AssertionError(
                    "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
        }

        return sizes;
    }

    /** Get the
list of publically visible output formats; does not include IMPL_DEFINED */ 1332 private int[] getPublicFormats(boolean output) { 1333 int[] formats = new int[getPublicFormatCount(output)]; 1334 1335 int i = 0; 1336 1337 SparseIntArray map = getFormatsMap(output); 1338 for (int j = 0; j < map.size(); j++) { 1339 int format = map.keyAt(j); 1340 formats[i++] = imageFormatToPublic(format); 1341 } 1342 if (output) { 1343 for (int j = 0; j < mDepthOutputFormats.size(); j++) { 1344 formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j)); 1345 } 1346 } 1347 if (formats.length != i) { 1348 throw new AssertionError("Too few formats " + i + ", expected " + formats.length); 1349 } 1350 1351 return formats; 1352 } 1353 1354 /** Get the format -> size count map for either output or input formats */ 1355 private SparseIntArray getFormatsMap(boolean output) { 1356 return output ? mAllOutputFormats : mInputFormats; 1357 } 1358 1359 private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) { 1360 // assume format is already checked, since its internal 1361 1362 if (!isSupportedInternalConfiguration(format, dataspace, size)) { 1363 throw new IllegalArgumentException("size was not supported"); 1364 } 1365 1366 StreamConfigurationDuration[] durations = getDurations(duration, dataspace); 1367 1368 for (StreamConfigurationDuration configurationDuration : durations) { 1369 if (configurationDuration.getFormat() == format && 1370 configurationDuration.getWidth() == size.getWidth() && 1371 configurationDuration.getHeight() == size.getHeight()) { 1372 return configurationDuration.getDuration(); 1373 } 1374 } 1375 // Default duration is '0' (unsupported/no extra stall) 1376 return 0; 1377 } 1378 1379 /** 1380 * Get the durations array for the kind of duration 1381 * 1382 * @see #DURATION_MIN_FRAME 1383 * @see #DURATION_STALL 1384 * */ 1385 private StreamConfigurationDuration[] getDurations(int duration, int dataspace) { 1386 switch (duration) { 1387 
case DURATION_MIN_FRAME: 1388 return (dataspace == HAL_DATASPACE_DEPTH) ? 1389 mDepthMinFrameDurations : mMinFrameDurations; 1390 case DURATION_STALL: 1391 return (dataspace == HAL_DATASPACE_DEPTH) ? 1392 mDepthStallDurations : mStallDurations; 1393 default: 1394 throw new IllegalArgumentException("duration was invalid"); 1395 } 1396 } 1397 1398 /** Count the number of publicly-visible output formats */ 1399 private int getPublicFormatCount(boolean output) { 1400 SparseIntArray formatsMap = getFormatsMap(output); 1401 int size = formatsMap.size(); 1402 if (output) { 1403 size += mDepthOutputFormats.size(); 1404 } 1405 1406 return size; 1407 } 1408 1409 private static <T> boolean arrayContains(T[] array, T element) { 1410 if (array == null) { 1411 return false; 1412 } 1413 1414 for (T el : array) { 1415 if (Objects.equals(el, element)) { 1416 return true; 1417 } 1418 } 1419 1420 return false; 1421 } 1422 1423 private boolean isSupportedInternalConfiguration(int format, int dataspace, 1424 Size size) { 1425 StreamConfiguration[] configurations = 1426 (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations; 1427 1428 for (int i = 0; i < configurations.length; i++) { 1429 if (configurations[i].getFormat() == format && 1430 configurations[i].getSize().equals(size)) { 1431 return true; 1432 } 1433 } 1434 1435 return false; 1436 } 1437 1438 /** 1439 * Return this {@link StreamConfigurationMap} as a string representation. 1440 * 1441 * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, 1442 * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d, 1443 * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput( 1444 * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations( 1445 * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... 
 [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
     *
     * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
     * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
     * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
     * configuration's width, height, format, minimal frame duration in nanoseconds, and stall
     * duration in nanoseconds.</p>
     *
     * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
     * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
     * format.</p>
     *
     * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
     * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
     * represents an input format and its valid output formats.</p>
     *
     * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
     * ...
 [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
     * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
     * configuration's width, height, minimal frame rate, and maximal frame rate.</p>
     *
     * @return string representation of {@link StreamConfigurationMap}
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("StreamConfiguration(");
        appendOutputsString(sb);
        sb.append(", ");
        appendHighResOutputsString(sb);
        sb.append(", ");
        appendInputsString(sb);
        sb.append(", ");
        appendValidOutputFormatsForInputString(sb);
        sb.append(", ");
        appendHighSpeedVideoConfigurationsString(sb);
        sb.append(")");

        return sb.toString();
    }

    /** Append the "Outputs(...)" section: each output format/size with its durations. */
    private void appendOutputsString(StringBuilder sb) {
        sb.append("Outputs(");
        int[] formats = getOutputFormats();
        for (int format : formats) {
            Size[] sizes = getOutputSizes(format);
            for (Size size : sizes) {
                long minFrameDuration = getOutputMinFrameDuration(format, size);
                long stallDuration = getOutputStallDuration(format, size);
                sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
                        "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
                        format, minFrameDuration, stallDuration));
            }
        }
        // Remove the pending ", "
        if (sb.charAt(sb.length() - 1) == ' ') {
            sb.delete(sb.length() - 2, sb.length());
        }
        sb.append(")");
    }

    /** Append the "HighResolutionOutputs(...)" section (slow high-resolution sizes only). */
    private void appendHighResOutputsString(StringBuilder sb) {
        sb.append("HighResolutionOutputs(");
        int[] formats = getOutputFormats();
        for (int format : formats) {
            Size[] sizes = getHighResolutionOutputSizes(format);
            if (sizes == null) continue;
            for (Size size : sizes) {
                long minFrameDuration = getOutputMinFrameDuration(format, size);
                long stallDuration = getOutputStallDuration(format, size);
                sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
                        "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
                        format, minFrameDuration, stallDuration));
            }
        }
        // Remove the pending ", "
        if (sb.charAt(sb.length() - 1) == ' ') {
            sb.delete(sb.length() - 2, sb.length());
        }
        sb.append(")");
    }

    /** Append the "Inputs(...)" section: each input format/size. */
    private void appendInputsString(StringBuilder sb) {
        sb.append("Inputs(");
        int[] formats = getInputFormats();
        for (int format : formats) {
            Size[] sizes = getInputSizes(format);
            for (Size size : sizes) {
                sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
                        size.getHeight(), formatToString(format), format));
            }
        }
        // Remove the pending ", "
        if (sb.charAt(sb.length() - 1) == ' ') {
            sb.delete(sb.length() - 2, sb.length());
        }
        sb.append(")");
    }

    /** Append the "ValidOutputFormatsForInput(...)" section: reprocessing in/out format pairs. */
    private void appendValidOutputFormatsForInputString(StringBuilder sb) {
        sb.append("ValidOutputFormatsForInput(");
        int[] inputFormats = getInputFormats();
        for (int inputFormat : inputFormats) {
            sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
            int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
            for (int i = 0; i < outputFormats.length; i++) {
                sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
                        outputFormats[i]));
                if (i < outputFormats.length - 1) {
                    sb.append(", ");
                }
            }
            sb.append("], ");
        }
        // Remove the pending ", "
        if (sb.charAt(sb.length() - 1) == ' ') {
            sb.delete(sb.length() - 2, sb.length());
        }
        sb.append(")");
    }

    /** Append the "HighSpeedVideoConfigurations(...)" section: size + FPS-range pairs. */
    private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
        sb.append("HighSpeedVideoConfigurations(");
        Size[] sizes = getHighSpeedVideoSizes();
        for (Size size : sizes) {
            Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
            for (Range<Integer> range : ranges) {
                sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
                        size.getHeight(), range.getLower(), range.getUpper()));
            }
        }
        // Remove the pending ", "
        if (sb.charAt(sb.length() - 1) == ' ') {
            sb.delete(sb.length() - 2, sb.length());
        }
        sb.append(")");
    }

    /** Human-readable name for a public format constant; "UNKNOWN" for anything else. */
    private String formatToString(int format) {
        switch (format) {
            case ImageFormat.YV12:
                return "YV12";
            case ImageFormat.YUV_420_888:
                return "YUV_420_888";
            case ImageFormat.NV21:
                return "NV21";
            case ImageFormat.NV16:
                return "NV16";
            case PixelFormat.RGB_565:
                return "RGB_565";
            case PixelFormat.RGBA_8888:
                return "RGBA_8888";
            case PixelFormat.RGBX_8888:
                return "RGBX_8888";
            case PixelFormat.RGB_888:
                return "RGB_888";
            case ImageFormat.JPEG:
                return "JPEG";
            case ImageFormat.YUY2:
                return "YUY2";
            case ImageFormat.Y8:
                return "Y8";
            case ImageFormat.Y16:
                return "Y16";
            case ImageFormat.RAW_SENSOR:
                return "RAW_SENSOR";
            case ImageFormat.RAW_PRIVATE:
                return "RAW_PRIVATE";
            case ImageFormat.RAW10:
                return "RAW10";
            case ImageFormat.DEPTH16:
                return "DEPTH16";
            case ImageFormat.DEPTH_POINT_CLOUD:
                return "DEPTH_POINT_CLOUD";
            case ImageFormat.PRIVATE:
                return "PRIVATE";
            default:
                return "UNKNOWN";
        }
    }

    // from system/core/include/system/graphics.h
    private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
    private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
    private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
    private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
    private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
    // Value appears to be the little-endian FourCC 'Y16 ' — see graphics.h for the definition.
    private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;


    // Bit positions of the standard/transfer/range fields within a HAL dataspace value.
    private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
    private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
    private static final int HAL_DATASPACE_RANGE_SHIFT = 27;

    private static final int HAL_DATASPACE_UNKNOWN = 0x0;
    // Composed per the HAL dataspace bitfield layout: standard=2, transfer=3, range=1.
    private static final int HAL_DATASPACE_V0_JFIF =
            (2 << HAL_DATASPACE_STANDARD_SHIFT) |
            (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
            (1 << HAL_DATASPACE_RANGE_SHIFT);

    private static final int HAL_DATASPACE_DEPTH = 0x1000;

    // 50ms min frame duration == 20fps; used to classify "slow high-res" output sizes.
    private static final long DURATION_20FPS_NS = 50000000L;
    /**
     * @see #getDurations(int, int)
     */
    private static final int DURATION_MIN_FRAME = 0;
    private static final int DURATION_STALL = 1;

    private final StreamConfiguration[] mConfigurations;
    private final StreamConfigurationDuration[] mMinFrameDurations;
    private final StreamConfigurationDuration[] mStallDurations;

    private final StreamConfiguration[] mDepthConfigurations;
    private final StreamConfigurationDuration[] mDepthMinFrameDurations;
    private final StreamConfigurationDuration[] mDepthStallDurations;

    private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
    private final ReprocessFormatsMap mInputOutputFormatsMap;

    /** Whether slow high-resolution sizes are listed separately (filtered by 20fps cutoff). */
    private final boolean mListHighResolution;

    /** internal format -> num output sizes mapping, not including slow high-res sizes, for
     * non-depth dataspaces */
    private final SparseIntArray mOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
     * dataspaces */
    private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
    /** internal format -> num output sizes mapping for all non-depth dataspaces */
    private final SparseIntArray mAllOutputFormats = new SparseIntArray();
    /** internal format -> num input sizes mapping, for input reprocessing formats */
    private final SparseIntArray mInputFormats = new SparseIntArray();
    /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
    private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
    /** High speed video Size -> FPS range count mapping*/
    private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
            new HashMap<Size, Integer>();
    /** High speed video FPS range -> Size count mapping*/
    private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
            mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();

}