RequestThreadManager.java revision dcb9e169d7b37d62112c25c65da54c2b92651f9f
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.hardware.camera2.legacy; 18 19import android.graphics.SurfaceTexture; 20import android.hardware.Camera; 21import android.hardware.camera2.CameraCharacteristics; 22import android.hardware.camera2.CaptureRequest; 23import android.hardware.camera2.impl.CameraDeviceImpl; 24import android.hardware.camera2.utils.LongParcelable; 25import android.hardware.camera2.utils.SizeAreaComparator; 26import android.hardware.camera2.impl.CameraMetadataNative; 27import android.os.ConditionVariable; 28import android.os.Handler; 29import android.os.Message; 30import android.os.SystemClock; 31import android.util.Log; 32import android.util.MutableLong; 33import android.util.Pair; 34import android.util.Size; 35import android.view.Surface; 36 37import java.io.IOException; 38import java.util.ArrayList; 39import java.util.Collection; 40import java.util.Collections; 41import java.util.Iterator; 42import java.util.List; 43import java.util.concurrent.TimeUnit; 44import java.util.concurrent.atomic.AtomicBoolean; 45 46import static com.android.internal.util.Preconditions.*; 47 48/** 49 * This class executes requests to the {@link Camera}. 50 * 51 * <p> 52 * The main components of this class are: 53 * - A message queue of requests to the {@link Camera}. 54 * - A thread that consumes requests to the {@link Camera} and executes them. 
55 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s. 56 * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations. 57 * </p> 58 */ 59@SuppressWarnings("deprecation") 60public class RequestThreadManager { 61 private final String TAG; 62 private final int mCameraId; 63 private final RequestHandlerThread mRequestThread; 64 65 private static final boolean DEBUG = false; 66 // For slightly more spammy messages that will get repeated every frame 67 private static final boolean VERBOSE = false; 68 private Camera mCamera; 69 private final CameraCharacteristics mCharacteristics; 70 71 private final CameraDeviceState mDeviceState; 72 private final CaptureCollector mCaptureCollector; 73 private final LegacyFocusStateMapper mFocusStateMapper; 74 private final LegacyFaceDetectMapper mFaceDetectMapper; 75 76 private static final int MSG_CONFIGURE_OUTPUTS = 1; 77 private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2; 78 private static final int MSG_CLEANUP = 3; 79 80 private static final int MAX_IN_FLIGHT_REQUESTS = 2; 81 82 private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms 83 private static final int JPEG_FRAME_TIMEOUT = 4000; // ms (same as CTS for API2) 84 private static final int REQUEST_COMPLETE_TIMEOUT = JPEG_FRAME_TIMEOUT; 85 86 private static final float ASPECT_RATIO_TOLERANCE = 0.01f; 87 private boolean mPreviewRunning = false; 88 89 private final List<Surface> mPreviewOutputs = new ArrayList<>(); 90 private final List<Surface> mCallbackOutputs = new ArrayList<>(); 91 private GLThreadManager mGLThreadManager; 92 private SurfaceTexture mPreviewTexture; 93 private Camera.Parameters mParams; 94 95 private final List<Long> mJpegSurfaceIds = new ArrayList<>(); 96 97 private Size mIntermediateBufferSize; 98 99 private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds); 100 private LegacyRequest mLastRequest = null; 101 private SurfaceTexture mDummyTexture; 102 private 
Surface mDummySurface; 103 104 private final Object mIdleLock = new Object(); 105 private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview"); 106 private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests"); 107 108 private final AtomicBoolean mQuit = new AtomicBoolean(false); 109 110 // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write 111 // limitations for (b/17379185). 112 private static final boolean USE_BLOB_FORMAT_OVERRIDE = true; 113 114 /** 115 * Container object for Configure messages. 116 */ 117 private static class ConfigureHolder { 118 public final ConditionVariable condition; 119 public final Collection<Pair<Surface, Size>> surfaces; 120 121 public ConfigureHolder(ConditionVariable condition, Collection<Pair<Surface, 122 Size>> surfaces) { 123 this.condition = condition; 124 this.surfaces = surfaces; 125 } 126 } 127 128 /** 129 * Counter class used to calculate and log the current FPS of frame production. 130 */ 131 public static class FpsCounter { 132 //TODO: Hook this up to SystTrace? 
133 private static final String TAG = "FpsCounter"; 134 private int mFrameCount = 0; 135 private long mLastTime = 0; 136 private long mLastPrintTime = 0; 137 private double mLastFps = 0; 138 private final String mStreamType; 139 private static final long NANO_PER_SECOND = 1000000000; //ns 140 141 public FpsCounter(String streamType) { 142 mStreamType = streamType; 143 } 144 145 public synchronized void countFrame() { 146 mFrameCount++; 147 long nextTime = SystemClock.elapsedRealtimeNanos(); 148 if (mLastTime == 0) { 149 mLastTime = nextTime; 150 } 151 if (nextTime > mLastTime + NANO_PER_SECOND) { 152 long elapsed = nextTime - mLastTime; 153 mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed); 154 mFrameCount = 0; 155 mLastTime = nextTime; 156 } 157 } 158 159 public synchronized double checkFps() { 160 return mLastFps; 161 } 162 163 public synchronized void staggeredLog() { 164 if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) { 165 mLastPrintTime = mLastTime; 166 Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps ); 167 } 168 } 169 170 public synchronized void countAndLog() { 171 countFrame(); 172 staggeredLog(); 173 } 174 } 175 /** 176 * Fake preview for jpeg captures when there is no active preview 177 */ 178 private void createDummySurface() { 179 if (mDummyTexture == null || mDummySurface == null) { 180 mDummyTexture = new SurfaceTexture(/*ignored*/0); 181 // TODO: use smallest default sizes 182 mDummyTexture.setDefaultBufferSize(640, 480); 183 mDummySurface = new Surface(mDummyTexture); 184 } 185 } 186 187 private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() { 188 @Override 189 public void onError(int i, Camera camera) { 190 Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback"); 191 mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 192 } 193 }; 194 195 private final ConditionVariable mReceivedJpeg = new ConditionVariable(false); 196 197 private final 
Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() { 198 @Override 199 public void onPictureTaken(byte[] data, Camera camera) { 200 Log.i(TAG, "Received jpeg."); 201 Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced(); 202 if (captureInfo == null || captureInfo.first == null) { 203 Log.e(TAG, "Dropping jpeg frame."); 204 return; 205 } 206 RequestHolder holder = captureInfo.first; 207 long timestamp = captureInfo.second; 208 for (Surface s : holder.getHolderTargets()) { 209 try { 210 if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) { 211 Log.i(TAG, "Producing jpeg buffer..."); 212 213 int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize(); 214 totalSize = (totalSize + 3) & ~0x3; // round up to nearest octonibble 215 LegacyCameraDevice.setNextTimestamp(s, timestamp); 216 217 if (USE_BLOB_FORMAT_OVERRIDE) { 218 // Override to RGBA_8888 format. 219 LegacyCameraDevice.setSurfaceFormat(s, 220 LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888); 221 222 int dimen = (int) Math.ceil(Math.sqrt(totalSize)); 223 dimen = (dimen + 0xf) & ~0xf; // round up to nearest multiple of 16 224 LegacyCameraDevice.setSurfaceDimens(s, dimen, dimen); 225 LegacyCameraDevice.produceFrame(s, data, dimen, dimen, 226 CameraMetadataNative.NATIVE_JPEG_FORMAT); 227 } else { 228 LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1); 229 LegacyCameraDevice.produceFrame(s, data, totalSize, /*height*/1, 230 CameraMetadataNative.NATIVE_JPEG_FORMAT); 231 } 232 } 233 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 234 Log.w(TAG, "Surface abandoned, dropping frame. 
", e); 235 } 236 } 237 238 mReceivedJpeg.open(); 239 } 240 }; 241 242 private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() { 243 @Override 244 public void onShutter() { 245 mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos()); 246 } 247 }; 248 249 private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback = 250 new SurfaceTexture.OnFrameAvailableListener() { 251 @Override 252 public void onFrameAvailable(SurfaceTexture surfaceTexture) { 253 if (DEBUG) { 254 mPrevCounter.countAndLog(); 255 } 256 mGLThreadManager.queueNewFrame(); 257 } 258 }; 259 260 private void stopPreview() { 261 if (VERBOSE) { 262 Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning); 263 } 264 if (mPreviewRunning) { 265 mCamera.stopPreview(); 266 mPreviewRunning = false; 267 } 268 } 269 270 private void startPreview() { 271 if (VERBOSE) { 272 Log.v(TAG, "startPreview - preview running? " + mPreviewRunning); 273 } 274 if (!mPreviewRunning) { 275 // XX: CameraClient:;startPreview is not getting called after a stop 276 mCamera.startPreview(); 277 mPreviewRunning = true; 278 } 279 } 280 281 private void doJpegCapturePrepare(RequestHolder request) throws IOException { 282 if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning); 283 284 if (!mPreviewRunning) { 285 if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface"); 286 287 createDummySurface(); 288 mCamera.setPreviewTexture(mDummyTexture); 289 startPreview(); 290 } 291 } 292 293 private void doJpegCapture(RequestHolder request) { 294 if (DEBUG) Log.d(TAG, "doJpegCapturePrepare"); 295 296 mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback); 297 mPreviewRunning = false; 298 } 299 300 private void doPreviewCapture(RequestHolder request) throws IOException { 301 if (VERBOSE) { 302 Log.v(TAG, "doPreviewCapture - preview running? 
" + mPreviewRunning); 303 } 304 305 if (mPreviewRunning) { 306 return; // Already running 307 } 308 309 if (mPreviewTexture == null) { 310 throw new IllegalStateException( 311 "Preview capture called with no preview surfaces configured."); 312 } 313 314 mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(), 315 mIntermediateBufferSize.getHeight()); 316 mCamera.setPreviewTexture(mPreviewTexture); 317 318 startPreview(); 319 } 320 321 private void configureOutputs(Collection<Pair<Surface, Size>> outputs) { 322 if (DEBUG) { 323 String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces"); 324 Log.d(TAG, "configureOutputs with " + outputsStr); 325 } 326 327 try { 328 stopPreview(); 329 } catch (RuntimeException e) { 330 Log.e(TAG, "Received device exception in configure call: ", e); 331 mDeviceState.setError( 332 CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 333 return; 334 } 335 336 /* 337 * Try to release the previous preview's surface texture earlier if we end up 338 * using a different one; this also reduces the likelihood of getting into a deadlock 339 * when disconnecting from the old previous texture at a later time. 
340 */ 341 try { 342 mCamera.setPreviewTexture(/*surfaceTexture*/null); 343 } catch (IOException e) { 344 Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e); 345 } catch (RuntimeException e) { 346 Log.e(TAG, "Received device exception in configure call: ", e); 347 mDeviceState.setError( 348 CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 349 return; 350 } 351 352 if (mGLThreadManager != null) { 353 mGLThreadManager.waitUntilStarted(); 354 mGLThreadManager.ignoreNewFrames(); 355 mGLThreadManager.waitUntilIdle(); 356 } 357 resetJpegSurfaceFormats(mCallbackOutputs); 358 mPreviewOutputs.clear(); 359 mCallbackOutputs.clear(); 360 mJpegSurfaceIds.clear(); 361 mPreviewTexture = null; 362 363 List<Size> previewOutputSizes = new ArrayList<>(); 364 List<Size> callbackOutputSizes = new ArrayList<>(); 365 366 int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING); 367 int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 368 if (outputs != null) { 369 for (Pair<Surface, Size> outPair : outputs) { 370 Surface s = outPair.first; 371 Size outSize = outPair.second; 372 try { 373 int format = LegacyCameraDevice.detectSurfaceType(s); 374 LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation); 375 switch (format) { 376 case CameraMetadataNative.NATIVE_JPEG_FORMAT: 377 if (USE_BLOB_FORMAT_OVERRIDE) { 378 // Override to RGBA_8888 format. 379 LegacyCameraDevice.setSurfaceFormat(s, 380 LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888); 381 } 382 mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s)); 383 mCallbackOutputs.add(s); 384 callbackOutputSizes.add(outSize); 385 break; 386 default: 387 LegacyCameraDevice.setScalingMode(s, LegacyCameraDevice. 
388 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 389 mPreviewOutputs.add(s); 390 previewOutputSizes.add(outSize); 391 break; 392 } 393 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 394 Log.w(TAG, "Surface abandoned, skipping...", e); 395 } 396 } 397 } 398 try { 399 mParams = mCamera.getParameters(); 400 } catch (RuntimeException e) { 401 Log.e(TAG, "Received device exception: ", e); 402 mDeviceState.setError( 403 CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 404 return; 405 } 406 407 List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange(); 408 int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges); 409 if (DEBUG) { 410 Log.d(TAG, "doPreviewCapture - Selected range [" + 411 bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," + 412 bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]"); 413 } 414 mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], 415 bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); 416 417 Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, 418 callbackOutputSizes, mParams); 419 420 if (previewOutputSizes.size() > 0) { 421 422 Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes); 423 424 // Find largest jpeg dimension - assume to have the same aspect ratio as sensor. 425 Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams); 426 427 Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize 428 : largestJpegDimen; 429 430 List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList( 431 mParams.getSupportedPreviewSizes()); 432 433 // Use smallest preview dimension with same aspect ratio as sensor that is >= than all 434 // of the configured output dimensions. If none exists, fall back to using the largest 435 // supported preview size. 
436 long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth(); 437 Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes); 438 for (Size s : supportedPreviewSizes) { 439 long currArea = s.getWidth() * s.getHeight(); 440 long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight(); 441 if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea && 442 currArea >= largestOutputArea)) { 443 bestPreviewDimen = s; 444 } 445 } 446 447 mIntermediateBufferSize = bestPreviewDimen; 448 mParams.setPreviewSize(mIntermediateBufferSize.getWidth(), 449 mIntermediateBufferSize.getHeight()); 450 451 if (DEBUG) { 452 Log.d(TAG, "Intermediate buffer selected with dimens: " + 453 bestPreviewDimen.toString()); 454 } 455 } else { 456 mIntermediateBufferSize = null; 457 if (DEBUG) { 458 Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured"); 459 } 460 } 461 462 if (smallestSupportedJpegSize != null) { 463 /* 464 * Set takePicture size to the smallest supported JPEG size large enough 465 * to scale/crop out of for the bounding rectangle of the configured JPEG sizes. 466 */ 467 468 Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize); 469 mParams.setPictureSize( 470 smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight()); 471 } 472 473 // TODO: Detect and optimize single-output paths here to skip stream teeing. 
474 if (mGLThreadManager == null) { 475 mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState); 476 mGLThreadManager.start(); 477 } 478 mGLThreadManager.waitUntilStarted(); 479 List<Pair<Surface, Size>> previews = new ArrayList<>(); 480 Iterator<Size> previewSizeIter = previewOutputSizes.iterator(); 481 for (Surface p : mPreviewOutputs) { 482 previews.add(new Pair<>(p, previewSizeIter.next())); 483 } 484 mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector); 485 mGLThreadManager.allowNewFrames(); 486 mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture(); 487 if (mPreviewTexture != null) { 488 mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback); 489 } 490 491 try { 492 mCamera.setParameters(mParams); 493 } catch (RuntimeException e) { 494 Log.e(TAG, "Received device exception while configuring: ", e); 495 mDeviceState.setError( 496 CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 497 498 } 499 } 500 501 private void resetJpegSurfaceFormats(Collection<Surface> surfaces) { 502 if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) { 503 return; 504 } 505 for(Surface s : surfaces) { 506 if (s == null || !s.isValid()) { 507 Log.w(TAG, "Jpeg surface is invalid, skipping..."); 508 continue; 509 } 510 try { 511 LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB); 512 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 513 Log.w(TAG, "Surface abandoned, skipping...", e); 514 } 515 } 516 } 517 518 /** 519 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger 520 * than all of the configured {@code JPEG} outputs (by both width and height). 521 * 522 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which 523 * still satisfies the above constraint.</p> 524 * 525 * <p>As a result, the returned size is guaranteed to be usable without needing 526 * to upscale any of the outputs. 
If only one {@code JPEG} surface is used,
     * then no scaling/cropping is necessary between the taken picture and
     * the {@code JPEG} output surface.</p>
     *
     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
     * @param callbackSizes the configured size for each surface in {@code callbackOutputs};
     *                      must be the same length as {@code callbackOutputs}
     * @param params api1 parameters (used for reading only)
     *
     * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
     *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
     *         surfaces.
     */
    private Size calculatePictureSize( List<Surface> callbackOutputs,
            List<Size> callbackSizes, Camera.Parameters params) {
        /*
         * Find the largest JPEG size (if any), from the configured outputs:
         * - the api1 picture size should be set to the smallest legal size that's at least as large
         *   as the largest configured JPEG size
         */
        if (callbackOutputs.size() != callbackSizes.size()) {
            throw new IllegalStateException("Input collections must be same length");
        }
        List<Size> configuredJpegSizes = new ArrayList<>();
        Iterator<Size> sizeIterator = callbackSizes.iterator();
        for (Surface callbackSurface : callbackOutputs) {
            Size jpegSize = sizeIterator.next();
            if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
                continue; // Ignore non-JPEG callback formats
            }

            configuredJpegSizes.add(jpegSize);
        }
        if (!configuredJpegSizes.isEmpty()) {
            /*
             * Find the largest configured JPEG width, and height, independently
             * of the rest.
             *
             * The rest of the JPEG streams can be cropped out of this smallest bounding
             * rectangle.
             */
            int maxConfiguredJpegWidth = -1;
            int maxConfiguredJpegHeight = -1;
            for (Size jpegSize : configuredJpegSizes) {
                maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
                        jpegSize.getWidth() : maxConfiguredJpegWidth;
                maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
                        jpegSize.getHeight() : maxConfiguredJpegHeight;
            }
            Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);

            List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
                    params.getSupportedPictureSizes());

            /*
             * Find the smallest supported JPEG size that can fit the smallest bounding
             * rectangle for the configured JPEG sizes.
             */
            List<Size> candidateSupportedJpegSizes = new ArrayList<>();
            for (Size supportedJpegSize : supportedJpegSizes) {
                if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
                        supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                    candidateSupportedJpegSizes.add(supportedJpegSize);
                }
            }

            if (candidateSupportedJpegSizes.isEmpty()) {
                throw new AssertionError(
                        "Could not find any supported JPEG sizes large enough to fit " +
                        smallestBoundJpegSize);
            }

            Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
                    new SizeAreaComparator());

            if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
                Log.w(TAG,
                        String.format(
                                "configureOutputs - Will need to crop picture %s into "
                                + "smallest bound size %s",
                                smallestSupportedJpegSize, smallestBoundJpegSize));
            }

            return smallestSupportedJpegSize;
        }

        return null;
    }

    /** Returns true if the two sizes have the same aspect ratio within a small tolerance. */
    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    // Calculate the highest FPS range supported (highest max FPS wins; ties broken by
    // the higher min FPS). NOTE(review): returns null when frameRates is empty — callers
    // appear to index the result without a null check; verify upstream guarantees.
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }

    /**
     * Handler callback for the request thread. Processes output configuration, capture
     * request submission, and cleanup messages; all camera interaction happens here.
     * Once MSG_CLEANUP has been handled, all further messages are ignored.
     */
    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false;
        private final LegacyResultMapper mMapper = new LegacyResultMapper();

        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message:" + msg.what);
            }
            long startTime = 0;
            if (DEBUG) {
                startTime = SystemClock.elapsedRealtimeNanos();
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");

                    // Drain in-flight requests before reconfiguring.
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing configure request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                        break;
                    }

                    configureOutputs(config.surfaces);
                    // Unblock the caller waiting in configure().
                    config.condition.open();
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Configure took " + totalTime + " ns");
                    }
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();

                    if (nextBurst == null) {
                        // If there are no further requests queued, wait for any currently executing
                        // requests to complete, then switch to idle state.
                        try {
                            boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                    TimeUnit.MILLISECONDS);
                            if (!success) {
                                Log.e(TAG,
                                        "Timed out while waiting for prior requests to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        // Hold mIdleLock so a concurrent submitCaptureRequests cannot slip a
                        // new burst in between our final queue check and going idle.
                        synchronized (mIdleLock) {
                            // Retry the request queue.
                            nextBurst = mRequestQueue.getNext();

                            // If we still have no queued requests, go idle.
                            if (nextBurst == null) {
                                mDeviceState.setIdle();
                                break;
                            }
                        }
                    }

                    if (nextBurst != null) {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();

                        boolean paramsChanged = false;

                        // Only update parameters if the request has changed
                        if (mLastRequest == null || mLastRequest.captureRequest != request) {

                            // The intermediate buffer is sometimes null, but we always need
                            // the Camera1 API configured preview size
                            Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());

                            LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
                                    request, previewSize, mParams); // params are copied


                            // Parameters are mutated as a side-effect
                            LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);

                            // If the parameters have changed, set them in the Camera1 API.
                            if (!mParams.same(legacyRequest.parameters)) {
                                try {
                                    mCamera.setParameters(legacyRequest.parameters);
                                } catch (RuntimeException e) {
                                    // If setting the parameters failed, report a request error to
                                    // the camera client, and skip any further work for this request
                                    Log.e(TAG, "Exception while setting camera parameters: ", e);
                                    holder.failRequest();
                                    mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                            CameraDeviceImpl.CameraDeviceCallbacks.
                                                    ERROR_CAMERA_REQUEST);
                                    continue;
                                }
                                paramsChanged = true;
                                mParams = legacyRequest.parameters;
                            }

                            mLastRequest = legacyRequest;
                        }

                        try {
                            boolean success = mCaptureCollector.queueRequest(holder,
                                    mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);

                            if (!success) {
                                // Report a request error if we timed out while queuing this.
                                Log.e(TAG, "Timed out while queueing capture request.");
                                holder.failRequest();
                                mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                        CameraDeviceImpl.CameraDeviceCallbacks.
                                                ERROR_CAMERA_REQUEST);
                                continue;
                            }

                            // Starting the preview needs to happen before enabling
                            // face detection or auto focus
                            if (holder.hasPreviewTargets()) {
                                doPreviewCapture(holder);
                            }
                            if (holder.hasJpegTargets()) {
                                while(!mCaptureCollector.waitForPreviewsEmpty(PREVIEW_FRAME_TIMEOUT,
                                        TimeUnit.MILLISECONDS)) {
                                    // Fail preview requests until the queue is empty.
                                    Log.e(TAG, "Timed out while waiting for preview requests to " +
                                            "complete.");
                                    mCaptureCollector.failNextPreview();
                                }
                                // Re-arm the jpeg condition before starting the capture so the
                                // later block() waits for *this* capture's jpeg.
                                mReceivedJpeg.close();
                                doJpegCapturePrepare(holder);
                            }

                            /*
                             * Do all the actions that require a preview to have been started
                             */

                            // Toggle face detection on/off
                            // - do this before AF to give AF a chance to use faces
                            mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);

                            // Unconditionally process AF triggers, since they're non-idempotent
                            // - must be done after setting the most-up-to-date AF mode
                            mFocusStateMapper.processRequestTriggers(request, mParams);

                            if (holder.hasJpegTargets()) {
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                    mCaptureCollector.failNextJpeg();
                                }
                            }

                        } catch (IOException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted during capture: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (RuntimeException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        if (paramsChanged) {
                            if (DEBUG) {
                                Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
                            }
                            try {
                                mParams = mCamera.getParameters();
                            } catch (RuntimeException e) {
                                Log.e(TAG, "Received device exception: ", e);
                                mDeviceState.setError(
                                        CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                                break;
                            }

                            // Update parameters to the latest that we think the camera is using
                            mLastRequest.setParameters(mParams);
                        }

                        MutableLong timestampMutable = new MutableLong(/*value*/0L);
                        try {
                            boolean success = mCaptureCollector.waitForRequestCompleted(holder,
                                    REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
                                    /*out*/timestampMutable);

                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for request to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted waiting for request completion: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
                                mLastRequest, timestampMutable.value);
                        /*
                         * Order matters: The default result mapper is state-less; the
                         * other mappers carry state and may override keys set by the default
                         * mapper with their own values.
                         */

                        // Update AF state
                        mFocusStateMapper.mapResultTriggers(result);
                        // Update face-related results
                        mFaceDetectMapper.mapResultFaces(result, mLastRequest);

                        if (!holder.requestFailed()) {
                            mDeviceState.setCaptureResult(holder, result,
                                    CameraDeviceState.NO_CAPTURE_ERROR);
                        }
                    }
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Capture request took " + totalTime + " ns");
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing cleanup request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                    }
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                        mGLThreadManager = null;
                    }
                    if (mCamera != null) {
                        mCamera.release();
                        mCamera = null;
                    }
                    resetJpegSurfaceFormats(mCallbackOutputs);
                    break;
                case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
                    // OK: Ignore message.
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param characteristics the static camera characteristics corresponding to this camera device
     * @param deviceState a {@link CameraDeviceState} state machine.
942 */ 943 public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics, 944 CameraDeviceState deviceState) { 945 mCamera = checkNotNull(camera, "camera must not be null"); 946 mCameraId = cameraId; 947 mCharacteristics = checkNotNull(characteristics, "characteristics must not be null"); 948 String name = String.format("RequestThread-%d", cameraId); 949 TAG = name; 950 mDeviceState = checkNotNull(deviceState, "deviceState must not be null"); 951 mFocusStateMapper = new LegacyFocusStateMapper(mCamera); 952 mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics); 953 mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState); 954 mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb); 955 mCamera.setErrorCallback(mErrorCallback); 956 } 957 958 /** 959 * Start the request thread. 960 */ 961 public void start() { 962 mRequestThread.start(); 963 } 964 965 /** 966 * Flush any pending requests. 967 * 968 * @return the last frame number. 969 */ 970 public long flush() { 971 Log.i(TAG, "Flushing all pending requests."); 972 long lastFrame = mRequestQueue.stopRepeating(); 973 mCaptureCollector.failAll(); 974 return lastFrame; 975 } 976 977 /** 978 * Quit the request thread, and clean up everything. 979 */ 980 public void quit() { 981 if (!mQuit.getAndSet(true)) { // Avoid sending messages on dead thread's handler. 982 Handler handler = mRequestThread.waitAndGetHandler(); 983 handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP)); 984 mRequestThread.quitSafely(); 985 try { 986 mRequestThread.join(); 987 } catch (InterruptedException e) { 988 Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.", 989 mRequestThread.getName(), mRequestThread.getId())); 990 } 991 } 992 } 993 994 /** 995 * Submit the given burst of requests to be captured. 
996 * 997 * <p>If the burst is repeating, replace the current repeating burst.</p> 998 * 999 * @param requests the burst of requests to add to the queue. 1000 * @param repeating true if the burst is repeating. 1001 * @param frameNumber an output argument that contains either the frame number of the last frame 1002 * that will be returned for this request, or the frame number of the last 1003 * frame that will be returned for the current repeating request if this 1004 * burst is set to be repeating. 1005 * @return the request id. 1006 */ 1007 public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating, 1008 /*out*/LongParcelable frameNumber) { 1009 Handler handler = mRequestThread.waitAndGetHandler(); 1010 int ret; 1011 synchronized (mIdleLock) { 1012 ret = mRequestQueue.submit(requests, repeating, frameNumber); 1013 handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST); 1014 } 1015 return ret; 1016 } 1017 1018 /** 1019 * Cancel a repeating request. 1020 * 1021 * @param requestId the id of the repeating request to cancel. 1022 * @return the last frame to be returned from the HAL for the given repeating request, or 1023 * {@code INVALID_FRAME} if none exists. 1024 */ 1025 public long cancelRepeating(int requestId) { 1026 return mRequestQueue.stopRepeating(requestId); 1027 } 1028 1029 /** 1030 * Configure with the current list of output Surfaces. 1031 * 1032 * <p> 1033 * This operation blocks until the configuration is complete. 1034 * </p> 1035 * 1036 * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p> 1037 * 1038 * @param outputs a {@link java.util.Collection} of outputs to configure. 
1039 */ 1040 public void configure(Collection<Pair<Surface, Size>> outputs) { 1041 Handler handler = mRequestThread.waitAndGetHandler(); 1042 final ConditionVariable condition = new ConditionVariable(/*closed*/false); 1043 ConfigureHolder holder = new ConfigureHolder(condition, outputs); 1044 handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder)); 1045 condition.block(); 1046 } 1047} 1048