// RequestThreadManager.java revision 52571b9032fedb90ca1fabbda90d0313d44adfb9
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.impl.CameraDeviceImpl;
import android.hardware.camera2.utils.LongParcelable;
import android.hardware.camera2.utils.SizeAreaComparator;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.MutableLong;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static com.android.internal.util.Preconditions.*;

/**
 * This class executes requests to the {@link Camera}.
 *
 * <p>
 * The main components of this class are:
 * - A message queue of requests to the {@link Camera}.
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
 * </p>
 */
@SuppressWarnings("deprecation")
public class RequestThreadManager {
    // Log tag; set per-instance in the constructor ("RequestThread-<id>") so log
    // lines identify which camera this manager serves.
    private final String TAG;
    private final int mCameraId;
    // Dedicated looper thread; all camera work happens on it via mRequestHandlerCb.
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    // For slightly more spammy messages that will get repeated every frame
    private static final boolean VERBOSE =
            Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.VERBOSE);
    private final Camera mCamera;
    private final CameraCharacteristics mCharacteristics;

    // State machine notified of capture starts/results/errors and idle transitions.
    private final CameraDeviceState mDeviceState;
    // Tracks in-flight preview/jpeg captures; most waits below go through it.
    private final CaptureCollector mCaptureCollector;
    private final LegacyFocusStateMapper mFocusStateMapper;
    private final LegacyFaceDetectMapper mFaceDetectMapper;

    // Message codes handled by mRequestHandlerCb on the request thread.
    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    private static final int MAX_IN_FLIGHT_REQUESTS = 2;

    private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms
    private static final int JPEG_FRAME_TIMEOUT = 3000; // ms (same as CTS for API2)
    private static final int REQUEST_COMPLETE_TIMEOUT = 3000; // ms (same as JPEG timeout)

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    // Tracks whether Camera.startPreview() is currently active; only touched on the
    // request thread.
    private boolean mPreviewRunning = false;

    private final List<Surface> mPreviewOutputs = new ArrayList<>();
    private final List<Surface> mCallbackOutputs = new ArrayList<>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    // Last-known Camera1 parameters; refreshed from the HAL after each change.
    private Camera.Parameters mParams;

    // Surface ids of configured JPEG outputs; shared with mRequestQueue.
    private final List<Long> mJpegSurfaceIds = new ArrayList<>();

    // Camera1 preview size teed to all preview outputs; null when no preview outputs.
    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds);
    private LegacyRequest mLastRequest = null;
    // Placeholder preview target used when a JPEG capture arrives with no preview
    // configured (Camera1 requires an active preview before takePicture).
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    // Guards the "queue empty -> go idle" handoff against concurrent submits.
    private final Object mIdleLock = new Object();
    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write
    // limitations for (b/17379185).
    private static final boolean USE_BLOB_FORMAT_OVERRIDE = true;

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        // Opened by the request thread once configuration completes; the caller blocks on it.
        public final ConditionVariable condition;
        public final Collection<Surface> surfaces;

        public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
    public static class FpsCounter {
        //TODO: Hook this up to SysTrace?
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        /** Records one frame; recomputes FPS once at least a second has elapsed. */
        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        /** Returns the most recently computed FPS value. */
        public synchronized double checkFps() {
            return mLastFps;
        }

        /** Logs the current FPS, at most once every five seconds. */
        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps );
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }
    /**
     * Fake preview for jpeg captures when there is no active preview
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    // Forwards Camera1 hardware errors into the device state machine.
    private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() {
        @Override
        public void onError(int i, Camera camera) {
            Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback");
            mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
        }
    };

    // Signaled by mJpegCallback when JPEG data arrives; the request loop blocks on it.
    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);

    // Receives the compressed JPEG from takePicture and writes it into the JPEG output
    // surface(s).  (Declaration continues on the next line.)
    private final
Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() { 194 @Override 195 public void onPictureTaken(byte[] data, Camera camera) { 196 Log.i(TAG, "Received jpeg."); 197 Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced(); 198 if (captureInfo == null || captureInfo.first == null) { 199 Log.e(TAG, "Dropping jpeg frame."); 200 return; 201 } 202 RequestHolder holder = captureInfo.first; 203 long timestamp = captureInfo.second; 204 for (Surface s : holder.getHolderTargets()) { 205 try { 206 if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) { 207 Log.i(TAG, "Producing jpeg buffer..."); 208 209 int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize(); 210 totalSize += ((totalSize - 1) & ~0x3) + 4; // align to next octonibble 211 212 LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1); 213 LegacyCameraDevice.setNextTimestamp(s, timestamp); 214 LegacyCameraDevice.produceFrame(s, data, data.length, /*height*/1, 215 CameraMetadataNative.NATIVE_JPEG_FORMAT); 216 } 217 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 218 Log.w(TAG, "Surface abandoned, dropping frame. ", e); 219 } 220 } 221 222 mReceivedJpeg.open(); 223 } 224 }; 225 226 private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() { 227 @Override 228 public void onShutter() { 229 mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos()); 230 } 231 }; 232 233 private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback = 234 new SurfaceTexture.OnFrameAvailableListener() { 235 @Override 236 public void onFrameAvailable(SurfaceTexture surfaceTexture) { 237 if (DEBUG) { 238 mPrevCounter.countAndLog(); 239 } 240 mGLThreadManager.queueNewFrame(); 241 } 242 }; 243 244 private void stopPreview() { 245 if (VERBOSE) { 246 Log.v(TAG, "stopPreview - preview running? 
" + mPreviewRunning); 247 } 248 if (mPreviewRunning) { 249 mCamera.stopPreview(); 250 mPreviewRunning = false; 251 } 252 } 253 254 private void startPreview() { 255 if (VERBOSE) { 256 Log.v(TAG, "startPreview - preview running? " + mPreviewRunning); 257 } 258 if (!mPreviewRunning) { 259 // XX: CameraClient:;startPreview is not getting called after a stop 260 mCamera.startPreview(); 261 mPreviewRunning = true; 262 } 263 } 264 265 private void doJpegCapturePrepare(RequestHolder request) throws IOException { 266 if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning); 267 268 if (!mPreviewRunning) { 269 if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface"); 270 271 createDummySurface(); 272 mCamera.setPreviewTexture(mDummyTexture); 273 startPreview(); 274 } 275 } 276 277 private void doJpegCapture(RequestHolder request) { 278 if (DEBUG) Log.d(TAG, "doJpegCapturePrepare"); 279 280 mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback); 281 mPreviewRunning = false; 282 } 283 284 private void doPreviewCapture(RequestHolder request) throws IOException { 285 if (VERBOSE) { 286 Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning); 287 } 288 289 if (mPreviewRunning) { 290 return; // Already running 291 } 292 293 if (mPreviewTexture == null) { 294 throw new IllegalStateException( 295 "Preview capture called with no preview surfaces configured."); 296 } 297 298 mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(), 299 mIntermediateBufferSize.getHeight()); 300 mCamera.setPreviewTexture(mPreviewTexture); 301 302 startPreview(); 303 } 304 305 private void configureOutputs(Collection<Surface> outputs) { 306 if (DEBUG) { 307 String outputsStr = outputs == null ? 
"null" : (outputs.size() + " surfaces"); 308 Log.d(TAG, "configureOutputs with " + outputsStr); 309 } 310 311 stopPreview(); 312 /* 313 * Try to release the previous preview's surface texture earlier if we end up 314 * using a different one; this also reduces the likelihood of getting into a deadlock 315 * when disconnecting from the old previous texture at a later time. 316 */ 317 try { 318 mCamera.setPreviewTexture(/*surfaceTexture*/null); 319 } catch (IOException e) { 320 Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e); 321 } 322 323 if (mGLThreadManager != null) { 324 mGLThreadManager.waitUntilStarted(); 325 mGLThreadManager.ignoreNewFrames(); 326 mGLThreadManager.waitUntilIdle(); 327 } 328 resetJpegSurfaceFormats(mCallbackOutputs); 329 mPreviewOutputs.clear(); 330 mCallbackOutputs.clear(); 331 mJpegSurfaceIds.clear(); 332 mPreviewTexture = null; 333 334 int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING); 335 int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); 336 if (outputs != null) { 337 for (Surface s : outputs) { 338 try { 339 int format = LegacyCameraDevice.detectSurfaceType(s); 340 LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation); 341 switch (format) { 342 case CameraMetadataNative.NATIVE_JPEG_FORMAT: 343 if (USE_BLOB_FORMAT_OVERRIDE) { 344 // Override to RGBA_8888 format. 
345 LegacyCameraDevice.setSurfaceFormat(s, 346 LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888); 347 } 348 mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s)); 349 mCallbackOutputs.add(s); 350 break; 351 default: 352 mPreviewOutputs.add(s); 353 break; 354 } 355 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 356 Log.w(TAG, "Surface abandoned, skipping...", e); 357 } 358 } 359 } 360 try { 361 mParams = mCamera.getParameters(); 362 } catch (RuntimeException e) { 363 Log.e(TAG, "Received device exception: ", e); 364 mDeviceState.setError( 365 CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE); 366 return; 367 } 368 369 List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange(); 370 int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges); 371 if (DEBUG) { 372 Log.d(TAG, "doPreviewCapture - Selected range [" + 373 bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," + 374 bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]"); 375 } 376 mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], 377 bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); 378 379 if (mPreviewOutputs.size() > 0) { 380 List<Size> outputSizes = new ArrayList<>(outputs.size()); 381 for (Surface s : mPreviewOutputs) { 382 try { 383 Size size = LegacyCameraDevice.getSurfaceSize(s); 384 outputSizes.add(size); 385 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 386 Log.w(TAG, "Surface abandoned, skipping...", e); 387 } 388 } 389 390 Size largestOutput = SizeAreaComparator.findLargestByArea(outputSizes); 391 392 // Find largest jpeg dimension - assume to have the same aspect ratio as sensor. 
393 Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams); 394 395 List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList( 396 mParams.getSupportedPreviewSizes()); 397 398 // Use smallest preview dimension with same aspect ratio as sensor that is >= than all 399 // of the configured output dimensions. If none exists, fall back to using the largest 400 // supported preview size. 401 long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth(); 402 Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes); 403 for (Size s : supportedPreviewSizes) { 404 long currArea = s.getWidth() * s.getHeight(); 405 long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight(); 406 if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea && 407 currArea >= largestOutputArea)) { 408 bestPreviewDimen = s; 409 } 410 } 411 412 mIntermediateBufferSize = bestPreviewDimen; 413 mParams.setPreviewSize(mIntermediateBufferSize.getWidth(), 414 mIntermediateBufferSize.getHeight()); 415 416 if (DEBUG) { 417 Log.d(TAG, "Intermediate buffer selected with dimens: " + 418 bestPreviewDimen.toString()); 419 } 420 } else { 421 mIntermediateBufferSize = null; 422 if (DEBUG) { 423 Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured"); 424 } 425 } 426 427 Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, mParams); 428 if (smallestSupportedJpegSize != null) { 429 /* 430 * Set takePicture size to the smallest supported JPEG size large enough 431 * to scale/crop out of for the bounding rectangle of the configured JPEG sizes. 432 */ 433 434 Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize); 435 mParams.setPictureSize( 436 smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight()); 437 } 438 439 // TODO: Detect and optimize single-output paths here to skip stream teeing. 
440 if (mGLThreadManager == null) { 441 mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState); 442 mGLThreadManager.start(); 443 } 444 mGLThreadManager.waitUntilStarted(); 445 mGLThreadManager.setConfigurationAndWait(mPreviewOutputs, mCaptureCollector); 446 mGLThreadManager.allowNewFrames(); 447 mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture(); 448 if (mPreviewTexture != null) { 449 mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback); 450 } 451 452 mCamera.setParameters(mParams); 453 } 454 455 private void resetJpegSurfaceFormats(Collection<Surface> surfaces) { 456 if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) { 457 return; 458 } 459 for(Surface s : surfaces) { 460 try { 461 LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB); 462 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 463 Log.w(TAG, "Surface abandoned, skipping...", e); 464 } 465 } 466 } 467 468 /** 469 * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger 470 * than all of the configured {@code JPEG} outputs (by both width and height). 471 * 472 * <p>If multiple supported JPEG sizes are larger, select the smallest of them which 473 * still satisfies the above constraint.</p> 474 * 475 * <p>As a result, the returned size is guaranteed to be usable without needing 476 * to upscale any of the outputs. If only one {@code JPEG} surface is used, 477 * then no scaling/cropping is necessary between the taken picture and 478 * the {@code JPEG} output surface.</p> 479 * 480 * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats 481 * @param params api1 parameters (used for reading only) 482 * 483 * @return a size large enough to fit all of the configured {@code JPEG} outputs, or 484 * {@code null} if the {@code callbackOutputs} did not have any {@code JPEG} 485 * surfaces. 
486 */ 487 private Size calculatePictureSize( 488 Collection<Surface> callbackOutputs, Camera.Parameters params) { 489 /* 490 * Find the largest JPEG size (if any), from the configured outputs: 491 * - the api1 picture size should be set to the smallest legal size that's at least as large 492 * as the largest configured JPEG size 493 */ 494 List<Size> configuredJpegSizes = new ArrayList<Size>(); 495 for (Surface callbackSurface : callbackOutputs) { 496 try { 497 498 if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) { 499 continue; // Ignore non-JPEG callback formats 500 } 501 502 Size jpegSize = LegacyCameraDevice.getSurfaceSize(callbackSurface); 503 configuredJpegSizes.add(jpegSize); 504 } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) { 505 Log.w(TAG, "Surface abandoned, skipping...", e); 506 } 507 } 508 if (!configuredJpegSizes.isEmpty()) { 509 /* 510 * Find the largest configured JPEG width, and height, independently 511 * of the rest. 512 * 513 * The rest of the JPEG streams can be cropped out of this smallest bounding 514 * rectangle. 515 */ 516 int maxConfiguredJpegWidth = -1; 517 int maxConfiguredJpegHeight = -1; 518 for (Size jpegSize : configuredJpegSizes) { 519 maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ? 520 jpegSize.getWidth() : maxConfiguredJpegWidth; 521 maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ? 522 jpegSize.getHeight() : maxConfiguredJpegHeight; 523 } 524 Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight); 525 526 List<Size> supportedJpegSizes = ParameterUtils.convertSizeList( 527 params.getSupportedPictureSizes()); 528 529 /* 530 * Find the smallest supported JPEG size that can fit the smallest bounding 531 * rectangle for the configured JPEG sizes. 
532 */ 533 List<Size> candidateSupportedJpegSizes = new ArrayList<>(); 534 for (Size supportedJpegSize : supportedJpegSizes) { 535 if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth && 536 supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) { 537 candidateSupportedJpegSizes.add(supportedJpegSize); 538 } 539 } 540 541 if (candidateSupportedJpegSizes.isEmpty()) { 542 throw new AssertionError( 543 "Could not find any supported JPEG sizes large enough to fit " + 544 smallestBoundJpegSize); 545 } 546 547 Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes, 548 new SizeAreaComparator()); 549 550 if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) { 551 Log.w(TAG, 552 String.format( 553 "configureOutputs - Will need to crop picture %s into " 554 + "smallest bound size %s", 555 smallestSupportedJpegSize, smallestBoundJpegSize)); 556 } 557 558 return smallestSupportedJpegSize; 559 } 560 561 return null; 562 } 563 564 private static boolean checkAspectRatiosMatch(Size a, Size b) { 565 float aAspect = a.getWidth() / (float) a.getHeight(); 566 float bAspect = b.getWidth() / (float) b.getHeight(); 567 568 return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE; 569 } 570 571 // Calculate the highest FPS range supported 572 private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) { 573 if (frameRates.size() == 0) { 574 Log.e(TAG, "No supported frame rates returned!"); 575 return null; 576 } 577 578 int bestMin = 0; 579 int bestMax = 0; 580 int bestIndex = 0; 581 int index = 0; 582 for (int[] rate : frameRates) { 583 int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]; 584 int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]; 585 if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) { 586 bestMin = minFps; 587 bestMax = maxFps; 588 bestIndex = index; 589 } 590 index++; 591 } 592 593 return frameRates.get(bestIndex); 594 } 595 596 private final Handler.Callback mRequestHandlerCb = new 
            Handler.Callback() {
        // Set on MSG_CLEANUP; all later messages are ignored once true.
        private boolean mCleanup = false;
        private final LegacyResultMapper mMapper = new LegacyResultMapper();

        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message:" + msg.what);
            }
            long startTime = 0;
            if (DEBUG) {
                startTime = SystemClock.elapsedRealtimeNanos();
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");

                    // Drain any in-flight captures before reconfiguring outputs.
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing configure request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                        break;
                    }

                    configureOutputs(config.surfaces);
                    // Unblock the caller waiting in configure().
                    config.condition.open();
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Configure took " + totalTime + " ns");
                    }
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();

                    if (nextBurst == null) {
                        // If there are no further requests queued, wait for any currently executing
                        // requests to complete, then switch to idle state.
                        try {
                            boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                    TimeUnit.MILLISECONDS);
                            if (!success) {
                                Log.e(TAG,
                                        "Timed out while waiting for prior requests to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        // Hold mIdleLock so a concurrent submitCaptureRequests cannot slip a
                        // request in between this retry and the setIdle transition.
                        synchronized (mIdleLock) {
                            // Retry the request queue.
                            nextBurst = mRequestQueue.getNext();

                            // If we still have no queued requests, go idle.
                            if (nextBurst == null) {
                                mDeviceState.setIdle();
                                break;
                            }
                        }
                    }

                    if (nextBurst != null) {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();

                        boolean paramsChanged = false;

                        // Only update parameters if the request has changed
                        if (mLastRequest == null || mLastRequest.captureRequest != request) {

                            // The intermediate buffer is sometimes null, but we always need
                            // the Camera1 API configured preview size
                            Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());

                            LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
                                    request, previewSize, mParams); // params are copied


                            // Parameters are mutated as a side-effect
                            LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);

                            // If the parameters have changed, set them in the Camera1 API.
                            if (!mParams.same(legacyRequest.parameters)) {
                                try {
                                    mCamera.setParameters(legacyRequest.parameters);
                                } catch (RuntimeException e) {
                                    // If setting the parameters failed, report a request error to
                                    // the camera client, and skip any further work for this request
                                    Log.e(TAG, "Exception while setting camera parameters: ", e);
                                    holder.failRequest();
                                    mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                            CameraDeviceImpl.CameraDeviceCallbacks.
                                                    ERROR_CAMERA_REQUEST);
                                    continue;
                                }
                                paramsChanged = true;
                                mParams = legacyRequest.parameters;
                            }

                            mLastRequest = legacyRequest;
                        }

                        try {
                            boolean success = mCaptureCollector.queueRequest(holder,
                                    mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);

                            if (!success) {
                                // Report a request error if we timed out while queuing this.
                                Log.e(TAG, "Timed out while queueing capture request.");
                                holder.failRequest();
                                mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                        CameraDeviceImpl.CameraDeviceCallbacks.
                                                ERROR_CAMERA_REQUEST);
                                continue;
                            }

                            // Starting the preview needs to happen before enabling
                            // face detection or auto focus
                            if (holder.hasPreviewTargets()) {
                                doPreviewCapture(holder);
                            }
                            if (holder.hasJpegTargets()) {
                                // Wait for outstanding preview frames before the still capture.
                                while(!mCaptureCollector.waitForPreviewsEmpty(PREVIEW_FRAME_TIMEOUT,
                                        TimeUnit.MILLISECONDS)) {
                                    // Fail preview requests until the queue is empty.
                                    Log.e(TAG, "Timed out while waiting for preview requests to " +
                                            "complete.");
                                    mCaptureCollector.failNextPreview();
                                }
                                // Close before takePicture so the later block() observes only
                                // this capture's jpeg arrival.
                                mReceivedJpeg.close();
                                doJpegCapturePrepare(holder);
                            }

                            /*
                             * Do all the actions that require a preview to have been started
                             */

                            // Toggle face detection on/off
                            // - do this before AF to give AF a chance to use faces
                            mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);

                            // Unconditionally process AF triggers, since they're non-idempotent
                            // - must be done after setting the most-up-to-date AF mode
                            mFocusStateMapper.processRequestTriggers(request, mParams);

                            if (holder.hasJpegTargets()) {
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                    mCaptureCollector.failNextJpeg();
                                }
                            }

                        } catch (IOException e) {
                            Log.e(TAG, "Received device exception: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted during capture: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        if (paramsChanged) {
                            if (DEBUG) {
                                Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
                            }
                            try {
                                mParams = mCamera.getParameters();
                            } catch (RuntimeException e) {
                                Log.e(TAG, "Received device exception: ", e);
                                mDeviceState.setError(
                                        CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                                break;
                            }

                            // Update parameters to the latest that we think the camera is using
                            mLastRequest.setParameters(mParams);
                        }

                        MutableLong timestampMutable = new MutableLong(/*value*/0L);
                        try {
                            boolean success = mCaptureCollector.waitForRequestCompleted(holder,
                                    REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
                                    /*out*/timestampMutable);

                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for request to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted waiting for request completion: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
                                mLastRequest, timestampMutable.value);
                        /*
                         * Order matters: The default result mapper is state-less; the
                         * other mappers carry state and may override keys set by the default
                         * mapper with their own values.
                         */

                        // Update AF state
                        mFocusStateMapper.mapResultTriggers(result);
                        // Update face-related results
                        mFaceDetectMapper.mapResultFaces(result, mLastRequest);

                        if (!holder.requestFailed()) {
                            mDeviceState.setCaptureResult(holder, result,
                                    CameraDeviceState.NO_CAPTURE_ERROR);
                        }
                    }
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Capture request took " + totalTime + " ns");
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    // Drain in-flight captures, then release the GL thread and the camera.
                    mCleanup = true;
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing cleanup request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                    }
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                    }
                    if (mCamera != null) {
                        mCamera.release();
                    }
                    resetJpegSurfaceFormats(mCallbackOutputs);
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new
RequestThreadManager. 877 * 878 * @param cameraId the id of the camera to use. 879 * @param camera an open camera object. The RequestThreadManager takes ownership of this camera 880 * object, and is responsible for closing it. 881 * @param characteristics the static camera characteristics corresponding to this camera device 882 * @param deviceState a {@link CameraDeviceState} state machine. 883 */ 884 public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics, 885 CameraDeviceState deviceState) { 886 mCamera = checkNotNull(camera, "camera must not be null"); 887 mCameraId = cameraId; 888 mCharacteristics = checkNotNull(characteristics, "characteristics must not be null"); 889 String name = String.format("RequestThread-%d", cameraId); 890 TAG = name; 891 mDeviceState = checkNotNull(deviceState, "deviceState must not be null"); 892 mFocusStateMapper = new LegacyFocusStateMapper(mCamera); 893 mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics); 894 mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState); 895 mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb); 896 mCamera.setErrorCallback(mErrorCallback); 897 } 898 899 /** 900 * Start the request thread. 901 */ 902 public void start() { 903 mRequestThread.start(); 904 } 905 906 /** 907 * Flush any pending requests. 908 * 909 * @return the last frame number. 910 */ 911 public long flush() { 912 Log.i(TAG, "Flushing all pending requests."); 913 long lastFrame = mRequestQueue.stopRepeating(); 914 mCaptureCollector.failAll(); 915 return lastFrame; 916 } 917 918 /** 919 * Quit the request thread, and clean up everything. 
920 */ 921 public void quit() { 922 Handler handler = mRequestThread.waitAndGetHandler(); 923 handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP)); 924 mRequestThread.quitSafely(); 925 try { 926 mRequestThread.join(); 927 } catch (InterruptedException e) { 928 Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.", 929 mRequestThread.getName(), mRequestThread.getId())); 930 } 931 } 932 933 /** 934 * Submit the given burst of requests to be captured. 935 * 936 * <p>If the burst is repeating, replace the current repeating burst.</p> 937 * 938 * @param requests the burst of requests to add to the queue. 939 * @param repeating true if the burst is repeating. 940 * @param frameNumber an output argument that contains either the frame number of the last frame 941 * that will be returned for this request, or the frame number of the last 942 * frame that will be returned for the current repeating request if this 943 * burst is set to be repeating. 944 * @return the request id. 945 */ 946 public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating, 947 /*out*/LongParcelable frameNumber) { 948 Handler handler = mRequestThread.waitAndGetHandler(); 949 int ret; 950 synchronized (mIdleLock) { 951 ret = mRequestQueue.submit(requests, repeating, frameNumber); 952 handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST); 953 } 954 return ret; 955 } 956 957 /** 958 * Cancel a repeating request. 959 * 960 * @param requestId the id of the repeating request to cancel. 961 * @return the last frame to be returned from the HAL for the given repeating request, or 962 * {@code INVALID_FRAME} if none exists. 963 */ 964 public long cancelRepeating(int requestId) { 965 return mRequestQueue.stopRepeating(requestId); 966 } 967 968 /** 969 * Configure with the current list of output Surfaces. 970 * 971 * <p> 972 * This operation blocks until the configuration is complete. 
973 * </p> 974 * 975 * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p> 976 * 977 * @param outputs a {@link java.util.Collection} of outputs to configure. 978 */ 979 public void configure(Collection<Surface> outputs) { 980 Handler handler = mRequestThread.waitAndGetHandler(); 981 final ConditionVariable condition = new ConditionVariable(/*closed*/false); 982 ConfigureHolder holder = new ConfigureHolder(condition, outputs); 983 handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder)); 984 condition.block(); 985 } 986} 987