RequestThreadManager.java revision 3a3eb157417fb5618518b29d889c23e8831b081c
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.utils.LongParcelable;
import android.hardware.camera2.utils.SizeAreaComparator;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.MutableLong;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOError;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;

import static com.android.internal.util.Preconditions.*;

/**
 * This class executes requests to the {@link Camera}.
 *
 * <p>
 * The main components of this class are:
 * - A message queue of requests to the {@link Camera}.
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - A {@link CameraDeviceState} state machine that manages the callbacks for various operations.
 * </p>
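 *
 * <p>Illustrative usage sketch (the surrounding variables are hypothetical; only the
 * methods declared on this class are shown):</p>
 * <pre>{@code
 * RequestThreadManager requestThreadManager =
 *         new RequestThreadManager(cameraId, camera, characteristics, deviceState);
 * requestThreadManager.start();
 * requestThreadManager.configure(outputSurfaces); // blocks until the outputs are configured
 * requestThreadManager.submitCaptureRequests(requestList, false, lastFrameNumber);
 * // ... capture results are delivered through the CameraDeviceState callbacks ...
 * requestThreadManager.quit(); // cleans up and releases the Camera
 * }</pre>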
 */
public class RequestThreadManager {
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    private final Camera mCamera;
    private final CameraCharacteristics mCharacteristics;

    private final CameraDeviceState mDeviceState;
    private final CaptureCollector mCaptureCollector;
    private final LegacyFocusStateMapper mFocusStateMapper;

    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    private static final int MAX_IN_FLIGHT_REQUESTS = 2;

    private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
    private static final int JPEG_FRAME_TIMEOUT = 3000; // ms (same as CTS for API2)
    private static final int REQUEST_COMPLETE_TIMEOUT = 3000; // ms (same as JPEG timeout)

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private final List<Surface> mPreviewOutputs = new ArrayList<>();
    private final List<Surface> mCallbackOutputs = new ArrayList<>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue();
    private LegacyRequest mLastRequest = null;
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition;
        public final Collection<Surface> surfaces;

        public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
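     *
     * <p>Illustrative usage (names are hypothetical): call {@code countAndLog()} once per
     * produced frame; the most recent estimate can be read back with {@code checkFps()}.</p>
     * <pre>{@code
     * FpsCounter previewFps = new FpsCounter("Preview");
     * // On every frame:
     * previewFps.countAndLog();
     * double currentFps = previewFps.checkFps();
     * }</pre>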
     */
    public static class FpsCounter {
        // TODO: Hook this up to Systrace?
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        public synchronized double checkFps() {
            return mLastFps;
        }

        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps);
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }
    /**
     * Fake preview for jpeg captures when there is no active preview
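     *
     * <p>The legacy {@link Camera} API only allows {@code takePicture} while a preview is
     * active, so when no preview output has been configured a throwaway
     * {@link SurfaceTexture} is used as the preview target (see {@code doJpegCapture}).</p>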
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);

    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced();
            RequestHolder holder = captureInfo.first;
            long timestamp = captureInfo.second;
            if (holder == null) {
                Log.e(TAG, "Dropping jpeg frame.");
                return;
            }
            for (Surface s : holder.getHolderTargets()) {
                try {
                    if (RequestHolder.jpegType(s)) {
                        Log.i(TAG, "Producing jpeg buffer...");
                        LegacyCameraDevice.setSurfaceDimens(s, data.length, /*height*/1);
                        LegacyCameraDevice.setNextTimestamp(s, timestamp);
                        LegacyCameraDevice.produceFrame(s, data, data.length, /*height*/1,
                                CameraMetadataNative.NATIVE_JPEG_FORMAT);
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, dropping frame. ", e);
                }
            }

            mReceivedJpeg.open();
        }
    };

    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos());
        }
    };

    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mGLThreadManager.queueNewFrame();
                }
            };

    private void stopPreview() {
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
    }

    private void startPreview() {
        if (!mPreviewRunning) {
            mCamera.startPreview();
            mPreviewRunning = true;
        }
    }

    private void doJpegCapture(RequestHolder request) throws IOException {
        if (DEBUG) Log.d(TAG, "doJpegCapture");

        if (!mPreviewRunning) {
            if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");

            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
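        // The legacy camera stops its own preview once takePicture() is invoked, so mark the
        // preview as stopped; a later preview request will restart it in doPreviewCapture().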
        mPreviewRunning = false;
    }

    private void doPreviewCapture(RequestHolder request) throws IOException {
        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);

        startPreview();
    }

    private void configureOutputs(Collection<Surface> outputs) throws IOException {
        stopPreview();
        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mPreviewTexture = null;

        int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
        int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        if (outputs != null) {
            for (Surface s : outputs) {
                try {
                    int format = LegacyCameraDevice.detectSurfaceType(s);
                    LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                    switch (format) {
                        case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                            mCallbackOutputs.add(s);
                            break;
                        default:
                            mPreviewOutputs.add(s);
                            break;
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, skipping...", e);
                }
            }
        }
        mParams = mCamera.getParameters();

        List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "configureOutputs - Selected FPS range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        mParams.setRecordingHint(true);

        if (mPreviewOutputs.size() > 0) {
            List<Size> outputSizes = new ArrayList<>(outputs.size());
            for (Surface s : mPreviewOutputs) {
                try {
                    Size size = LegacyCameraDevice.getSurfaceSize(s);
                    outputSizes.add(size);
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, skipping...", e);
                }
            }

            Size largestOutput = SizeAreaComparator.findLargestByArea(outputSizes);

            // Find the largest JPEG dimension - assumed to have the same aspect ratio as the sensor.
            Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);

            List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
                    mParams.getSupportedPreviewSizes());

            // Use the smallest supported preview size with the same aspect ratio as the sensor
            // whose area is at least that of the largest configured output.  If none exists,
            // fall back to the largest supported preview size.
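            // Illustrative example (hypothetical sizes, assuming a 4:3 sensor): with a single
            // 1280x720 preview output and supported 4:3 preview sizes {640x480, 1600x1200,
            // 2048x1536}, the loop below selects 1600x1200 - the smallest size whose area
            // covers the output.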
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = s.getWidth() * (long) s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * (long) bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
            }
        }

        Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, mParams);
        if (smallestSupportedJpegSize != null) {
            /*
             * Set takePicture size to the smallest supported JPEG size large enough
             * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
             */

            Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
            mParams.setPictureSize(
                    smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.setConfigurationAndWait(mPreviewOutputs, mCaptureCollector);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }

        mCamera.setParameters(mParams);
        // TODO: configure the JPEG surface with some arbitrary size
        // using LegacyCameraDevice.nativeConfigureSurface
    }

    /**
     * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
     * than all of the configured {@code JPEG} outputs (by both width and height).
     *
     * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
     * still satisfies the above constraint.</p>
     *
     * <p>As a result, the returned size is guaranteed to be usable without needing
     * to upscale any of the outputs. If only one {@code JPEG} surface is used,
     * then no scaling/cropping is necessary between the taken picture and
     * the {@code JPEG} output surface.</p>
     *
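     * <p>Illustrative example (hypothetical sizes): with configured {@code JPEG} outputs of
     * 1600x1200 and 1920x1080, the bounding rectangle is 1920x1200, so the smallest supported
     * picture size that is at least 1920 wide and 1200 tall (for example, 2048x1536) is
     * returned; the configured outputs are later cropped/scaled out of that picture.</p>
     *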
     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
     * @param params api1 parameters (used for reading only)
     *
     * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
     *          {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
     *          surfaces.
     */
    private Size calculatePictureSize(
            Collection<Surface> callbackOutputs, Camera.Parameters params) {
        /*
         * Find the largest JPEG size (if any), from the configured outputs:
         * - the api1 picture size should be set to the smallest legal size that's at least as large
         *   as the largest configured JPEG size
         */
        List<Size> configuredJpegSizes = new ArrayList<Size>();
        for (Surface callbackSurface : callbackOutputs) {
            try {
                int format = LegacyCameraDevice.detectSurfaceType(callbackSurface);

                if (format != CameraMetadataNative.NATIVE_JPEG_FORMAT) {
                    continue; // Ignore non-JPEG callback formats
                }

                Size jpegSize = LegacyCameraDevice.getSurfaceSize(callbackSurface);
                configuredJpegSizes.add(jpegSize);
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
        if (!configuredJpegSizes.isEmpty()) {
            /*
             * Find the largest configured JPEG width, and height, independently
             * of the rest.
             *
             * The rest of the JPEG streams can be cropped out of this smallest bounding
             * rectangle.
             */
            int maxConfiguredJpegWidth = -1;
            int maxConfiguredJpegHeight = -1;
            for (Size jpegSize : configuredJpegSizes) {
                maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
                        jpegSize.getWidth() : maxConfiguredJpegWidth;
                maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
                        jpegSize.getHeight() : maxConfiguredJpegHeight;
            }
            Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);

            List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
                    params.getSupportedPictureSizes());

            /*
             * Find the smallest supported JPEG size that can fit the smallest bounding
             * rectangle for the configured JPEG sizes.
             */
            List<Size> candidateSupportedJpegSizes = new ArrayList<>();
            for (Size supportedJpegSize : supportedJpegSizes) {
                if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
                    supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                    candidateSupportedJpegSizes.add(supportedJpegSize);
                }
            }

            if (candidateSupportedJpegSizes.isEmpty()) {
                throw new AssertionError(
                        "Could not find any supported JPEG sizes large enough to fit " +
                        smallestBoundJpegSize);
            }

            Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
                    new SizeAreaComparator());

            if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
                Log.w(TAG,
                        String.format(
                                "configureOutputs - Will need to crop picture %s into "
                                + "smallest bound size %s",
                                smallestSupportedJpegSize, smallestBoundJpegSize));
            }

            return smallestSupportedJpegSize;
        }

        return null;
    }

    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    // Find the highest supported preview FPS range
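    // Illustrative example (hypothetical values, in the API1 milli-fps convention): given
    // {[15000, 15000], [7000, 30000], [30000, 30000]}, this returns [30000, 30000] - the
    // highest max FPS, with ties broken by the higher min FPS.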
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }

    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false;
        private final LegacyResultMapper mMapper = new LegacyResultMapper();

        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message:" + msg.what);
            }
            long startTime = 0;
            if (DEBUG) {
                startTime = SystemClock.elapsedRealtimeNanos();
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes +
                            " surfaces configured.");

                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing configure request.");
                        }
                    } catch (InterruptedException e) {
                        // TODO: report error to CameraDevice
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                    }

                    try {
                        configureOutputs(config.surfaces);
                    } catch (IOException e) {
                        // TODO: report error to CameraDevice
                        throw new IOError(e);
                    }
                    config.condition.open();
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Configure took " + totalTime + " ns");
                    }
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();
                    if (nextBurst == null) {
                        try {
                            boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                    TimeUnit.MILLISECONDS);
                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for empty.");
                            }
                        } catch (InterruptedException e) {
                            // TODO: report error to CameraDevice
                            Log.e(TAG, "Interrupted while waiting for requests to complete.");
                        }
                        mDeviceState.setIdle();
                        stopPreview();
                        break;
                    } else {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();

                        boolean paramsChanged = false;

                        // Lazily process the rest of the request
                        if (mLastRequest == null || mLastRequest.captureRequest != request) {

                            // The intermediate buffer is sometimes null, but we always need
                            // the camera1's configured preview size
                            Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());

                            LegacyRequest legacyRequest = new LegacyRequest(
                                    mCharacteristics, request, previewSize,
                                    mParams); // params are copied

                            mLastRequest = legacyRequest;
                            // Parameters are mutated as a side-effect
                            LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);

                            if (!mParams.same(legacyRequest.parameters)) {
                                mParams = legacyRequest.parameters;
                                mCamera.setParameters(mParams);

                                paramsChanged = true;
                            }
                        }

                        // Unconditionally process AF triggers, since they're non-idempotent
                        // - must be done after setting the most-up-to-date AF mode
                        mFocusStateMapper.processRequestTriggers(request, mParams);

                        try {
                            boolean success = mCaptureCollector.queueRequest(holder,
                                    mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);

                            if (!success) {
                                Log.e(TAG, "Timed out while queueing capture request.");
                            }
                            if (holder.hasPreviewTargets()) {
                                doPreviewCapture(holder);
                            }
                            if (holder.hasJpegTargets()) {
                                success = mCaptureCollector.
                                        waitForPreviewsEmpty(PREVIEW_FRAME_TIMEOUT *
                                                MAX_IN_FLIGHT_REQUESTS, TimeUnit.MILLISECONDS);
                                if (!success) {
                                    Log.e(TAG, "Timed out waiting for prior requests to complete.");
                                }
                                mReceivedJpeg.close();
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                }
                            }
                        } catch (IOException e) {
                            // TODO: report error to CameraDevice
                            throw new IOError(e);
                        } catch (InterruptedException e) {
                            // TODO: report error to CameraDevice
                            Log.e(TAG, "Interrupted during capture.", e);
                        }

                        if (paramsChanged) {
                            if (DEBUG) {
                                Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
                            }
                            mParams = mCamera.getParameters();

                            // Update parameters to the latest that we think the camera is using
                            mLastRequest.setParameters(mParams);
                        }

                        MutableLong timestampMutable = new MutableLong(/*value*/0L);
                        try {
                            boolean success = mCaptureCollector.waitForRequestCompleted(holder,
                                    REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
                                    /*out*/timestampMutable);

                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for request to complete.");
                            }
                        } catch (InterruptedException e) {
                            // TODO: report error to CameraDevice
                            Log.e(TAG, "Interrupted during request completion.", e);
                        }

                        CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
                                mLastRequest, timestampMutable.value, holder.getFrameNumber());
                        // Update AF state
                        mFocusStateMapper.mapResultTriggers(result);

                        mDeviceState.setCaptureResult(holder, result);
                    }
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Capture request took " + totalTime + " ns");
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing cleanup request.");
                        }
                    } catch (InterruptedException e) {
                        // TODO: report error to CameraDevice
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                    }
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                    }
                    if (mCamera != null) {
                        mCamera.release();
                    }
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param characteristics the static camera characteristics corresponding to this camera device
     * @param deviceState a {@link CameraDeviceState} state machine.
     */
    public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics,
                                CameraDeviceState deviceState) {
        mCamera = checkNotNull(camera, "camera must not be null");
        mCameraId = cameraId;
        mCharacteristics = checkNotNull(characteristics, "characteristics must not be null");
        String name = String.format("RequestThread-%d", cameraId);
        TAG = name;
        mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
        mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
        mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
        mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
    }

    /**
     * Start the request thread.
     */
    public void start() {
        mRequestThread.start();
    }

    /**
     * Flush the pending requests.
     */
    public void flush() {
        // TODO: Implement flush.
        Log.e(TAG, "flush not yet implemented.");
    }

    /**
     * Quit the request thread, and clean up everything.
     */
    public void quit() {
        Handler handler = mRequestThread.waitAndGetHandler();
        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
        mRequestThread.quitSafely();
        try {
            mRequestThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mRequestThread.getName(), mRequestThread.getId()));
        }
    }

    /**
     * Submit the given burst of requests to be captured.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last frame
     *                    that will be returned for this request, or the frame number of the last
     *                    frame that will be returned for the current repeating request if this
     *                    burst is set to be repeating.
     * @return the request id.
     */
    public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        Handler handler = mRequestThread.waitAndGetHandler();
        int ret = mRequestQueue.submit(requests, repeating, frameNumber);
        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
        return ret;
    }

    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *          {@code INVALID_FRAME} if none exists.
     */
    public long cancelRepeating(int requestId) {
        return mRequestQueue.stopRepeating(requestId);
    }


    /**
     * Configure with the current list of output Surfaces.
     *
     * <p>
     * This operation blocks until the configuration is complete.
     * </p>
     *
     * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
     *
     * @param outputs a {@link java.util.Collection} of outputs to configure.
     */
    public void configure(Collection<Surface> outputs) {
        Handler handler = mRequestThread.waitAndGetHandler();
        final ConditionVariable condition = new ConditionVariable(/*closed*/false);
        ConfigureHolder holder = new ConfigureHolder(condition, outputs);
        handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
        condition.block();
    }
}