// RequestThreadManager.java revision a296fece2b974a11bc624fd67b275863f17df867
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.hardware.camera2.legacy;
18
19import android.graphics.SurfaceTexture;
20import android.hardware.Camera;
21import android.hardware.camera2.CaptureRequest;
22import android.hardware.camera2.utils.LongParcelable;
23import android.hardware.camera2.impl.CameraMetadataNative;
24import android.os.ConditionVariable;
25import android.os.Handler;
26import android.os.Message;
27import android.os.SystemClock;
28import android.util.Log;
29import android.util.Pair;
30import android.util.Size;
31import android.view.Surface;
32
33import java.io.IOError;
34import java.io.IOException;
35import java.util.ArrayList;
36import java.util.Collection;
37import java.util.Collections;
38import java.util.Comparator;
39import java.util.List;
40
41/**
42 * This class executes requests to the {@link Camera}.
43 *
44 * <p>
45 * The main components of this class are:
46 * - A message queue of requests to the {@link Camera}.
47 * - A thread that consumes requests to the {@link Camera} and executes them.
48 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
49 * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
50 * </p>
51 */
52public class RequestThreadManager {
    // Per-instance log tag, "RequestThread-<cameraId>"; assigned in the constructor.
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    private final Camera mCamera;

    private final CameraDeviceState mDeviceState;

    // Message codes handled by mRequestHandlerCb on the request thread.
    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    // Bounded waits for the corresponding frame callbacks (see mRequestHandlerCb).
    private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
    private static final int JPEG_FRAME_TIMEOUT = 1000; // ms

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    // Whether Camera#startPreview is active; only touched on the request thread.
    private boolean mPreviewRunning = false;

    // Written from camera/GL callback threads and read on the request thread,
    // hence volatile.
    private volatile long mLastJpegTimestamp;
    private volatile long mLastPreviewTimestamp;
    private volatile RequestHolder mInFlightPreview;
    private volatile RequestHolder mInFlightJpeg;

    // Output surfaces partitioned by type in configureOutputs().
    private final List<Surface> mPreviewOutputs = new ArrayList<Surface>();
    private final List<Surface> mCallbackOutputs = new ArrayList<Surface>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    // Preview buffer size chosen to cover all configured preview outputs.
    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue();
    private CaptureRequest mLastRequest = null;
    // Placeholder preview target used so takePicture() can run when no real
    // preview output is configured (see createDummySurface()).
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");
92
    /**
     * Container object for Configure messages.
     *
     * <p>Pairs the requested output {@link Surface}s with a {@link ConditionVariable}
     * that the request thread opens once configuration completes, unblocking the
     * caller of {@link #configure}.</p>
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition; // opened when configuration is done
        public final Collection<Surface> surfaces; // may be null (unconfigure)

        public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }
105
106
107    /**
108     * Comparator for {@link Size} objects by the area.
109     *
110     * <p>This comparator totally orders by rectangle area. Tiebreaks on width.</p>
111     */
112    private static class SizeAreaComparator implements Comparator<Size> {
113        @Override
114        public int compare(Size size, Size size2) {
115            if (size == null || size2 == null) {
116                throw new NullPointerException("Null argument passed to compare");
117            }
118            if (size.equals(size2)) return 0;
119            long width = size.getWidth();
120            long width2 = size2.getWidth();
121            long area = width * size.getHeight();
122            long area2 = width2 * size2.getHeight();
123            if (area == area2) {
124                return (width > width2) ? 1 : -1;
125            }
126            return (area > area2) ? 1 : -1;
127
128        }
129    }
130
131    /**
132     * Counter class used to calculate and log the current FPS of frame production.
133     */
134    public static class FpsCounter {
135        //TODO: Hook this up to SystTrace?
136        private static final String TAG = "FpsCounter";
137        private int mFrameCount = 0;
138        private long mLastTime = 0;
139        private long mLastPrintTime = 0;
140        private double mLastFps = 0;
141        private final String mStreamType;
142        private static final long NANO_PER_SECOND = 1000000000; //ns
143
144        public FpsCounter(String streamType) {
145            mStreamType = streamType;
146        }
147
148        public synchronized void countFrame() {
149            mFrameCount++;
150            long nextTime = SystemClock.elapsedRealtimeNanos();
151            if (mLastTime == 0) {
152                mLastTime = nextTime;
153            }
154            if (nextTime > mLastTime + NANO_PER_SECOND) {
155                long elapsed = nextTime - mLastTime;
156                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
157                mFrameCount = 0;
158                mLastTime = nextTime;
159            }
160        }
161
162        public synchronized double checkFps() {
163            return mLastFps;
164        }
165
166        public synchronized void staggeredLog() {
167            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
168                mLastPrintTime = mLastTime;
169                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps );
170            }
171        }
172
173        public synchronized void countAndLog() {
174            countFrame();
175            staggeredLog();
176        }
177    }
    /**
     * Fake preview for jpeg captures when there is no active preview
     *
     * <p>Lazily creates a small off-screen {@link SurfaceTexture}/{@link Surface}
     * pair; used by {@code doJpegCapture} as a preview target when none is
     * configured.</p>
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }
189
    // Handshakes between the camera callback threads and the request thread:
    // closed before issuing a capture, opened by the corresponding frame callback.
    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
    private final ConditionVariable mReceivedPreview = new ConditionVariable(false);
192
    // Invoked by the framework on a camera callback thread when jpeg data is ready.
    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            RequestHolder holder = mInFlightJpeg;
            if (holder == null) {
                // No request is waiting for this picture; discard it.
                Log.w(TAG, "Dropping jpeg frame.");
                mInFlightJpeg = null;
                return;
            }
            for (Surface s : holder.getHolderTargets()) {
                if (RequestHolder.jpegType(s)) {
                    Log.i(TAG, "Producing jpeg buffer...");
                    // The jpeg payload is written as a single row of data.length bytes.
                    LegacyCameraDevice.nativeSetSurfaceDimens(s, data.length, /*height*/1);
                    LegacyCameraDevice.nativeProduceFrame(s, data, data.length, /*height*/1,
                            CameraMetadataNative.NATIVE_JPEG_FORMAT);
                }
            }
            // Unblock the request thread waiting in the capture loop.
            mReceivedJpeg.open();
        }
    };
214
    // Records the shutter time; used as the capture timestamp for jpeg requests.
    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mLastJpegTimestamp = SystemClock.elapsedRealtimeNanos();
        }
    };
221
    // Invoked when the GL consumer's SurfaceTexture has a new preview frame.
    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    RequestHolder holder = mInFlightPreview;
                    if (holder == null) {
                        // No request waiting; still notify the GL thread (with no
                        // targets) so the texture image gets consumed.
                        mGLThreadManager.queueNewFrame(null);
                        Log.w(TAG, "Dropping preview frame.");
                        return;
                    }

                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mInFlightPreview = null;

                    if (holder.hasPreviewTargets()) {
                        mGLThreadManager.queueNewFrame(holder.getHolderTargets());
                    }

                    /**
                     * TODO: Get timestamp from GL thread after buffer update.
                     */
                    mLastPreviewTimestamp = surfaceTexture.getTimestamp();
                    // Unblock the request thread waiting for this frame.
                    mReceivedPreview.open();
                }
            };
249
250    private void stopPreview() {
251        if (mPreviewRunning) {
252            mCamera.stopPreview();
253            mPreviewRunning = false;
254        }
255    }
256
257    private void startPreview() {
258        if (!mPreviewRunning) {
259            mCamera.startPreview();
260            mPreviewRunning = true;
261        }
262    }
263
    /**
     * Capture a jpeg for the given request via api1 {@code takePicture()}.
     *
     * <p>If no preview is running, a placeholder preview texture is installed
     * first, since takePicture() is issued on top of an active preview.</p>
     *
     * @param request the request whose jpeg targets will receive the picture data.
     * @throws IOException if the placeholder preview texture cannot be set.
     */
    private void doJpegCapture(RequestHolder request) throws IOException {
        if (DEBUG) Log.d(TAG, "doJpegCapture");

        if (!mPreviewRunning) {
            if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");

            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
        mInFlightJpeg = request;
        // TODO: Hook up shutter callback to CameraDeviceStateListener#onCaptureStarted
        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
        // api1 takePicture() stops the preview; record that so a later preview
        // request restarts it.
        mPreviewRunning = false;
    }
279
    /**
     * Start (or continue) preview capture for the given request.
     *
     * <p>No-op beyond bookkeeping if preview is already running.  Otherwise this
     * sizes the intermediate preview texture, selects the best supported FPS
     * range, and starts the api1 preview.</p>
     *
     * @param request recorded as in-flight for the preview frame callback.
     * @throws IOException if the preview texture cannot be set.
     * @throws IllegalStateException if no preview outputs were configured.
     */
    private void doPreviewCapture(RequestHolder request) throws IOException {
        mInFlightPreview = request;
        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        // Size the GL consumer's buffers to the intermediate size chosen during
        // configureOutputs().
        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);
        Camera.Parameters params = mCamera.getParameters();
        List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "doPreviewCapture - Selected range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        params.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        params.setRecordingHint(true);
        mCamera.setParameters(params);

        startPreview();
    }
309
310
    /**
     * (Re)configure the camera and GL pipeline for the given output surfaces.
     *
     * <p>Stops any running preview, quiesces the GL thread, partitions the outputs
     * into preview vs. jpeg surfaces, selects an intermediate preview buffer size
     * and an api1 picture size, then (re)configures the GL thread with the preview
     * outputs.</p>
     *
     * @param outputs surfaces to configure, or {@code null}/empty to unconfigure.
     * @throws IOException if camera parameters cannot be queried or applied.
     */
    private void configureOutputs(Collection<Surface> outputs) throws IOException {
        stopPreview();
        // Quiesce the GL thread so no frames are drawn while outputs change.
        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mPreviewTexture = null;
        mInFlightPreview = null;
        mInFlightJpeg = null;

        // Partition: jpeg-format surfaces are serviced via takePicture(); everything
        // else is treated as a preview output fed from the GL thread.
        if (outputs != null) {
            for (Surface s : outputs) {
                int format = LegacyCameraDevice.nativeDetectSurfaceType(s);
                switch (format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        mCallbackOutputs.add(s);
                        break;
                    default:
                        mPreviewOutputs.add(s);
                        break;
                }
            }
        }
        mParams = mCamera.getParameters();
        if (mPreviewOutputs.size() > 0) {
            List<Size> outputSizes = new ArrayList<>(outputs.size());
            for (Surface s : mPreviewOutputs) {
                int[] dimens = {0, 0};
                LegacyCameraDevice.nativeDetectSurfaceDimens(s, dimens);
                outputSizes.add(new Size(dimens[0], dimens[1]));
            }

            Size largestOutput = findLargestByArea(outputSizes);

            // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
            List<Size> supportedJpegSizes = convertSizeList(mParams.getSupportedPictureSizes());
            Size largestJpegDimen = findLargestByArea(supportedJpegSizes);

            List<Size> supportedPreviewSizes = convertSizeList(mParams.getSupportedPreviewSizes());

            // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
            // of the configured output dimensions.  If none exists, fall back to using the largest
            // supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = s.getWidth() * s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            // No preview outputs; nothing to tee through the GL thread.
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
            }
        }

        Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs, mParams);
        if (smallestSupportedJpegSize != null) {
            /*
             * Set takePicture size to the smallest supported JPEG size large enough
             * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
             */

            Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
            mParams.setPictureSize(
                    smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.setConfigurationAndWait(mPreviewOutputs);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        // Route new-frame notifications from the GL consumer back into this class.
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }
    }
405
406    /**
407     * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
408     * than all of the configured {@code JPEG} outputs (by both width and height).
409     *
410     * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
411     * still satisfies the above constraint.</p>
412     *
413     * <p>As a result, the returned size is guaranteed to be usable without needing
414     * to upscale any of the outputs. If only one {@code JPEG} surface is used,
415     * then no scaling/cropping is necessary between the taken picture and
416     * the {@code JPEG} output surface.</p>
417     *
418     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
419     * @param params api1 parameters (used for reading only)
420     *
421     * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
422     *          {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
423     *          surfaces.
424     */
425    private Size calculatePictureSize(
426            Collection<Surface> callbackOutputs, Camera.Parameters params) {
427        /*
428         * Find the largest JPEG size (if any), from the configured outputs:
429         * - the api1 picture size should be set to the smallest legal size that's at least as large
430         *   as the largest configured JPEG size
431         */
432        List<Size> configuredJpegSizes = new ArrayList<Size>();
433        for (Surface callbackSurface : callbackOutputs) {
434            int format = LegacyCameraDevice.nativeDetectSurfaceType(callbackSurface);
435
436            if (format != CameraMetadataNative.NATIVE_JPEG_FORMAT) {
437                continue; // Ignore non-JPEG callback formats
438            }
439
440            Size jpegSize = LegacyCameraDevice.getSurfaceSize(callbackSurface);
441            configuredJpegSizes.add(jpegSize);
442        }
443        if (!configuredJpegSizes.isEmpty()) {
444            /*
445             * Find the largest configured JPEG width, and height, independently
446             * of the rest.
447             *
448             * The rest of the JPEG streams can be cropped out of this smallest bounding
449             * rectangle.
450             */
451            int maxConfiguredJpegWidth = -1;
452            int maxConfiguredJpegHeight = -1;
453            for (Size jpegSize : configuredJpegSizes) {
454                maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
455                        jpegSize.getWidth() : maxConfiguredJpegWidth;
456                maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
457                        jpegSize.getHeight() : maxConfiguredJpegHeight;
458            }
459            Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
460
461            List<Size> supportedJpegSizes = convertSizeList(params.getSupportedPictureSizes());
462
463            /*
464             * Find the smallest supported JPEG size that can fit the smallest bounding
465             * rectangle for the configured JPEG sizes.
466             */
467            List<Size> candidateSupportedJpegSizes = new ArrayList<>();
468            for (Size supportedJpegSize : supportedJpegSizes) {
469                if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
470                    supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
471                    candidateSupportedJpegSizes.add(supportedJpegSize);
472                }
473            }
474
475            if (candidateSupportedJpegSizes.isEmpty()) {
476                throw new AssertionError(
477                        "Could not find any supported JPEG sizes large enough to fit " +
478                        smallestBoundJpegSize);
479            }
480
481            Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
482                    new SizeAreaComparator());
483
484            if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
485                Log.w(TAG,
486                        String.format(
487                                "configureOutputs - Will need to crop picture %s into "
488                                + "smallest bound size %s",
489                                smallestSupportedJpegSize, smallestBoundJpegSize));
490            }
491
492            return smallestSupportedJpegSize;
493        }
494
495        return null;
496    }
497
    /** @return the size in {@code sizes} with the largest area (ties broken on width). */
    private static Size findLargestByArea(List<Size> sizes) {
        return Collections.max(sizes, new SizeAreaComparator());
    }
501
    /**
     * Check whether two sizes have approximately the same aspect ratio,
     * comparing width/height as floats within {@link #ASPECT_RATIO_TOLERANCE}.
     */
    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }
508
509    private static List<Size> convertSizeList(List<Camera.Size> sizeList) {
510        List<Size> sizes = new ArrayList<>(sizeList.size());
511        for (Camera.Size s : sizeList) {
512            sizes.add(new Size(s.width, s.height));
513        }
514        return sizes;
515    }
516
517    // Calculate the highest FPS range supported
518    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
519        if (frameRates.size() == 0) {
520            Log.e(TAG, "No supported frame rates returned!");
521            return null;
522        }
523
524        int bestMin = 0;
525        int bestMax = 0;
526        int bestIndex = 0;
527        int index = 0;
528        for (int[] rate : frameRates) {
529            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
530            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
531            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
532                bestMin = minFps;
533                bestMax = maxFps;
534                bestIndex = index;
535            }
536            index++;
537        }
538
539        return frameRates.get(bestIndex);
540    }
541
    // Request-thread message loop: configures outputs, executes capture bursts,
    // and performs final cleanup.  Runs entirely on mRequestThread.
    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false; // set by MSG_CLEANUP; later messages are dropped

        @SuppressWarnings("unchecked")
        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message:" + msg.what);
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes +
                            " surfaces configured.");
                    try {
                        configureOutputs(config.surfaces);
                    } catch (IOException e) {
                        // TODO: report error to CameraDevice
                        throw new IOError(e);
                    }
                    // Unblock the caller waiting in configure().
                    config.condition.open();
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();
                    if (nextBurst == null) {
                        // Queue is empty: go idle and stop the preview.
                        mDeviceState.setIdle();
                        stopPreview();
                        break;
                    } else {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();
                        // Only push parameters to the camera when the request changes.
                        if (mLastRequest == null || mLastRequest != request) {
                            mLastRequest = request;
                            LegacyMetadataMapper.convertRequestMetadata(mLastRequest,
                                /*out*/mParams);
                            mCamera.setParameters(mParams);
                        }
                        mDeviceState.setCaptureStart(holder);
                        long timestamp = 0;
                        try {
                            if (holder.hasPreviewTargets()) {
                                // Issue the preview and wait (bounded) for a frame.
                                mReceivedPreview.close();
                                doPreviewCapture(holder);
                                if (!mReceivedPreview.block(PREVIEW_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for preview callback!");
                                }
                                timestamp = mLastPreviewTimestamp;
                            }
                            if (holder.hasJpegTargets()) {
                                // Issue takePicture and wait (bounded) for jpeg data.
                                mReceivedJpeg.close();
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                }
                                mInFlightJpeg = null;
                                timestamp = mLastJpegTimestamp;
                            }
                        } catch (IOException e) {
                            // TODO: err handling
                            throw new IOError(e);
                        }
                        // Fall back to the current time if no callback supplied one.
                        if (timestamp == 0) {
                            timestamp = SystemClock.elapsedRealtimeNanos();
                        }
                        CameraMetadataNative result = LegacyMetadataMapper.convertResultMetadata(mParams,
                                request, timestamp);
                        mDeviceState.setCaptureResult(holder, result);
                    }
                    if (DEBUG) {
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    // Tear down the GL thread and release the camera; all further
                    // messages are ignored via mCleanup.
                    mCleanup = true;
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                    }
                    if (mCamera != null) {
                        mCamera.release();
                    }
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };
647
648    /**
649     * Create a new RequestThreadManager.
650     *
651     * @param cameraId the id of the camera to use.
652     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
653     *               object, and is responsible for closing it.
654     * @param deviceState a {@link CameraDeviceState} state machine.
655     */
656    public RequestThreadManager(int cameraId, Camera camera,
657                                CameraDeviceState deviceState) {
658        mCamera = camera;
659        mCameraId = cameraId;
660        String name = String.format("RequestThread-%d", cameraId);
661        TAG = name;
662        mDeviceState = deviceState;
663        mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
664    }
665
    /**
     * Start the request thread.
     *
     * <p>Messages posted to this manager are processed by {@code mRequestHandlerCb}
     * on the started thread.</p>
     */
    public void start() {
        mRequestThread.start();
    }
672
    /**
     * Flush the pending requests.
     *
     * <p>NOTE: not yet implemented; currently only logs an error.</p>
     */
    public void flush() {
        // TODO: Implement flush.
        Log.e(TAG, "flush not yet implemented.");
    }
680
681    /**
682     * Quit the request thread, and clean up everything.
683     */
684    public void quit() {
685        Handler handler = mRequestThread.waitAndGetHandler();
686        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
687        mRequestThread.quitSafely();
688        try {
689            mRequestThread.join();
690        } catch (InterruptedException e) {
691            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
692                    mRequestThread.getName(), mRequestThread.getId()));
693        }
694    }
695
    /**
     * Submit the given burst of requests to be captured.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last frame
     *                    that will be returned for this request, or the frame number of the last
     *                    frame that will be returned for the current repeating request if this
     *                    burst is set to be repeating.
     * @return the request id.
     */
    public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        // Blocks until the request thread's handler is available.
        Handler handler = mRequestThread.waitAndGetHandler();
        int ret = mRequestQueue.submit(requests, repeating, frameNumber);
        // Kick the request thread to start consuming the queue.
        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
        return ret;
    }
716
    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *          {@code INVALID_FRAME} if none exists.
     */
    public long cancelRepeating(int requestId) {
        // Delegates entirely to the request queue; no thread interaction needed.
        return mRequestQueue.stopRepeating(requestId);
    }
727
728
    /**
     * Configure with the current list of output Surfaces.
     *
     * <p>
     * This operation blocks until the configuration is complete.
     * </p>
     *
     * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
     *
     * @param outputs a {@link java.util.Collection} of outputs to configure.
     */
    public void configure(Collection<Surface> outputs) {
        Handler handler = mRequestThread.waitAndGetHandler();
        // The request thread opens this condition once configureOutputs() finishes.
        final ConditionVariable condition = new ConditionVariable(/*closed*/false);
        ConfigureHolder holder = new ConfigureHolder(condition, outputs);
        handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
        condition.block();
    }
747}
748