RequestThreadManager.java revision 5776aafc7e70c0b79c4bee2bc50f44121b37c962
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.utils.LongParcelable;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOError;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * This class executes requests to the {@link Camera}.
 *
 * <p>
 * The main components of this class are:
 * - A message queue of requests to the {@link Camera}.
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - A {@link CameraDeviceState} state machine that manages the callbacks for various operations.
 * </p>
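 *
 * <p>A typical lifecycle, sketched from the public methods of this class (the legacy camera
 * device implementation is the real caller):</p>
 * <pre>{@code
 * RequestThreadManager manager = new RequestThreadManager(cameraId, camera, deviceState);
 * manager.start();
 * manager.configure(outputSurfaces); // blocks until the outputs are configured
 * LongParcelable lastFrame = new LongParcelable();
 * int requestId = manager.submitCaptureRequests(requests, true, lastFrame); // repeating burst
 * // ...
 * manager.cancelRepeating(requestId);
 * manager.quit(); // releases the Camera
 * }</pre>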
 */
public class RequestThreadManager {
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    private final Camera mCamera;

    private final CameraDeviceState mDeviceState;

    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

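    // How long to block waiting for the first preview frame / the jpeg data before logging an
    // error for the capture.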
    private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
    private static final int JPEG_FRAME_TIMEOUT = 1000; // ms

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private volatile long mLastJpegTimestamp;
    private volatile long mLastPreviewTimestamp;
    private volatile RequestHolder mInFlightPreview;
    private volatile RequestHolder mInFlightJpeg;

    private final List<Surface> mPreviewOutputs = new ArrayList<Surface>();
    private final List<Surface> mCallbackOutputs = new ArrayList<Surface>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue();
    private CaptureRequest mLastRequest = null;
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition;
        public final Collection<Surface> surfaces;

        public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Comparator for {@link Size} objects.
     *
     * <p>This comparator compares by rectangle area.  Tiebreaks on width.</p>
     */
    private static class SizeComparator implements Comparator<Size> {
        @Override
        public int compare(Size size, Size size2) {
            if (size == null || size2 == null) {
                throw new NullPointerException("Null argument passed to compare");
            }
            if (size.equals(size2)) return 0;
            long width = size.getWidth();
            long width2 = size2.getWidth();
            long area = width * size.getHeight();
            long area2 = width2 * size2.getHeight();
            if (area == area2) {
                return (width > width2) ? 1 : -1;
            }
            return (area > area2) ? 1 : -1;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
    public static class FpsCounter {
        // TODO: Hook this up to Systrace?
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        public synchronized double checkFps() {
            return mLastFps;
        }

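        /**
         * Log the most recently computed FPS, at most once every 5 seconds.
         */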
        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps);
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }

    /**
     * Fake preview for jpeg captures when there is no active preview.
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
    private final ConditionVariable mReceivedPreview = new ConditionVariable(false);

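    // Called by the Camera when the jpeg data is ready: write the encoded bytes into each
    // jpeg output surface of the in-flight request, then signal mReceivedJpeg.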
    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            RequestHolder holder = mInFlightJpeg;
            if (holder == null) {
                Log.w(TAG, "Dropping jpeg frame.");
                mInFlightJpeg = null;
                return;
            }
            for (Surface s : holder.getHolderTargets()) {
                if (RequestHolder.jpegType(s)) {
                    Log.i(TAG, "Producing jpeg buffer...");
                    LegacyCameraDevice.nativeSetSurfaceDimens(s, data.length, /*height*/1);
                    LegacyCameraDevice.nativeProduceFrame(s, data, data.length, /*height*/1,
                            CameraMetadataNative.NATIVE_JPEG_FORMAT);
                }
            }
            mReceivedJpeg.open();
        }
    };

    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mLastJpegTimestamp = SystemClock.elapsedRealtimeNanos();
        }
    };

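    // Called for each new frame on the intermediate preview SurfaceTexture: hand the frame to the
    // GL thread so it can be drawn into the configured preview outputs, record its timestamp, and
    // signal mReceivedPreview.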
    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    RequestHolder holder = mInFlightPreview;
                    if (holder == null) {
                        mGLThreadManager.queueNewFrame(null);
                        Log.w(TAG, "Dropping preview frame.");
                        return;
                    }

                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mInFlightPreview = null;

                    if (holder.hasPreviewTargets()) {
                        mGLThreadManager.queueNewFrame(holder.getHolderTargets());
                    }

                    mLastPreviewTimestamp = surfaceTexture.getTimestamp();
                    mReceivedPreview.open();
                }
            };

    private void stopPreview() {
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
    }

    private void startPreview() {
        if (!mPreviewRunning) {
            mCamera.startPreview();
            mPreviewRunning = true;
        }
    }

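    /**
     * Issue a jpeg capture for the given request.
     *
     * <p>The framework {@code Camera} requires an active preview to take a picture, so a dummy
     * preview texture is set up first if no preview is currently running.</p>
     */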
    private void doJpegCapture(RequestHolder request) throws IOException {
        if (!mPreviewRunning) {
            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
        mInFlightJpeg = request;
        // TODO: Hook up shutter callback to CameraDeviceStateListener#onCaptureStarted
        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
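        // takePicture stops the preview; mark it stopped so the next preview request restarts it.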
        mPreviewRunning = false;
    }

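    /**
     * Start a preview capture for the given request.
     *
     * <p>If the preview is not already running, this sizes the intermediate preview buffer and
     * selects an FPS range before starting the preview.</p>
     */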
    private void doPreviewCapture(RequestHolder request) throws IOException {
        mInFlightPreview = request;
        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);
        Camera.Parameters params = mCamera.getParameters();
        List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "doPreviewCapture - Selected range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        params.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        params.setRecordingHint(true);
        mCamera.setParameters(params);

        startPreview();
    }

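    /**
     * Configure the camera and the GL pipeline for the given output surfaces.
     *
     * <p>Jpeg-format surfaces are routed directly through the jpeg callback; all other surfaces
     * are treated as preview outputs and drawn by the {@link GLThreadManager}. An intermediate
     * preview buffer size is chosen to cover the largest configured preview output.</p>
     */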
    private void configureOutputs(Collection<Surface> outputs) throws IOException {
        stopPreview();
        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mPreviewTexture = null;
        mInFlightPreview = null;
        mInFlightJpeg = null;

        if (outputs != null) {
            for (Surface s : outputs) {
                int format = LegacyCameraDevice.nativeDetectSurfaceType(s);
                switch (format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        mCallbackOutputs.add(s);
                        break;
                    default:
                        mPreviewOutputs.add(s);
                        break;
                }
            }
        }
        mParams = mCamera.getParameters();
        if (mPreviewOutputs.size() > 0) {
            List<Size> outputSizes = new ArrayList<>(outputs.size());
            for (Surface s : mPreviewOutputs) {
                int[] dimens = {0, 0};
                LegacyCameraDevice.nativeDetectSurfaceDimens(s, dimens);
                outputSizes.add(new Size(dimens[0], dimens[1]));
            }

            Size largestOutput = findLargestByArea(outputSizes);

            // Find the largest jpeg dimension - assumed to match the sensor's aspect ratio.
            List<Size> supportedJpegSizes = convertSizeList(mParams.getSupportedPictureSizes());
            Size largestJpegDimen = findLargestByArea(supportedJpegSizes);

            List<Size> supportedPreviewSizes = convertSizeList(mParams.getSupportedPreviewSizes());

            // Use the smallest preview size with the sensor's aspect ratio whose area is at least
            // as large as the largest configured output.  If none exists, fall back to the largest
            // supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = s.getWidth() * s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No intermediate buffer selected, no preview outputs were configured");
            }
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.setConfigurationAndWait(mPreviewOutputs);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }
    }

    private static Size findLargestByArea(List<Size> sizes) {
        return Collections.max(sizes, new SizeComparator());
    }

    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    private static List<Size> convertSizeList(List<Camera.Size> sizeList) {
        List<Size> sizes = new ArrayList<>(sizeList.size());
        for (Camera.Size s : sizeList) {
            sizes.add(new Size(s.width, s.height));
        }
        return sizes;
    }

    // Return the supported FPS range with the highest maximum FPS, breaking ties in favor of the
    // higher minimum FPS.
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }

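    // Callback that runs on the request thread: handles output configuration, executes capture
    // bursts from the request queue, and performs cleanup when the device is closed.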
    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false;

        @SuppressWarnings("unchecked")
        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message: " + msg.what);
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes +
                            " surfaces configured.");
                    try {
                        configureOutputs(config.surfaces);
                    } catch (IOException e) {
                        // TODO: report error to CameraDevice
                        throw new IOError(e);
                    }
                    config.condition.open();
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();
                    if (nextBurst == null) {
                        mDeviceState.setIdle();
                        stopPreview();
                        break;
                    } else {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst.
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();
                        if (mLastRequest == null || mLastRequest != request) {
                            mLastRequest = request;
                            LegacyMetadataMapper.convertRequestMetadata(mLastRequest,
                                /*out*/mParams);
                            mCamera.setParameters(mParams);
                        }
                        mDeviceState.setCaptureStart(holder);
                        long timestamp = 0;
                        try {
                            if (holder.hasPreviewTargets()) {
                                mReceivedPreview.close();
                                doPreviewCapture(holder);
                                if (!mReceivedPreview.block(PREVIEW_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for preview callback!");
                                }
                                timestamp = mLastPreviewTimestamp;
                            }
                            if (holder.hasJpegTargets()) {
                                mReceivedJpeg.close();
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                }
                                mInFlightJpeg = null;
                                timestamp = mLastJpegTimestamp;
                            }
                        } catch (IOException e) {
                            // TODO: err handling
                            throw new IOError(e);
                        }
                        CameraMetadataNative result = LegacyMetadataMapper.convertResultMetadata(
                                mParams, request, timestamp);
                        mDeviceState.setCaptureResult(holder, result);
                    }
                    if (DEBUG) {
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                    }
                    if (mCamera != null) {
                        mCamera.release();
                    }
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param deviceState a {@link CameraDeviceState} state machine.
     */
    public RequestThreadManager(int cameraId, Camera camera,
                                CameraDeviceState deviceState) {
        mCamera = camera;
        mCameraId = cameraId;
        String name = String.format("RequestThread-%d", cameraId);
        TAG = name;
        mDeviceState = deviceState;
        mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
    }

    /**
     * Start the request thread.
     */
    public void start() {
        mRequestThread.start();
    }

    /**
     * Flush the pending requests.
     */
    public void flush() {
        // TODO: Implement flush.
        Log.e(TAG, "flush not yet implemented.");
    }

    /**
     * Quit the request thread, and clean up everything.
     */
    public void quit() {
        Handler handler = mRequestThread.waitAndGetHandler();
        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
        mRequestThread.quitSafely();
        try {
            mRequestThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mRequestThread.getName(), mRequestThread.getId()));
        }
    }

    /**
     * Submit the given burst of requests to be captured.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last frame
     *                    that will be returned for this request, or the frame number of the last
     *                    frame that will be returned for the current repeating request if this
     *                    burst is set to be repeating.
     * @return the request id.
     */
    public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        Handler handler = mRequestThread.waitAndGetHandler();
        int ret = mRequestQueue.submit(requests, repeating, frameNumber);
        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
        return ret;
    }

    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *          {@code INVALID_FRAME} if none exists.
     */
    public long cancelRepeating(int requestId) {
        return mRequestQueue.stopRepeating(requestId);
    }

    /**
     * Configure with the current list of output Surfaces.
     *
     * <p>
     * This operation blocks until the configuration is complete.
     * </p>
     *
     * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
     *
     * @param outputs a {@link java.util.Collection} of outputs to configure.
     */
    public void configure(Collection<Surface> outputs) {
        Handler handler = mRequestThread.waitAndGetHandler();
        final ConditionVariable condition = new ConditionVariable(/*closed*/false);
        ConfigureHolder holder = new ConfigureHolder(condition, outputs);
        handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
        condition.block();
    }
}