RequestThreadManager.java revision 49b2b135105e5ca5dc9547f4c6de473bebad647d
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.legacy;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.utils.LongParcelable;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Message;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import java.io.IOError;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * This class executes requests to the {@link Camera}.
 *
 * <p>
 * The main components of this class are:
 * - A message queue of requests to the {@link Camera}.
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - A {@link CameraDeviceState} state machine that manages the callbacks for various operations.
 * </p>
 */
public class RequestThreadManager {
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    private final Camera mCamera;

    private final CameraDeviceState mDeviceState;

    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    private static final int PREVIEW_FRAME_TIMEOUT = 300; // ms
    private static final int JPEG_FRAME_TIMEOUT = 1000; // ms

    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private volatile long mLastJpegTimestamp;
    private volatile long mLastPreviewTimestamp;
    private volatile RequestHolder mInFlightPreview;
    private volatile RequestHolder mInFlightJpeg;

    private final List<Surface> mPreviewOutputs = new ArrayList<Surface>();
    private final List<Surface> mCallbackOutputs = new ArrayList<Surface>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue();
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition;
        public final Collection<Surface> surfaces;

        public ConfigureHolder(ConditionVariable condition, Collection<Surface> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Comparator for {@link Size} objects.
     *
     * <p>This comparator compares by rectangle area.  Tiebreaks on width.</p>
     */
    private static class SizeComparator implements Comparator<Size> {
        @Override
        public int compare(Size size, Size size2) {
            if (size == null || size2 == null) {
                throw new NullPointerException("Null argument passed to compare");
            }
            if (size.equals(size2)) return 0;
            long width = size.getWidth();
            long width2 = size2.getWidth();
            long area = width * size.getHeight();
            long area2 = width2 * size2.getHeight();
            if (area == area2) {
                return (width > width2) ? 1 : -1;
            }
            return (area > area2) ? 1 : -1;
        }
    }
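
    // Illustrative only (not part of the original source): how this comparator orders sizes.
    //
    //   SizeComparator cmp = new SizeComparator();
    //   cmp.compare(new Size(320, 240), new Size(640, 480)); // < 0: smaller area sorts first
    //   cmp.compare(new Size(640, 480), new Size(480, 640)); // > 0: equal area, wider is larger
    //   Collections.max(previewSizes, cmp);                  // largest-area Size in the list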

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
    public static class FpsCounter {
        //TODO: Hook this up to Systrace?
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        public synchronized double checkFps() {
            return mLastFps;
        }

        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps);
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }
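
    // Illustrative only (hypothetical usage, not part of the original source): one counter is
    // driven per frame source, and logging is throttled to roughly every five seconds.
    //
    //   FpsCounter previewFps = new FpsCounter("Preview");
    //   previewFps.countAndLog();            // call once per produced frame
    //   double fps = previewFps.checkFps();  // last computed FPS; 0.0 until a full second elapses
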
    /**
     * Fake preview for jpeg captures when there is no active preview.
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
    private final ConditionVariable mReceivedPreview = new ConditionVariable(false);

    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            RequestHolder holder = mInFlightJpeg;
            if (holder == null) {
                Log.w(TAG, "Dropping jpeg frame.");
                mInFlightJpeg = null;
                return;
            }
            for (Surface s : holder.getHolderTargets()) {
                if (RequestHolder.jpegType(s)) {
                    Log.i(TAG, "Producing jpeg buffer...");
                    LegacyCameraDevice.nativeSetSurfaceDimens(s, data.length, /*height*/1);
                    LegacyCameraDevice.nativeProduceFrame(s, data, data.length, /*height*/1,
                            CameraMetadataNative.NATIVE_JPEG_FORMAT);
                }
            }
            mReceivedJpeg.open();
        }
    };

    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mLastJpegTimestamp = SystemClock.elapsedRealtimeNanos();
        }
    };

    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    RequestHolder holder = mInFlightPreview;
                    if (holder == null) {
                        mGLThreadManager.queueNewFrame(null);
                        Log.w(TAG, "Dropping preview frame.");
                        return;
                    }

                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mInFlightPreview = null;

                    if (holder.hasPreviewTargets()) {
                        mGLThreadManager.queueNewFrame(holder.getHolderTargets());
                    }

                    mLastPreviewTimestamp = surfaceTexture.getTimestamp();
                    mReceivedPreview.open();
                }
            };

    private void stopPreview() {
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
    }

    private void startPreview() {
        if (!mPreviewRunning) {
            mCamera.startPreview();
            mPreviewRunning = true;
        }
    }

    private void doJpegCapture(RequestHolder request) throws IOException {
        if (!mPreviewRunning) {
            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
        mInFlightJpeg = request;
        // TODO: Hook up shutter callback to CameraDeviceStateListener#onCaptureStarted
        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
        mPreviewRunning = false;
    }

    private void doPreviewCapture(RequestHolder request) throws IOException {
        mInFlightPreview = request;
        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);
        Camera.Parameters params = mCamera.getParameters();
        List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "doPreviewCapture - Selected range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        params.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        params.setRecordingHint(true);
        mCamera.setParameters(params);

        startPreview();
    }

    private void configureOutputs(Collection<Surface> outputs) throws IOException {
        stopPreview();
        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mPreviewTexture = null;
        mInFlightPreview = null;
        mInFlightJpeg = null;

        if (outputs != null) {
            for (Surface s : outputs) {
                int format = LegacyCameraDevice.nativeDetectSurfaceType(s);
                switch (format) {
                    case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                        mCallbackOutputs.add(s);
                        break;
                    default:
                        mPreviewOutputs.add(s);
                        break;
                }
            }
        }
        mParams = mCamera.getParameters();
        if (mPreviewOutputs.size() > 0) {
            List<Size> outputSizes = new ArrayList<>(mPreviewOutputs.size());
            for (Surface s : mPreviewOutputs) {
                int[] dimens = {0, 0};
                LegacyCameraDevice.nativeDetectSurfaceDimens(s, dimens);
                outputSizes.add(new Size(dimens[0], dimens[1]));
            }

            Size largestOutput = findLargestByArea(outputSizes);

            // Find the largest supported jpeg dimension - assumed to have the same aspect ratio
            // as the sensor.
            List<Size> supportedJpegSizes = convertSizeList(mParams.getSupportedPictureSizes());
            Size largestJpegDimen = findLargestByArea(supportedJpegSizes);

            List<Size> supportedPreviewSizes = convertSizeList(mParams.getSupportedPreviewSizes());

            // Use the smallest preview dimension with the same aspect ratio as the sensor that is
            // at least as large as all of the configured output dimensions.  If none exists, fall
            // back to using the largest supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                long currArea = (long) s.getWidth() * s.getHeight();
                long bestArea = (long) bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No intermediate buffer selected, no preview outputs were configured");
            }
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        mGLThreadManager.setConfigurationAndWait(mPreviewOutputs);
        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }
    }

    private static Size findLargestByArea(List<Size> sizes) {
        return Collections.max(sizes, new SizeComparator());
    }

    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    private static List<Size> convertSizeList(List<Camera.Size> sizeList) {
        List<Size> sizes = new ArrayList<>(sizeList.size());
        for (Camera.Size s : sizeList) {
            sizes.add(new Size(s.width, s.height));
        }
        return sizes;
    }

    // Select the highest supported FPS range: prefer the highest max FPS, breaking ties on the
    // highest min FPS.
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }
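
    // Illustrative only (values are arbitrary examples, in the milli-fps units reported by
    // Camera.Parameters#getSupportedPreviewFpsRange):
    //
    //   supported: {15000, 15000}, {24000, 30000}, {30000, 30000}
    //   selected:  {30000, 30000}   (highest max; ties broken by highest min)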

    private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
        private boolean mCleanup = false;
        private final List<RequestHolder> mRepeating = null;

        @SuppressWarnings("unchecked")
        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message: " + msg.what);
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes +
                            " surfaces configured.");
                    try {
                        configureOutputs(config.surfaces);
                    } catch (IOException e) {
                        // TODO: report error to CameraDevice
                        throw new IOError(e);
                    }
                    config.condition.open();
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();

                    // Get the next burst from the request queue.
                    Pair<BurstHolder, Long> nextBurst = mRequestQueue.getNext();
                    if (nextBurst == null) {
                        mDeviceState.setIdle();
                        stopPreview();
                        break;
                    } else {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
                    }

                    // Complete each request in the burst.
                    List<RequestHolder> requests =
                            nextBurst.first.produceRequestHolders(nextBurst.second);
                    for (RequestHolder holder : requests) {
                        mDeviceState.setCaptureStart(holder);
                        long timestamp = 0;
                        try {
                            if (holder.hasPreviewTargets()) {
                                mReceivedPreview.close();
                                doPreviewCapture(holder);
                                if (!mReceivedPreview.block(PREVIEW_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for preview callback!");
                                }
                                timestamp = mLastPreviewTimestamp;
                            }
                            if (holder.hasJpegTargets()) {
                                mReceivedJpeg.close();
                                doJpegCapture(holder);
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    // TODO: report error to CameraDevice
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                }
                                mInFlightJpeg = null;
                                timestamp = mLastJpegTimestamp;
                            }
                        } catch (IOException e) {
                            // TODO: err handling
                            throw new IOError(e);
                        }
                        CameraMetadataNative result = convertResultMetadata(mParams,
                                holder.getRequest(), timestamp);
                        mDeviceState.setCaptureResult(holder, result);
                    }
                    if (DEBUG) {
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                    }
                    if (mCamera != null) {
                        mCamera.release();
                    }
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    private CameraMetadataNative convertResultMetadata(Camera.Parameters params,
                                                       CaptureRequest request,
                                                       long timestamp) {
        CameraMetadataNative result = new CameraMetadataNative();
        result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
        result.set(CaptureResult.SENSOR_TIMESTAMP, timestamp);

        // TODO: Remaining result metadata tags conversions.
        return result;
    }

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param deviceState a {@link CameraDeviceState} state machine.
     */
    public RequestThreadManager(int cameraId, Camera camera,
                                CameraDeviceState deviceState) {
        mCamera = camera;
        mCameraId = cameraId;
        String name = String.format("RequestThread-%d", cameraId);
        TAG = name;
        mDeviceState = deviceState;
        mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
    }

    /**
     * Start the request thread.
     */
    public void start() {
        mRequestThread.start();
    }

    /**
     * Flush the pending requests.
     */
    public void flush() {
        // TODO: Implement flush.
        Log.e(TAG, "flush not yet implemented.");
    }

    /**
     * Quit the request thread, and clean up everything.
     */
    public void quit() {
        Handler handler = mRequestThread.waitAndGetHandler();
        handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
        mRequestThread.quitSafely();
        try {
            mRequestThread.join();
        } catch (InterruptedException e) {
            Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
                    mRequestThread.getName(), mRequestThread.getId()));
        }
    }

    /**
     * Submit the given burst of requests to be captured.
     *
     * <p>If the burst is repeating, replace the current repeating burst.</p>
     *
     * @param requests the burst of requests to add to the queue.
     * @param repeating true if the burst is repeating.
     * @param frameNumber an output argument that contains either the frame number of the last frame
     *                    that will be returned for this request, or the frame number of the last
     *                    frame that will be returned for the current repeating request if this
     *                    burst is set to be repeating.
     * @return the request id.
     */
    public int submitCaptureRequests(List<CaptureRequest> requests, boolean repeating,
            /*out*/LongParcelable frameNumber) {
        Handler handler = mRequestThread.waitAndGetHandler();
        int ret = mRequestQueue.submit(requests, repeating, frameNumber);
        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
        return ret;
    }
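
    // Illustrative only: a minimal sketch of how a caller might submit a repeating burst and
    // later cancel it (previewRequests and requestThread are assumed to exist in the caller).
    //
    //   LongParcelable lastFrame = new LongParcelable();
    //   int requestId = requestThread.submitCaptureRequests(
    //           previewRequests, /*repeating*/ true, lastFrame);
    //   ...
    //   requestThread.cancelRepeating(requestId);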

    /**
     * Cancel a repeating request.
     *
     * @param requestId the id of the repeating request to cancel.
     * @return the last frame to be returned from the HAL for the given repeating request, or
     *          {@code INVALID_FRAME} if none exists.
     */
    public long cancelRepeating(int requestId) {
        return mRequestQueue.stopRepeating(requestId);
    }

    /**
     * Configure the device with the given list of output Surfaces.
     *
     * <p>
     * This operation blocks until the configuration is complete.
     * </p>
     *
     * <p>Using a {@code null} or empty {@code outputs} list is equivalent to unconfiguring.</p>
     *
     * @param outputs a {@link java.util.Collection} of outputs to configure.
     */
    public void configure(Collection<Surface> outputs) {
        Handler handler = mRequestThread.waitAndGetHandler();
        final ConditionVariable condition = new ConditionVariable(/*closed*/false);
        ConfigureHolder holder = new ConfigureHolder(condition, outputs);
        handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
        condition.block();
    }
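
    // Illustrative lifecycle sketch (hypothetical caller code; the owning LegacyCameraDevice
    // drives the real sequence): construct with an open Camera, start the thread, configure the
    // output Surfaces, submit requests, and quit to release the Camera.
    //
    //   RequestThreadManager requestThread =
    //           new RequestThreadManager(cameraId, camera, deviceState);
    //   requestThread.start();
    //   requestThread.configure(outputSurfaces);  // blocks until configuration completes
    //   requestThread.submitCaptureRequests(requests, /*repeating*/ false, lastFrame);
    //   ...
    //   requestThread.quit();                     // releases the Camera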
}