OneCameraZslImpl.java revision 21fdb9db957973fa00777b65787db7e887fc070d
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera.one.v2;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.CaptureResult.Key;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.CameraProfile;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaActionSound;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.support.v4.util.Pools;
import android.view.Surface;

import com.android.camera.CaptureModuleUtil;
import com.android.camera.app.MediaSaver.OnMediaSavedListener;
import com.android.camera.debug.Log;
import com.android.camera.debug.Log.Tag;
import com.android.camera.exif.ExifInterface;
import com.android.camera.exif.ExifTag;
import com.android.camera.exif.Rational;
import com.android.camera.one.AbstractOneCamera;
import com.android.camera.one.OneCamera;
import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
import com.android.camera.one.Settings3A;
import com.android.camera.one.v2.ImageCaptureManager.ImageCaptureListener;
import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
import com.android.camera.session.CaptureSession;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.ConjunctionListenerMux;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.Size;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * {@link OneCamera} implementation directly on top of the Camera2 API with zero
 * shutter lag.<br>
 * TODO: Determine what the maximum number of full YUV capture frames is.
 */
@TargetApi(Build.VERSION_CODES.L)
public class OneCameraZslImpl extends AbstractOneCamera {
    private static final Tag TAG = new Tag("OneCameraZslImpl2");

    /** Default JPEG encoding quality. */
    private static final int JPEG_QUALITY = CameraProfile.getJpegEncodingQualityParameter(
            CameraProfile.QUALITY_HIGH);
    /**
     * The maximum number of images to store in the full-size ZSL ring buffer.
     * <br>
     * TODO: Determine this number dynamically based on available memory and the
     * size of frames.
     */
    private static final int MAX_CAPTURE_IMAGES = 10;
    /**
     * True if zero-shutter-lag images should be captured. Some devices produce
     * lower-quality images for the high-frequency stream, so we may wish to
     * disable ZSL in that case.
     */
    private static final boolean ZSL_ENABLED = true;

    /**
     * Tags which may be used in CaptureRequests.
     */
    private static enum RequestTag {
        /**
         * Indicates that the request was explicitly sent for a single
         * high-quality still capture. Unlike other requests, such as the
         * repeating (ZSL) stream and AF/AE triggers, requests with this tag
         * should always be saved.
         */
        EXPLICIT_CAPTURE
    }

    /**
     * Set to ImageFormat.JPEG to use the hardware encoder, or
     * ImageFormat.YUV_420_888 to use the software encoder. No other image
     * formats are supported.
     */
    private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
    /**
     * Token for callbacks posted to {@link #mCameraHandler} to resume
     * continuous AF.
     */
    private static final String FOCUS_RESUME_CALLBACK_TOKEN = "RESUME_CONTINUOUS_AF";
    /** Zero-weight 3A region, used to reset the AE/AF regions to their defaults. */
    MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();

    /**
     * Thread on which high-priority camera operations, such as grabbing preview
     * frames for the viewfinder, are running.
     */
    private final HandlerThread mCameraThread;
    /** Handler of the {@link #mCameraThread}. */
    private final Handler mCameraHandler;

    /** Thread on which low-priority camera listeners are running. */
    private final HandlerThread mCameraListenerThread;
    private final Handler mCameraListenerHandler;

    /** The characteristics of this camera. */
    private final CameraCharacteristics mCharacteristics;
    /** The underlying Camera2 API camera device. */
    private final CameraDevice mDevice;

    /**
     * The aspect ratio (width/height) of the full resolution for this camera.
     * Usually the native aspect ratio of this camera.
     */
    private final double mFullSizeAspectRatio;
    /** The Camera2 API capture session currently active. */
    private CameraCaptureSession mCaptureSession;
    /** The surface onto which to render the preview. */
    private Surface mPreviewSurface;
    /** Whether closing of this device has been requested. */
    private volatile boolean mIsClosed = false;
    /** A callback that is called when the device is fully closed. */
    private CloseCallback mCloseCallback = null;

    /** Receives the normal captured images. */
    private final ImageReader mCaptureImageReader;

    /**
     * Maintains a buffer of images and their associated {@link CaptureResult}s.
     */
    private ImageCaptureManager mCaptureManager;

    /**
     * The sensor timestamp (which may not be relative to the system time) of
     * the most recently captured image.
     */
    private final AtomicLong mLastCapturedImageTimestamp = new AtomicLong(0);

    /** Thread pool for performing slow jpeg encoding and saving tasks. */
    private final ThreadPoolExecutor mImageSaverThreadPool;

    /** Pool of native byte buffers on which to store jpeg-encoded images. */
    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool = new
            Pools.SynchronizedPool<ByteBuffer>(64);

    /** Current zoom value. 1.0 is no zoom. */
    private float mZoomValue = 1f;
    /** Current crop region: set from mZoomValue. */
    private Rect mCropRegion;
    /** Current AF and AE metering regions. */
    private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
    private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;

    private MediaActionSound mMediaActionSound = new MediaActionSound();

    /**
     * Ready state (typically displayed by the UI shutter-button) depends on two
     * things:<br>
     * <ol>
     * <li>{@link #mCaptureManager} must be ready.</li>
     * <li>We must not be in the process of capturing a single, high-quality,
     * image.</li>
     * </ol>
     * See {@link ConjunctionListenerMux} and {@link #mReadyStateManager} for
     * details of how this is managed.
     */
    private static enum ReadyStateRequirement {
        CAPTURE_MANAGER_READY,
        CAPTURE_NOT_IN_PROGRESS
    }

    /**
     * Handles the thread-safe logic of dispatching whenever the logical AND of
     * these constraints changes.
     */
    private final ConjunctionListenerMux<ReadyStateRequirement>
            mReadyStateManager = new ConjunctionListenerMux<ReadyStateRequirement>(
                    ReadyStateRequirement.class, new ConjunctionListenerMux.OutputChangeListener() {
                        @Override
                        public void onOutputChange(boolean state) {
                            broadcastReadyState(state);
                        }
                    });
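
    // Illustrative note (not part of the original logic): the mux reports
    // "ready" only when every input is true, so the shutter only becomes
    // available once both of the following have been called somewhere in this
    // class, e.g.:
    //
    //   mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, true);
    //   mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
    //
    // and broadcastReadyState(false) is dispatched as soon as either input
    // drops back to false.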

    /**
     * An {@link ImageCaptureListener} which will compress and save an image to
     * disk.
     */
    private class ImageCaptureTask implements ImageCaptureListener {
        private final PhotoCaptureParameters mParams;
        private final CaptureSession mSession;

        public ImageCaptureTask(PhotoCaptureParameters parameters,
                CaptureSession session) {
            mParams = parameters;
            mSession = session;
        }

        @Override
        public void onImageCaptured(Image image, TotalCaptureResult
                captureResult) {
            long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);

            // We should only capture the image if it's more recent than the
            // latest one. Synchronization is necessary since this method is
            // called on {@link #mImageSaverThreadPool}.
            synchronized (mLastCapturedImageTimestamp) {
                if (timestamp > mLastCapturedImageTimestamp.get()) {
                    mLastCapturedImageTimestamp.set(timestamp);
                } else {
                    // There was a more recent (or identical) image which has
                    // begun being saved, so abort.
                    return;
                }
            }

            mReadyStateManager.setInput(
                    ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);

            mSession.startEmpty();
            savePicture(image, mParams, mSession);
            mParams.callback.onPictureTaken(mSession);
            Log.v(TAG, "Image saved.  Frame number = " + captureResult.getFrameNumber());
        }
    }
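
    // Note (added for clarity): ImageCaptureTask instances are handed to
    // mCaptureManager via tryCaptureExistingImage() / captureNextImage() in
    // takePicture() below. The manager is expected to invoke onImageCaptured()
    // on mImageSaverThreadPool once a frame satisfying the supplied constraints
    // is available, which is why the timestamp check above must be
    // synchronized.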

    /**
     * Instantiates a new camera based on Camera 2 API.
     *
     * @param device The underlying Camera 2 device.
     * @param characteristics The device's characteristics.
     * @param pictureSize the size of the final image to be taken.
     */
    OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
        Log.v(TAG, "Creating new OneCameraZslImpl");

        mDevice = device;
        mCharacteristics = characteristics;
        mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

        mCameraThread = new HandlerThread("OneCamera2");
        // If this thread stalls, it will delay viewfinder frames.
        mCameraThread.setPriority(Thread.MAX_PRIORITY);
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());

        mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
        mCameraListenerThread.start();
        mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

        // TODO: Encoding on multiple cores results in preview jank due to
        // excessive GC.
        int numEncodingCores = CameraUtil.getNumCpuCores();
        mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
                TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

        mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
                mImageSaverThreadPool);
        mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
            @Override
            public void onReadyStateChange(boolean capturePossible) {
                mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY,
                        capturePossible);
            }
        });

        // Listen for changes to auto focus state and dispatch to
        // mFocusStateListener.
        mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
                new ImageCaptureManager.MetadataChangeListener() {
                    @Override
                    public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                            CaptureResult result) {
                        mFocusStateListener.onFocusStatusUpdate(
                                AutoFocusHelper.stateFromCamera2State(
                                        result.get(CaptureResult.CONTROL_AF_STATE)));
                    }
                });

        // Allocate the image reader to store all images received from the
        // camera.
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
                pictureSize.getHeight(),
                sCaptureImageFormat, MAX_CAPTURE_IMAGES);

        mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
    }
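
    /*
     * Rough usage sketch, added for orientation only (the callback and
     * parameter objects are assumed to be supplied by the caller, e.g. the
     * CaptureModule):
     *
     *   OneCamera camera = new OneCameraZslImpl(device, characteristics, pictureSize);
     *   camera.startPreview(previewSurface, readyCallback); // async session setup
     *   camera.takePicture(params, session);                // ZSL or single capture
     *   camera.close(closeCallback);                        // abort captures, close device
     */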

    /**
     * @return The largest supported picture size.
     */
    public Size getDefaultPictureSize() {
        StreamConfigurationMap configs = mCharacteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

        // Find the largest supported size.
        android.util.Size largestSupportedSize = supportedSizes[0];
        long largestSupportedSizePixels = largestSupportedSize.getWidth()
                * largestSupportedSize.getHeight();
        for (int i = 0; i < supportedSizes.length; i++) {
            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
            if (numPixels > largestSupportedSizePixels) {
                largestSupportedSize = supportedSizes[i];
                largestSupportedSizePixels = numPixels;
            }
        }

        return new Size(largestSupportedSize.getWidth(),
                largestSupportedSize.getHeight());
    }


    private void onShutterInvokeUI(final PhotoCaptureParameters params) {
        // Tell CaptureModule shutter has occurred so it can flash the screen.
        params.callback.onQuickExpose();
        // Play shutter click sound.
        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
    }

    /**
     * Take a picture.
     */
    @Override
    public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
        params.checkSanity();

        mReadyStateManager.setInput(
                ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);

        boolean useZSL = ZSL_ENABLED;

        // We will only capture images from the zsl ring-buffer which satisfy
        // this constraint.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<
                ImageCaptureManager.CapturedImageConstraint>();
        zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
            @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
                Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
                Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
                Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
                Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
                Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
                Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);

                if (timestamp <= mLastCapturedImageTimestamp.get()) {
                    // Don't save frames older than the most
                    // recently-captured frame.
                    // TODO This technically has a race condition in which
                    // duplicate frames may be saved, but if a user is
                    // tapping at >30Hz, duplicate images may be what they
                    // expect.
                    return false;
                }

                if (lensState == CaptureResult.LENS_STATE_MOVING) {
                    // If we know the lens was moving, don't use this image.
                    return false;
                }

                if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
                        || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    return false;
                }
                switch (params.flashMode) {
                    case OFF:
                        break;
                    case ON:
                        if (flashState != CaptureResult.FLASH_STATE_FIRED
                                || flashMode != CaptureResult.FLASH_MODE_SINGLE) {
                            return false;
                        }
                        break;
                    case AUTO:
                        if (aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
                                && flashState != CaptureResult.FLASH_STATE_FIRED) {
                            return false;
                        }
                        break;
                }

                if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                        || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
                    return false;
                }

                if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
                    return false;
                }

                return true;
            }
        });
        // This constraint lets us capture images which have been explicitly
        // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<
                ImageCaptureManager.CapturedImageConstraint>();
        singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
            @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Object tag = captureResult.getRequest().getTag();
                return tag == RequestTag.EXPLICIT_CAPTURE;
            }
        });

        // If we can use ZSL, try to save a previously-captured frame, if an
        // acceptable one exists in the buffer.
        if (useZSL) {
            boolean capturedPreviousFrame = mCaptureManager.tryCaptureExistingImage(
                    new ImageCaptureTask(params, session), zslConstraints);
            if (capturedPreviousFrame) {
                Log.v(TAG, "Saving previous frame");
                onShutterInvokeUI(params);
            } else {
                Log.v(TAG, "No good image available. Capturing next available good image.");
                // If there was no good frame available in the ring buffer
                // already, capture the next good image.
                // TODO Disable the shutter button until this image is captured.

                if (params.flashMode == Flash.ON || params.flashMode == Flash.AUTO) {
                    // We must issue a request for a single capture using the
                    // flash, including an AE precapture trigger.

                    // The following sets up a sequence of events which will
                    // occur in reverse order to the associated method
                    // calls:
                    // 1. Send a request to trigger the Auto Exposure Precapture
                    // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
                    // and then send a request for a single image, with the
                    // appropriate flash settings.
                    // 3. Capture the next appropriate image, which should be
                    // the one we requested in (2).

                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            singleCaptureConstraint);

                    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
                            new MetadataChangeListener() {
                                @Override
                                public void onImageMetadataChange(Key<?> key, Object oldValue,
                                        Object newValue, CaptureResult result) {
                                    Log.v(TAG, "AE State Changed");
                                    if (oldValue.equals(
                                            Integer.valueOf(
                                                    CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                        mCaptureManager.removeMetadataChangeListener(key, this);
                                        sendSingleRequest(params);
                                        // TODO: Delay this until onCaptureStarted().
                                        onShutterInvokeUI(params);
                                    }
                                }
                            });

                    sendAutoExposureTriggerRequest(params.flashMode);
                } else {
                    // We may get here if, for example, the auto focus is in the
                    // middle of a scan.
                    // If the flash is off, we should just wait for the next
                    // image that arrives. This will have minimal delay since we
                    // do not need to send a new capture request.
                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            zslConstraints);
                }
            }
        } else {
            // TODO If we can't save a previous frame, create a new capture
            // request to do what we need (e.g. flash) and call
            // captureNextImage().
            throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
        }
    }

    @Override
    public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
        mPreviewSurface = previewSurface;
        setupAsync(mPreviewSurface, listener);
    }

    @Override
    public void setViewFinderSize(int width, int height) {
        throw new RuntimeException("Not implemented yet.");
    }

    @Override
    public boolean isFlashSupported(boolean enhanced) {
        throw new RuntimeException("Not implemented yet.");
    }

    @Override
    public boolean isSupportingEnhancedMode() {
        throw new RuntimeException("Not implemented yet.");
    }

    @Override
    public void close(CloseCallback closeCallback) {
        if (mIsClosed) {
            Log.w(TAG, "Camera is already closed.");
            return;
        }
        try {
            mCaptureSession.abortCaptures();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not abort captures in progress.");
        }
        mIsClosed = true;
        mCloseCallback = closeCallback;
        mCameraThread.quitSafely();
        mDevice.close();
        mCaptureManager.close();
    }

    @Override
    public Size[] getSupportedSizes() {
        StreamConfigurationMap config = mCharacteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Size.convert(config.getOutputSizes(sCaptureImageFormat));
    }

    @Override
    public double getFullSizeAspectRatio() {
        return mFullSizeAspectRatio;
    }

    @Override
    public boolean isFrontFacing() {
        return mCharacteristics.get(CameraCharacteristics.LENS_FACING)
                == CameraMetadata.LENS_FACING_FRONT;
    }

    @Override
    public boolean isBackFacing() {
        return mCharacteristics.get(CameraCharacteristics.LENS_FACING)
                == CameraMetadata.LENS_FACING_BACK;
    }

    private void savePicture(Image image, final PhotoCaptureParameters captureParams,
            CaptureSession session) {
        int heading = captureParams.heading;

        int width = image.getWidth();
        int height = image.getHeight();
        int rotation = 0;
        ExifInterface exif = null;

        exif = new ExifInterface();
        // TODO: Add more exif tags here.

        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, width));
        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, height));

        // TODO: Handle rotation correctly.

        // Set GPS heading direction based on sensor, if location is on.
        if (heading >= 0) {
            ExifTag directionRefTag = exif.buildTag(
                    ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                    ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
            ExifTag directionTag = exif.buildTag(
                    ExifInterface.TAG_GPS_IMG_DIRECTION,
                    new Rational(heading, 1));
            exif.setTag(directionRefTag);
            exif.setTag(directionTag);
        }

        session.saveAndFinish(acquireJpegBytes(image), width, height, rotation, exif,
                new OnMediaSavedListener() {
                    @Override
                    public void onMediaSaved(Uri uri) {
                        captureParams.callback.onPictureSaved(uri);
                    }
                });
    }

    /**
     * Asynchronously sets up the capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when setup is completed.
     */
    private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
        mCameraHandler.post(new Runnable() {
            @Override
            public void run() {
                setup(previewSurface, listener);
            }
        });
    }

    /**
     * Configures and attempts to create a capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when the setup is completed.
     */
    private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
                mCaptureSession = null;
            }
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(mCaptureImageReader.getSurface());

            mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateListener() {
                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    listener.onSetupFailed();
                }

                @Override
                public void onConfigured(CameraCaptureSession session) {
                    mCaptureSession = session;
                    mAFRegions = ZERO_WEIGHT_3A_REGION;
                    mAERegions = ZERO_WEIGHT_3A_REGION;
                    mZoomValue = 1f;
                    mCropRegion = cropRegionForZoom(mZoomValue);
                    boolean success = sendRepeatingCaptureRequest();
                    if (success) {
                        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS,
                                true);
                        mReadyStateManager.notifyListeners();
                        listener.onReadyForCapture();
                    } else {
                        listener.onSetupFailed();
                    }
                }

                @Override
                public void onClosed(CameraCaptureSession session) {
                    super.onClosed(session);
                    if (mCloseCallback != null) {
                        mCloseCallback.onCameraClosed();
                    }
                }
            }, mCameraHandler);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not set up capture session", ex);
            listener.onSetupFailed();
        }
    }

    private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
        builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
    }

    private void addFlashToCaptureRequestBuilder(CaptureRequest.Builder builder, Flash flashMode) {
        switch (flashMode) {
            case ON:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
                break;
            case OFF:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                break;
            case AUTO:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                break;
        }
    }

    /**
     * Request a stream of images.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendRepeatingCaptureRequest() {
        Log.v(TAG, "sendRepeatingCaptureRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.
                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

            addRegionsToCaptureRequestBuilder(builder);

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager,
                    mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            if (ZSL_ENABLED) {
                Log.v(TAG, "Could not execute zero-shutter-lag repeating request.", e);
            } else {
                Log.v(TAG, "Could not execute preview request.", e);
            }
            return false;
        }
    }

    /**
     * Request a single image.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendSingleRequest(OneCamera.PhotoCaptureParameters params) {
        Log.v(TAG, "sendSingleRequest()");
        try {
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

            builder.addTarget(mPreviewSurface);

            // Always add this surface for single image capture requests.
            builder.addTarget(mCaptureImageReader.getSurface());

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            addFlashToCaptureRequestBuilder(builder, params.flashMode);
            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            // Tag this as a special request which should be saved.
            builder.setTag(RequestTag.EXPLICIT_CAPTURE);

            if (sCaptureImageFormat == ImageFormat.JPEG) {
                builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
                builder.set(CaptureRequest.JPEG_ORIENTATION,
                        CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
            }

            mCaptureSession.capture(builder.build(), mCaptureManager,
                    mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute single still capture request.", e);
            return false;
        }
    }

    private boolean sendAutoExposureTriggerRequest(Flash flashMode) {
        Log.v(TAG, "sendAutoExposureTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.
                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            addRegionsToCaptureRequestBuilder(builder);
            addFlashToCaptureRequestBuilder(builder, flashMode);

            mCaptureSession.capture(builder.build(), mCaptureManager,
                    mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto exposure trigger request.", e);
            return false;
        }
    }

    /**
     * Sends a single request to start an auto focus scan.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendAutoFocusTriggerRequest() {
        Log.v(TAG, "sendAutoFocusTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.
                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);

            mCaptureSession.capture(builder.build(), mCaptureManager,
                    mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus trigger request.", e);
            return false;
        }
    }

    /**
     * Like {@link #sendRepeatingCaptureRequest()}, but with the focus held
     * constant.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendAutoFocusHoldRequest() {
        Log.v(TAG, "sendAutoFocusHoldRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.
                        createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            addRegionsToCaptureRequestBuilder(builder);
            // TODO: This should fire the torch, if appropriate.

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus hold request.", e);
            return false;
        }
    }

    /**
     * Calculate the aspect ratio of the full size capture on this device.
     *
     * @param characteristics the characteristics of the camera device.
     * @return The aspect ratio, in terms of width/height, of the full capture
     *         size.
     */
    private static double calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
        Rect activeArraySize =
                characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        return (double) activeArraySize.width() / activeArraySize.height();
    }
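
    // Worked example for the method above (illustration only): a sensor whose
    // active array is 4000x3000 pixels yields 4000.0 / 3000 = 1.333..., i.e.
    // the native 4:3 still-capture aspect ratio.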

    /**
     * Extracts the JPEG bytes from the given image, compressing it first if it
     * is a YUV image.
     *
     * @param img the image from which to extract jpeg bytes or compress to
     *            jpeg.
     * @return The bytes of the JPEG image. Newly allocated.
     */
    private byte[] acquireJpegBytes(Image img) {
        ByteBuffer buffer;

        if (img.getFormat() == ImageFormat.JPEG) {
            Image.Plane plane0 = img.getPlanes()[0];
            buffer = plane0.getBuffer();

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);
            buffer.rewind();
            return imageBytes;
        } else if (img.getFormat() == ImageFormat.YUV_420_888) {
            buffer = mJpegByteBufferPool.acquire();
            if (buffer == null) {
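                // Note (added): width * height * 3 bytes is a deliberately
                // generous upper bound, twice the size of the raw YUV_420_888
                // frame (1.5 bytes per pixel), so the compressed JPEG output
                // always fits in the pooled buffer.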
                buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
            }

            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY);

            if (numBytes < 0) {
                throw new RuntimeException("Error compressing jpeg.");
            }

            buffer.limit(numBytes);

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);

            buffer.clear();
            mJpegByteBufferPool.release(buffer);

            return imageBytes;
        } else {
            throw new RuntimeException("Unsupported image format.");
        }
    }

    private void startAFCycle() {
        // Clean up any existing AF cycle's pending callbacks.
        mCameraHandler.removeCallbacksAndMessages(FOCUS_RESUME_CALLBACK_TOKEN);

        // Send a single CONTROL_AF_TRIGGER_START capture request.
        sendAutoFocusTriggerRequest();

        // Immediately send a request for a regular preview stream, but with
        // CONTROL_AF_MODE_AUTO set so that the focus remains constant after the
        // AF cycle completes.
        sendAutoFocusHoldRequest();

        // Wait Settings3A.getFocusHoldMillis() milliseconds before sending a
        // request to resume the regular preview stream.
        mCameraHandler.postAtTime(new Runnable() {
            @Override
            public void run() {
                mAERegions = ZERO_WEIGHT_3A_REGION;
                mAFRegions = ZERO_WEIGHT_3A_REGION;
                sendRepeatingCaptureRequest();
            }
        }, FOCUS_RESUME_CALLBACK_TOKEN,
                SystemClock.uptimeMillis() + Settings3A.getFocusHoldMillis());
    }

    /**
     * @see com.android.camera.one.OneCamera#triggerFocusAndMeterAtPoint(float,
     *      float)
     */
    @Override
    public void triggerFocusAndMeterAtPoint(float nx, float ny) {
        // xc, yc is center of tap point in sensor coordinate system.
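        // Note (added): the nx/ny swap and the (1 - nx) mirroring below appear
        // to account for the 90-degree rotation between the normalized
        // portrait-orientation UI coordinates and the sensor's landscape
        // coordinate system, before the point is mapped into mCropRegion.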
        int xc = mCropRegion.left + (int) (mCropRegion.width() * ny);
        int yc = mCropRegion.top + (int) (mCropRegion.height() * (1f - nx));

        mAERegions = AutoFocusHelper.aeRegionsForSensorCoord(xc, yc, mCropRegion);
        mAFRegions = AutoFocusHelper.afRegionsForSensorCoord(xc, yc, mCropRegion);

        startAFCycle();
    }

    @Override
    public Size pickPreviewSize(Size pictureSize, Context context) {
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
        return CaptureModuleUtil.getOptimalPreviewSize(context, getSupportedSizes(),
                pictureAspectRatio);
    }

    @Override
    public float getMaxZoom() {
        return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    }

    @Override
    public void setZoom(float zoom) {
        mZoomValue = zoom;
        mCropRegion = cropRegionForZoom(zoom);
        sendRepeatingCaptureRequest();
    }

    private Rect cropRegionForZoom(float zoom) {
        return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
    }
}