/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera.one.v2;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.view.Surface;

import com.android.camera.CaptureModuleUtil;
import com.android.camera.Exif;
import com.android.camera.Storage;
import com.android.camera.debug.DebugPropertyHelper;
import com.android.camera.debug.Log;
import com.android.camera.debug.Log.Tag;
import com.android.camera.exif.ExifInterface;
import com.android.camera.exif.ExifTag;
import com.android.camera.exif.Rational;
import com.android.camera.one.AbstractOneCamera;
import com.android.camera.one.CameraDirectionProvider;
import com.android.camera.one.OneCamera;
import com.android.camera.one.Settings3A;
import com.android.camera.one.v2.camera2proxy.AndroidCaptureResultProxy;
import com.android.camera.one.v2.camera2proxy.AndroidImageProxy;
import com.android.camera.one.v2.camera2proxy.CaptureResultProxy;
import com.android.camera.processing.imagebackend.TaskImageContainer;
import com.android.camera.session.CaptureSession;
import com.android.camera.ui.focus.LensRangeCalculator;
import com.android.camera.ui.motion.LinearScale;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.CaptureDataSerializer;
import com.android.camera.util.ExifUtil;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.Size;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * {@link OneCamera} implementation directly on top of the Camera2 API for
 * cameras without API 2 FULL support (limited or legacy).
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public class OneCameraImpl extends AbstractOneCamera {
    /** Captures that are requested but haven't completed yet. */
    private static class InFlightCapture {
        final PhotoCaptureParameters parameters;
        final CaptureSession session;
        Image image;
        TotalCaptureResult totalCaptureResult;

        public InFlightCapture(PhotoCaptureParameters parameters,
                CaptureSession session) {
            this.parameters = parameters;
            this.session = session;
        }

        /** Set the image once it's been received. */
        public InFlightCapture setImage(Image capturedImage) {
            image = capturedImage;
            return this;
        }

        /** Set the total capture result once it's been received. */
        public InFlightCapture setCaptureResult(TotalCaptureResult result) {
            totalCaptureResult = result;
            return this;
        }

        /**
         * Returns whether the capture is complete, which is the case once
         * both the image and the capture result are present.
         */
        boolean isCaptureComplete() {
            return image != null && totalCaptureResult != null;
        }
    }

    private static final Tag TAG = new Tag("OneCameraImpl2");

    /** If true, will write data about each capture request to disk. */
    private static final boolean DEBUG_WRITE_CAPTURE_DATA = DebugPropertyHelper.writeCaptureData();
    /** If true, will log per-frame AF info. */
    private static final boolean DEBUG_FOCUS_LOG = DebugPropertyHelper.showFrameDebugLog();

    /** Default JPEG encoding quality. */
    private static final Byte JPEG_QUALITY = 90;

    /**
     * Set to ImageFormat.JPEG, to use the hardware encoder, or
     * ImageFormat.YUV_420_888 to use the software encoder. You can also try
     * RAW_SENSOR experimentally.
     */
    private static final int sCaptureImageFormat = DebugPropertyHelper.isCaptureDngEnabled() ?
            ImageFormat.RAW_SENSOR : ImageFormat.JPEG;

    /** Duration to hold after manual focus tap. */
    private static final int FOCUS_HOLD_MILLIS = Settings3A.getFocusHoldMillis();
    /** Zero weight 3A region, to reset regions per API. */
    private static final MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper
            .getZeroWeightRegion();

    /**
     * CaptureRequest tags.
     * <ul>
     * <li>{@link #PRESHOT_TRIGGERED_AF}</li>
     * <li>{@link #CAPTURE}</li>
     * </ul>
     */
    public static enum RequestTag {
        /** Request that is part of a pre shot trigger. */
        PRESHOT_TRIGGERED_AF,
        /** Capture request (purely for logging). */
        CAPTURE,
        /** Tap to focus (purely for logging). */
        TAP_TO_FOCUS
    }

    /** Directory to store raw DNG files in. */
    private static final File RAW_DIRECTORY = new File(Storage.DIRECTORY, "DNG");

    /** Current CONTROL_AF_MODE request to Camera2 API. */
    private int mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    /** Last OneCamera.AutoFocusState reported. */
    private AutoFocusState mLastResultAFState = AutoFocusState.INACTIVE;
    /** Flag to take a picture when the lens is stopped. */
    private boolean mTakePictureWhenLensIsStopped = false;
    /** Takes a (delayed) picture with appropriate parameters. */
    private Runnable mTakePictureRunnable;
    /** Keep PictureCallback for last requested capture. */
    private PictureCallback mLastPictureCallback = null;
    /** Last time takePicture() was called in uptimeMillis. */
    private long mTakePictureStartMillis;
    /** Runnable that returns to CONTROL_AF_MODE = AF_CONTINUOUS_PICTURE. */
    private final Runnable mReturnToContinuousAFRunnable = new Runnable() {
        @Override
        public void run() {
            mAFRegions = ZERO_WEIGHT_3A_REGION;
            mAERegions = ZERO_WEIGHT_3A_REGION;
            mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
            repeatingPreview(null);
        }
    };

    /** Current zoom value. 1.0 is no zoom. */
    private float mZoomValue = 1f;
    /** Current crop region: set from mZoomValue. */
    private Rect mCropRegion;
    /** Current AF and AE regions */
    private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
    private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;
    /** Last frame for which CONTROL_AF_STATE was received. */
    private long mLastControlAfStateFrameNumber = 0;

    /**
     * Common listener for preview frame metadata.
     */
    private final CameraCaptureSession.CaptureCallback mCaptureCallback =
            new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureStarted(CameraCaptureSession session,
                        CaptureRequest request, long timestamp,
                        long frameNumber) {
                    if (request.getTag() == RequestTag.CAPTURE
                            && mLastPictureCallback != null) {
                        mLastPictureCallback.onQuickExpose();
                    }
                }

                // AF state information is sometimes available 1 frame before
                // onCaptureCompleted(), so we take advantage of that.
                @Override
                public void onCaptureProgressed(CameraCaptureSession session,
                        CaptureRequest request, CaptureResult partialResult) {
                    autofocusStateChangeDispatcher(partialResult);
                    super.onCaptureProgressed(session, request, partialResult);
                }

                @Override
                public void onCaptureCompleted(CameraCaptureSession session,
                        CaptureRequest request, TotalCaptureResult result) {
                    autofocusStateChangeDispatcher(result);
                    // This checks for a HAL implementation error where
                    // TotalCaptureResult is missing CONTROL_AF_STATE. This
                    // should not happen.
                    if (result.get(CaptureResult.CONTROL_AF_STATE) == null) {
                        AutoFocusHelper.checkControlAfState(result);
                    }
                    if (DEBUG_FOCUS_LOG) {
                        AutoFocusHelper.logExtraFocusInfo(result);
                    }

                    Float diopter = result.get(CaptureResult.LENS_FOCUS_DISTANCE);
                    if (diopter != null && mFocusDistanceListener != null) {
                        mFocusDistanceListener.onFocusDistance(diopter, mLensRange);
                    }

                    if (request.getTag() == RequestTag.CAPTURE) {
                        // Add the capture result to the latest in-flight
                        // capture. If all the data for that capture is
                        // complete, store the image on disk.
                        InFlightCapture capture = null;
                        synchronized (mCaptureQueue) {
                            if (mCaptureQueue.getFirst().setCaptureResult(result)
                                    .isCaptureComplete()) {
                                capture = mCaptureQueue.removeFirst();
                            }
                        }
                        if (capture != null) {
                            OneCameraImpl.this.onCaptureCompleted(capture);
                        }
                    }
                    super.onCaptureCompleted(session, request, result);
                }
            };
    /** Thread on which the camera operations are running. */
    private final HandlerThread mCameraThread;
    /** Handler of the {@link #mCameraThread}. */
    private final Handler mCameraHandler;
    /** The characteristics of this camera. */
    private final CameraCharacteristics mCharacteristics;
    private final LinearScale mLensRange;
    /** The underlying Camera2 API camera device. */
    private final CameraDevice mDevice;
    private final CameraDirectionProvider mDirectionProvider;

    /**
     * The aspect ratio (width/height) of the full resolution for this camera.
     * Usually the native aspect ratio of this camera.
     */
    private final float mFullSizeAspectRatio;
    /** The Camera2 API capture session currently active. */
    private CameraCaptureSession mCaptureSession;
    /** The surface onto which to render the preview. */
    private Surface mPreviewSurface;
    /**
     * A queue of captures that have been requested but are not yet complete.
     */
    private final LinkedList<InFlightCapture> mCaptureQueue =
            new LinkedList<InFlightCapture>();
    /** Whether closing of this device has been requested. */
    private volatile boolean mIsClosed = false;

    /** Receives the normal captured images. */
    private final ImageReader mCaptureImageReader;
    ImageReader.OnImageAvailableListener mCaptureImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    // Add the image data to the latest in-flight capture.
                    // If all the data for that capture is complete, store the
                    // image data.
                    InFlightCapture capture = null;
                    synchronized (mCaptureQueue) {
                        if (mCaptureQueue.getFirst().setImage(reader.acquireLatestImage())
                                .isCaptureComplete()) {
                            capture = mCaptureQueue.removeFirst();
                        }
                    }
                    if (capture != null) {
                        onCaptureCompleted(capture);
                    }
                }
            };

    /**
     * Instantiates a new camera based on Camera 2 API.
     *
     * @param device The underlying Camera 2 device.
     * @param characteristics The device's characteristics.
     * @param pictureSize the size of the final image to be taken.
     */
    OneCameraImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
        mDevice = device;
        mCharacteristics = characteristics;
        mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
        mDirectionProvider = new CameraDirectionProvider(characteristics);
        mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

        // Override pictureSize for RAW: our picture size settings don't
        // include RAW, which typically supports only one size (the sensor
        // size). This also typically differs from the largest JPEG or YUV size.
        // TODO: If we ever want to support RAW properly, it should be one entry
        // in the picture quality list, which should then lead to the right
        // pictureSize being passed in here.
        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
            pictureSize = getDefaultPictureSize();
        }

        mCameraThread = new HandlerThread("OneCamera2");
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());

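        // maxImages is 2, so one capture can still be read out while the next
        // image is being delivered to the reader.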
        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
                pictureSize.getHeight(),
                sCaptureImageFormat, 2);
        mCaptureImageReader.setOnImageAvailableListener(mCaptureImageListener, mCameraHandler);
        Log.d(TAG, "New Camera2 based OneCameraImpl created.");
    }

    /**
     * Take picture, initiating an auto focus scan if needed.
     */
    @Override
    public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
        // Do not do anything when a picture is already requested.
        if (mTakePictureWhenLensIsStopped) {
            return;
        }

        // Not ready until the picture comes back.
        broadcastReadyState(false);

        mTakePictureRunnable = new Runnable() {
            @Override
            public void run() {
                takePictureNow(params, session);
            }
        };
        mLastPictureCallback = params.callback;
        mTakePictureStartMillis = SystemClock.uptimeMillis();

        // This class implements a very simple version of AF, which
        // only delays capture if the lens is scanning.
        if (mLastResultAFState == AutoFocusState.ACTIVE_SCAN) {
            Log.v(TAG, "Waiting until scan is done before taking shot.");
            mTakePictureWhenLensIsStopped = true;
        } else {
            // We could do CONTROL_AF_TRIGGER_START and wait until lens locks,
            // but this would slow down the capture.
            takePictureNow(params, session);
        }
    }

    /**
     * Take picture immediately. Parameters passed through from takePicture().
     */
    public void takePictureNow(PhotoCaptureParameters params, CaptureSession session) {
        long dt = SystemClock.uptimeMillis() - mTakePictureStartMillis;
        Log.v(TAG, "Taking shot with extra AF delay of " + dt + " ms.");
        try {
            // JPEG capture.
            CaptureRequest.Builder builder = mDevice
                    .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            builder.setTag(RequestTag.CAPTURE);
            addBaselineCaptureKeysToRequest(builder);

            // Enable lens-shading correction for even better DNGs.
            if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
                builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
            } else if (sCaptureImageFormat == ImageFormat.JPEG) {
                builder.set(CaptureRequest.JPEG_QUALITY, JPEG_QUALITY);
                builder.set(CaptureRequest.JPEG_ORIENTATION,
                        CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
            }

            builder.addTarget(mPreviewSurface);
            builder.addTarget(mCaptureImageReader.getSurface());
            CaptureRequest request = builder.build();

            if (DEBUG_WRITE_CAPTURE_DATA) {
                final String debugDataDir = makeDebugDir(params.debugDataFolder,
                        "normal_capture_debug");
                Log.i(TAG, "Writing capture data to: " + debugDataDir);
                CaptureDataSerializer.toFile("Normal Capture", request, new File(debugDataDir,
                        "capture.txt"));
            }

            mCaptureSession.capture(request, mCaptureCallback, mCameraHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for still image capture.");
            broadcastReadyState(true);
            params.callback.onPictureTakingFailed();
            return;
        }
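        // Track this capture. Both the image listener and the metadata
        // callback complete captures against the head of this queue, so
        // requests are expected to finish in FIFO order.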
        synchronized (mCaptureQueue) {
            mCaptureQueue.add(new InFlightCapture(params, session));
        }
    }

    @Override
    public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
        mPreviewSurface = previewSurface;
        setupAsync(mPreviewSurface, listener);
    }

    @Override
    public void close() {
        if (mIsClosed) {
            Log.w(TAG, "Camera is already closed.");
            return;
        }
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not abort captures in progress.");
        }
        mIsClosed = true;
        mCameraThread.quitSafely();
        mDevice.close();
    }

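    /** Returns the output sizes supported for SurfaceTexture preview streams. */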
    public Size[] getSupportedPreviewSizes() {
        StreamConfigurationMap config = mCharacteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Size.convert(config.getOutputSizes(SurfaceTexture.class));
    }

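    /** Returns the aspect ratio (width/height) of the full-size capture. */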
    public float getFullSizeAspectRatio() {
        return mFullSizeAspectRatio;
    }

    @Override
    public Facing getDirection() {
        return mDirectionProvider.getDirection();
    }

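    /**
     * Reads image dimensions and rotation from the JPEG's EXIF header, adds
     * GPS heading and capture-result metadata, and hands the JPEG to the
     * capture session for saving.
     */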
    private void saveJpegPicture(byte[] jpegData, final PhotoCaptureParameters captureParams,
            CaptureSession session, CaptureResult result) {
        int heading = captureParams.heading;
        int width = 0;
        int height = 0;
        int rotation = 0;
        ExifInterface exif = null;
        try {
            exif = new ExifInterface();
            exif.readExif(jpegData);

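            // Read the final image dimensions from the EXIF header; fall back
            // to 0 if the tags are missing.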
            Integer w = exif.getTagIntValue(ExifInterface.TAG_PIXEL_X_DIMENSION);
            width = (w == null) ? width : w;
            Integer h = exif.getTagIntValue(ExifInterface.TAG_PIXEL_Y_DIMENSION);
            height = (h == null) ? height : h;

            // Get image rotation from EXIF.
            rotation = Exif.getOrientation(exif);

            // Set GPS heading direction based on sensor, if location is on.
            if (heading >= 0) {
                ExifTag directionRefTag = exif.buildTag(
                        ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                        ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
                ExifTag directionTag = exif.buildTag(
                        ExifInterface.TAG_GPS_IMG_DIRECTION,
                        new Rational(heading, 1));
                exif.setTag(directionRefTag);
                exif.setTag(directionTag);
            }
            new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage> absent(),
                    Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
                    Optional.<Location> absent());
        } catch (IOException e) {
            Log.w(TAG, "Could not read exif from gcam jpeg", e);
            exif = null;
        }
        ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(jpegData, width, height,
                rotation, exif);
        Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
            @Override
            public void onSuccess(Optional<Uri> uriOptional) {
                captureParams.callback.onPictureSaved(uriOptional.orNull());
            }

            @Override
            public void onFailure(Throwable throwable) {
                captureParams.callback.onPictureSaved(null);
            }
        });
    }

    /**
     * Asynchronously sets up the capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when setup is completed.
     */
    private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
        mCameraHandler.post(new Runnable() {
            @Override
            public void run() {
                setup(previewSurface, listener);
            }
        });
    }

    /**
     * Configures and attempts to create a capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when the setup is completed.
     */
    private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
                mCaptureSession = null;
            }
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(mCaptureImageReader.getSurface());

            mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    listener.onSetupFailed();
                }

                @Override
                public void onConfigured(CameraCaptureSession session) {
                    mCaptureSession = session;
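                    // Reset 3A regions and zoom to their defaults before
                    // starting the repeating preview request.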
                    mAFRegions = ZERO_WEIGHT_3A_REGION;
                    mAERegions = ZERO_WEIGHT_3A_REGION;
                    mZoomValue = 1f;
                    mCropRegion = cropRegionForZoom(mZoomValue);
                    boolean success = repeatingPreview(null);
                    if (success) {
                        listener.onReadyForCapture();
                    } else {
                        listener.onSetupFailed();
                    }
                }

                @Override
                public void onClosed(CameraCaptureSession session) {
                    super.onClosed(session);
                }
            }, mCameraHandler);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not set up capture session", ex);
            listener.onSetupFailed();
        }
    }

    /**
     * Adds the current metering regions, crop region, and AF mode (with
     * AF_TRIGGER_IDLE) to the given CaptureRequest builder.
     *
     * @param builder Builder for the CaptureRequest
     */
    private void addBaselineCaptureKeysToRequest(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
        builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
        builder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
        // Enable face detection
        builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
                CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL);
        builder.set(CaptureRequest.CONTROL_SCENE_MODE,
                CaptureRequest.CONTROL_SCENE_MODE_FACE_PRIORITY);
    }

    /**
     * Request preview capture stream with AF_MODE_CONTINUOUS_PICTURE.
     *
     * @param tag the request tag; currently unused by this method.
     * @return true if the request was built and sent successfully.
     */
    private boolean repeatingPreview(Object tag) {
        try {
            CaptureRequest.Builder builder = mDevice.
                    createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mPreviewSurface);
            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            addBaselineCaptureKeysToRequest(builder);
            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureCallback,
                    mCameraHandler);
            Log.v(TAG, String.format("Sent repeating Preview request, zoom = %.2f", mZoomValue));
            return true;
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not access camera setting up preview.", ex);
            return false;
        }
    }

    /**
     * Request preview capture stream with auto focus trigger cycle.
     */
    private void sendAutoFocusTriggerCaptureRequest(Object tag) {
        try {
            // Step 1: Request single frame CONTROL_AF_TRIGGER_START.
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            builder.addTarget(mPreviewSurface);
            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            mControlAFMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
            addBaselineCaptureKeysToRequest(builder);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
            builder.setTag(tag);
            mCaptureSession.capture(builder.build(), mCaptureCallback, mCameraHandler);

            // Step 2: Call repeatingPreview to update mControlAFMode.
            repeatingPreview(tag);
            resumeContinuousAFAfterDelay(FOCUS_HOLD_MILLIS);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not execute preview request.", ex);
        }
    }

    /**
     * Resume AF_MODE_CONTINUOUS_PICTURE after FOCUS_HOLD_MILLIS.
     */
    private void resumeContinuousAFAfterDelay(int millis) {
        mCameraHandler.removeCallbacks(mReturnToContinuousAFRunnable);
        mCameraHandler.postDelayed(mReturnToContinuousAFRunnable, millis);
    }

    /**
     * Takes the appropriate action when the camera2 AF state changes.
     * <ol>
     * <li>Reports changes in camera2 AF state to OneCamera.FocusStateListener.</li>
     * <li>Takes a picture after the AF scan if mTakePictureWhenLensIsStopped is true.</li>
     * </ol>
     */
    private void autofocusStateChangeDispatcher(CaptureResult result) {
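        // Ignore results that are older than the last processed frame or that
        // carry no AF state at all.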
        if (result.getFrameNumber() < mLastControlAfStateFrameNumber ||
                result.get(CaptureResult.CONTROL_AF_STATE) == null) {
            return;
        }
        mLastControlAfStateFrameNumber = result.getFrameNumber();

        // Convert to OneCamera mode and state.
        AutoFocusState resultAFState = AutoFocusHelper.
                stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE));

        // TODO: Consider using LENS_STATE.
        boolean lensIsStopped = resultAFState == AutoFocusState.ACTIVE_FOCUSED ||
                resultAFState == AutoFocusState.ACTIVE_UNFOCUSED ||
                resultAFState == AutoFocusState.PASSIVE_FOCUSED ||
                resultAFState == AutoFocusState.PASSIVE_UNFOCUSED;

        if (mTakePictureWhenLensIsStopped && lensIsStopped) {
            // Take the shot.
            mCameraHandler.post(mTakePictureRunnable);
            mTakePictureWhenLensIsStopped = false;
        }

        // Report state change when AF state has changed.
        if (resultAFState != mLastResultAFState && mFocusStateListener != null) {
            mFocusStateListener.onFocusStatusUpdate(resultAFState, result.getFrameNumber());
        }
        mLastResultAFState = resultAFState;
    }

    @Override
    public void triggerFocusAndMeterAtPoint(float nx, float ny) {
        int sensorOrientation = mCharacteristics.get(
                CameraCharacteristics.SENSOR_ORIENTATION);
        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion,
                sensorOrientation);
        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion,
                sensorOrientation);

        sendAutoFocusTriggerCaptureRequest(RequestTag.TAP_TO_FOCUS);
    }

    @Override
    public float getMaxZoom() {
        return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    }

    @Override
    public void setZoom(float zoom) {
        mZoomValue = zoom;
        mCropRegion = cropRegionForZoom(zoom);
        repeatingPreview(null);
    }

    @Override
    public Size pickPreviewSize(Size pictureSize, Context context) {
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
        Size[] supportedSizes = getSupportedPreviewSizes();

        // Since devices only have one raw resolution we need to be more
        // flexible for selecting a matching preview resolution.
        Double aspectRatioTolerance = sCaptureImageFormat == ImageFormat.RAW_SENSOR ? 10d : null;
        Size size = CaptureModuleUtil.getOptimalPreviewSize(supportedSizes,
                pictureAspectRatio, aspectRatioTolerance);
        Log.d(TAG, "Selected preview size: " + size);
        return size;
    }

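    /** Computes the sensor crop region for the given digital zoom value. */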
    private Rect cropRegionForZoom(float zoom) {
        return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
    }

    /**
     * Calculates the aspect ratio of the full-size capture on this device.
     *
     * @param characteristics the characteristics of the camera device.
     * @return The aspect ratio, in terms of width/height, of the full capture
     *         size.
     */
    private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
        Rect activeArraySize =
                characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        return ((float) (activeArraySize.width())) / activeArraySize.height();
    }

    /**
     * Called when a capture that is in flight is completed.
     *
     * @param capture the in-flight capture, which needs to contain the
     *            received image and capture data
     */
    private void onCaptureCompleted(InFlightCapture capture) {

        // Experimental support for writing RAW. We do not have a usable JPEG
        // here, so we don't use the usual capture session mechanism and instead
        // just store the RAW file in its own directory.
        // TODO: If we make this a real feature we should probably put the DNGs
        // into the Camera directory.
        if (sCaptureImageFormat == ImageFormat.RAW_SENSOR) {
            if (!RAW_DIRECTORY.exists()) {
                if (!RAW_DIRECTORY.mkdirs()) {
                    throw new RuntimeException("Could not create RAW directory.");
                }
            }
            File dngFile = new File(RAW_DIRECTORY, capture.session.getTitle() + ".dng");
            writeDngBytesAndClose(capture.image, capture.totalCaptureResult,
                    mCharacteristics, dngFile);
        } else {
            // Since this is not an HDR+ session, we will just save the
            // result.
            byte[] imageBytes = acquireJpegBytesAndClose(capture.image);
            saveJpegPicture(imageBytes, capture.parameters, capture.session,
                    capture.totalCaptureResult);
        }
        broadcastReadyState(true);
        capture.parameters.callback.onPictureTaken(capture.session);
    }

    /**
     * Takes the given RAW image and capture result, converts it to a DNG and
     * writes it to disk.
     *
     * @param image the image containing the 16-bit RAW data (RAW_SENSOR)
     * @param captureResult the capture result for the image
     * @param characteristics the camera characteristics of the camera that took
     *            the RAW image
     * @param dngFile the destination file to which the resulting DNG data is
     *            written
     */
    private static void writeDngBytesAndClose(Image image, TotalCaptureResult captureResult,
            CameraCharacteristics characteristics, File dngFile) {
        try (DngCreator dngCreator = new DngCreator(characteristics, captureResult);
                FileOutputStream outputStream = new FileOutputStream(dngFile)) {
            // TODO: Add DngCreator#setThumbnail and add the DNG to the normal
            // filmstrip.
            dngCreator.writeImage(outputStream, image);
        } catch (IOException e) {
            Log.e(TAG, "Could not store DNG file", e);
            return;
        } finally {
            // Always release the image back to the ImageReader, even if
            // writing the DNG failed.
            image.close();
        }
        Log.i(TAG, "Successfully stored DNG file: " + dngFile.getAbsolutePath());
    }

    /**
     * Given an image reader, this extracts the final image. If the image in the
     * reader is JPEG, we extract and return it as is. If the image is YUV, we
     * convert it to JPEG and return the result.
     *
     * @param image the image we got from the image reader.
     * @return A valid JPEG image.
     */
    private static byte[] acquireJpegBytesAndClose(Image image) {
        ByteBuffer buffer;
        if (image.getFormat() == ImageFormat.JPEG) {
            Image.Plane plane0 = image.getPlanes()[0];
            buffer = plane0.getBuffer();
        } else if (image.getFormat() == ImageFormat.YUV_420_888) {
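            // Allocate a direct buffer big enough for the worst case; three
            // bytes per pixel is a generous upper bound for the compressed JPEG.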
            buffer = ByteBuffer.allocateDirect(image.getWidth() * image.getHeight() * 3);

            Log.v(TAG, "Compressing JPEG with software encoder.");
            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(
                    new AndroidImageProxy(image), buffer, JPEG_QUALITY);

            if (numBytes < 0) {
                throw new RuntimeException("Error compressing jpeg.");
            }
            buffer.limit(numBytes);
        } else {
            throw new RuntimeException("Unsupported image format.");
        }

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        image.close();
        return imageBytes;
    }

    /**
     * @return The largest supported picture size.
     */
    public Size getDefaultPictureSize() {
        StreamConfigurationMap configs =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

        // Find the largest supported size.
        android.util.Size largestSupportedSize = supportedSizes[0];
        long largestSupportedSizePixels =
                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
        for (int i = 1; i < supportedSizes.length; i++) {
            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
            if (numPixels > largestSupportedSizePixels) {
                largestSupportedSize = supportedSizes[i];
                largestSupportedSizePixels = numPixels;
            }
        }
        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
    }
}