/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.devcamera;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.InputConfiguration;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.media.MediaActionSound;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;

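// Minimal usage sketch (illustrative only; the real wiring lives in the
// DevCamera UI code, and the variable names below are placeholders). Assumes
// the caller supplies a MyCameraCallback implementation and a preview Surface:
//
//   CameraInterface camera = new Api2Camera(context, false /* useFrontCamera */);
//   camera.setCallback(uiCallback);        // MyCameraCallback implementation
//   camera.openCamera();                   // async; capture session starts when ready
//   camera.startPreview(previewSurface);   // Surface from the app's view layer
//   camera.takePicture();                  // ZSL reprocess of the last YUV frame
//   camera.closeCamera();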
/**
 * Api2Camera: a camera2 implementation.
 *
 * The goal here is to make the simplest possible API2 camera,
 * where individual streams and capture options (e.g. edge enhancement,
 * noise reduction, face detection) can be toggled on and off.
 */
public class Api2Camera implements CameraInterface, SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "DevCamera_API2";

    // Nth frame to log; put 10^6 if you don't want logging.
    private static int LOG_NTH_FRAME = 30;
    // Log dropped frames. There are a lot on Angler MDA32.
    private static boolean LOG_DROPPED_FRAMES = true;

    // IMPORTANT: Only one of these can be true:
    private static boolean SECOND_YUV_IMAGEREADER_STREAM = true;
    private static boolean SECOND_SURFACE_TEXTURE_STREAM = false;

    // Enable raw stream if available.
    private static boolean RAW_STREAM_ENABLE = true;
    // Use JPEG ImageReader and YUV ImageWriter if reprocessing is available.
    private static final boolean USE_REPROCESSING_IF_AVAIL = true;

    // Whether we are continuously taking pictures, or not.
    boolean mIsBursting = false;
    // Last total capture result.
    TotalCaptureResult mLastTotalCaptureResult;

    // ImageReader/Writer buffer sizes.
    private static final int YUV1_IMAGEREADER_SIZE = 8;
    private static final int YUV2_IMAGEREADER_SIZE = 8;
    private static final int RAW_IMAGEREADER_SIZE = 8;
    private static final int IMAGEWRITER_SIZE = 2;

    private CameraInfoCache mCameraInfoCache;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCurrentCaptureSession;
    private MediaActionSound mMediaActionSound = new MediaActionSound();

    MyCameraCallback mMyCameraCallback;

    // Generally, everything runs on this thread; this module is *not thread safe*.
    private HandlerThread mOpsThread;
    private Handler mOpsHandler;
    private HandlerThread mInitThread;
    private Handler mInitHandler;
    private HandlerThread mJpegListenerThread;
    private Handler mJpegListenerHandler;

    Context mContext;
    boolean mCameraIsFront;
    SurfaceTexture mSurfaceTexture;
    Surface mSurfaceTextureSurface;

    private boolean mFirstFrameArrived;
    private ImageReader mYuv1ImageReader;
    private int mYuv1ImageCounter;
    // Handle to last received Image: allows ZSL to be implemented.
    private Image mYuv1LastReceivedImage = null;
    // Time at which reprocessing request went in (right now we are doing one at a time).
    private long mReprocessingRequestNanoTime;

    private ImageReader mJpegImageReader;
    private ImageReader mYuv2ImageReader;
    private int mYuv2ImageCounter;
    private ImageReader mRawImageReader;
    private int mRawImageCounter;
    private boolean mIsDepthCloudSupported = false;
    private ImageReader mDepthCloudImageReader;
    private int mDepthCloudImageCounter = 0;
    private static int STORE_NTH_DEPTH_CLOUD = 30;
    private static boolean DEPTH_CLOUD_STORE_ENABLED = false;

    // Starting the preview requires each of these 3 to be true/non-null:
    private volatile Surface mPreviewSurface;
    private volatile CameraDevice mCameraDevice;
    volatile boolean mAllThingsInitialized = false;
    /**
     * Constructor.
     */
    public Api2Camera(Context context, boolean useFrontCamera) {
        mContext = context;
        mCameraIsFront = useFrontCamera;
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        mCameraInfoCache = new CameraInfoCache(mCameraManager, useFrontCamera);

        // Create thread and handler for camera operations.
        mOpsThread = new HandlerThread("CameraOpsThread");
        mOpsThread.start();
        mOpsHandler = new Handler(mOpsThread.getLooper());

        // Create thread and handler for slow initialization operations.
        // Don't want to use the camera operations thread because we want to time camera open carefully.
        mInitThread = new HandlerThread("CameraInitThread");
        mInitThread.start();
        mInitHandler = new Handler(mInitThread.getLooper());
        mInitHandler.post(new Runnable() {
            @Override
            public void run() {
                initializeAllTheThings();
                mAllThingsInitialized = true;
                Log.v(TAG, "STARTUP_REQUIREMENT ImageReader initialization done.");
                tryToStartCaptureSession();
            }
        });

        // Set initial noise and edge modes.
        if (mCameraInfoCache.isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
            // YUV streams.
            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.noiseModes,
                    CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG)) {
                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
            } else {
                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_FAST;
            }
            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.edgeModes,
                    CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG)) {
                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG;
            } else {
                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_FAST;
            }

            // Reprocessing.
            mReprocessingNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_HIGH_QUALITY;
            mReprocessingEdgeMode = CameraCharacteristics.EDGE_MODE_HIGH_QUALITY;
        }

        if (null != mCameraInfoCache.getDepthCloudSize()) {
            mIsDepthCloudSupported = true;
        }
    }

    // Ugh, why is this stuff so slow?
    private void initializeAllTheThings() {

        // Thread to handle returned JPEGs.
        mJpegListenerThread = new HandlerThread("CameraJpegThread");
        mJpegListenerThread.start();
        mJpegListenerHandler = new Handler(mJpegListenerThread.getLooper());

        // Create ImageReader to receive JPEG image buffers via reprocessing.
        mJpegImageReader = ImageReader.newInstance(
                mCameraInfoCache.getYuvStream1Size().getWidth(),
                mCameraInfoCache.getYuvStream1Size().getHeight(),
                ImageFormat.JPEG,
                2);
        mJpegImageReader.setOnImageAvailableListener(mJpegImageListener, mJpegListenerHandler);

        // Create ImageReader to receive YUV image buffers.
        mYuv1ImageReader = ImageReader.newInstance(
                mCameraInfoCache.getYuvStream1Size().getWidth(),
                mCameraInfoCache.getYuvStream1Size().getHeight(),
                ImageFormat.YUV_420_888,
                YUV1_IMAGEREADER_SIZE);
        mYuv1ImageReader.setOnImageAvailableListener(mYuv1ImageListener, mOpsHandler);

        if (mIsDepthCloudSupported) {
            mDepthCloudImageReader = ImageReader.newInstance(
                    mCameraInfoCache.getDepthCloudSize().getWidth(),
                    mCameraInfoCache.getDepthCloudSize().getHeight(),
                    ImageFormat.DEPTH_POINT_CLOUD, 2);
            mDepthCloudImageReader.setOnImageAvailableListener(mDepthCloudImageListener, mOpsHandler);
        }

        if (SECOND_YUV_IMAGEREADER_STREAM) {
            // Create ImageReader to receive YUV image buffers.
            mYuv2ImageReader = ImageReader.newInstance(
                    mCameraInfoCache.getYuvStream2Size().getWidth(),
                    mCameraInfoCache.getYuvStream2Size().getHeight(),
                    ImageFormat.YUV_420_888,
                    YUV2_IMAGEREADER_SIZE);
            mYuv2ImageReader.setOnImageAvailableListener(mYuv2ImageListener, mOpsHandler);
        }

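        // A SurfaceTexture consumer samples camera buffers through a
        // GL_TEXTURE_EXTERNAL_OES texture, so one is created and configured
        // here. External textures only support GL_CLAMP_TO_EDGE wrapping and
        // non-mipmapped filtering.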
        if (SECOND_SURFACE_TEXTURE_STREAM) {
            int[] textures = new int[1];
            // Generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, textures, 0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MIN_FILTER,
                    GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            // Clamp to edge is the only option.
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

            int textureId = textures[0];
            mSurfaceTexture = new SurfaceTexture(textureId);
            mSurfaceTexture.setDefaultBufferSize(320, 240);
            mSurfaceTexture.setOnFrameAvailableListener(this);
            mSurfaceTextureSurface = new Surface(mSurfaceTexture);
        }

        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
            // Create ImageReader to receive RAW image buffers.
            mRawImageReader = ImageReader.newInstance(
                    mCameraInfoCache.getRawStreamSize().getWidth(),
                    mCameraInfoCache.getRawStreamSize().getHeight(),
                    mCameraInfoCache.getRawFormat(),
                    RAW_IMAGEREADER_SIZE);
            mRawImageReader.setOnImageAvailableListener(mRawImageListener, mOpsHandler);
        }

        // Load click sound.
        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
    }

    public void setCallback(MyCameraCallback callback) {
        mMyCameraCallback = callback;
    }

    public void triggerAFScan() {
        Log.v(TAG, "AF trigger");
        issuePreviewCaptureRequest(true);
    }

    public void setCAF() {
        Log.v(TAG, "run CAF");
        issuePreviewCaptureRequest(false);
    }

    public void takePicture() {
        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
        mOpsHandler.post(new Runnable() {
            @Override
            public void run() {
                runReprocessing();
            }
        });
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        Log.v(TAG, "onFrameAvailable(SurfaceTexture)");
    }

    public void setBurst(boolean go) {
        // On a false-to-true transition, kick off the first picture; the JPEG
        // listener then keeps the burst going while mIsBursting is set.
        if (go && !mIsBursting) {
            takePicture();
        }
        mIsBursting = go;
    }

    public boolean isRawAvailable() {
        return mCameraInfoCache.rawAvailable();
    }

    public boolean isReprocessingAvailable() {
        return mCameraInfoCache.isYuvReprocessingAvailable();
    }

    @Override
    public Size getPreviewSize() {
        return mCameraInfoCache.getPreviewSize();
    }

    @Override
    public float[] getFieldOfView() {
        return mCameraInfoCache.getFieldOfView();
    }

    @Override
    public int getOrientation() {
        return mCameraInfoCache.sensorOrientation();
    }

    @Override
    public void openCamera() {
        // If API2 FULL mode is not available, display a toast.
        if (!mCameraInfoCache.isCamera2FullModeAvailable()) {
            mMyCameraCallback.noCamera2Full();
        }

        Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId());
        mOpsHandler.post(new Runnable() {
            @Override
            public void run() {
                CameraTimer.t_open_start = SystemClock.elapsedRealtime();
                try {
                    mCameraManager.openCamera(mCameraInfoCache.getCameraId(), mCameraStateCallback, null);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Unable to openCamera().", e);
                }
            }
        });
    }

    @Override
    public void closeCamera() {
        // TODO: We are stalling the main thread now, which is bad.
        Log.v(TAG, "Closing camera " + mCameraInfoCache.getCameraId());
        if (mCameraDevice != null) {
            // The session may not exist yet if close() races camera open/startup.
            if (mCurrentCaptureSession != null) {
                try {
                    mCurrentCaptureSession.abortCaptures();
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Could not abortCaptures().", e);
                }
            }
            mCameraDevice.close();
        }
        mCurrentCaptureSession = null;
        Log.v(TAG, "Done closing camera " + mCameraInfoCache.getCameraId());
    }

    public void startPreview(final Surface surface) {
        Log.v(TAG, "STARTUP_REQUIREMENT preview Surface ready.");
        mPreviewSurface = surface;
        tryToStartCaptureSession();
    }

    private CameraDevice.StateCallback mCameraStateCallback = new LoggingCallbacks.DeviceStateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            CameraTimer.t_open_end = SystemClock.elapsedRealtime();
            mCameraDevice = camera;
            Log.v(TAG, "STARTUP_REQUIREMENT Done opening camera " + mCameraInfoCache.getCameraId() +
                    ". HAL open took: (" + (CameraTimer.t_open_end - CameraTimer.t_open_start) + " ms)");

            super.onOpened(camera);
            tryToStartCaptureSession();
        }
    };

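    // Rendezvous point: openCamera(), startPreview() and the init thread each
    // call this; whichever of the camera device, the preview Surface, and the
    // ImageReader initialization finishes last actually starts the session.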
    private void tryToStartCaptureSession() {
        if (mCameraDevice != null && mAllThingsInitialized && mPreviewSurface != null) {
            mOpsHandler.post(new Runnable() {
                @Override
                public void run() {
                    // Historically this needed to be posted on a Handler.
                    startCaptureSession();
                }
            });
        }
    }

    // Create CameraCaptureSession. Callback will start a repeating request with current parameters.
    private void startCaptureSession() {
        CameraTimer.t_session_go = SystemClock.elapsedRealtime();

        Log.v(TAG, "Configuring session...");
        List<Surface> outputSurfaces = new ArrayList<Surface>(4);

        outputSurfaces.add(mPreviewSurface);
        Log.v(TAG, "  .. added SurfaceView " + mCameraInfoCache.getPreviewSize().getWidth() +
                " x " + mCameraInfoCache.getPreviewSize().getHeight());

        outputSurfaces.add(mYuv1ImageReader.getSurface());
        Log.v(TAG, "  .. added YUV ImageReader " + mCameraInfoCache.getYuvStream1Size().getWidth() +
                " x " + mCameraInfoCache.getYuvStream1Size().getHeight());

        if (mIsDepthCloudSupported) {
            outputSurfaces.add(mDepthCloudImageReader.getSurface());
            Log.v(TAG, "  .. added Depth cloud ImageReader");
        }

        if (SECOND_YUV_IMAGEREADER_STREAM) {
            outputSurfaces.add(mYuv2ImageReader.getSurface());
            Log.v(TAG, "  .. added YUV ImageReader " + mCameraInfoCache.getYuvStream2Size().getWidth() +
                    " x " + mCameraInfoCache.getYuvStream2Size().getHeight());
        }

        if (SECOND_SURFACE_TEXTURE_STREAM) {
            outputSurfaces.add(mSurfaceTextureSurface);
            Log.v(TAG, "  .. added SurfaceTexture");
        }

        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
            outputSurfaces.add(mRawImageReader.getSurface());
            Log.v(TAG, "  .. added Raw ImageReader " + mCameraInfoCache.getRawStreamSize().getWidth() +
                    " x " + mCameraInfoCache.getRawStreamSize().getHeight());
        }

        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
            outputSurfaces.add(mJpegImageReader.getSurface());
            Log.v(TAG, "  .. added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() +
                    " x " + mCameraInfoCache.getJpegStreamSize().getHeight());
        }

        try {
            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
                InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(),
                        mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888);
                mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces,
                        mSessionStateCallback, null);
                Log.v(TAG, "  Call to createReprocessableCaptureSession complete.");
            } else {
                mCameraDevice.createCaptureSession(outputSurfaces, mSessionStateCallback, null);
                Log.v(TAG, "  Call to createCaptureSession complete.");
            }
        } catch (CameraAccessException e) {
            Log.e(TAG, "Error creating capture session.", e);
        }
    }

    ImageWriter mImageWriter;

    private CameraCaptureSession.StateCallback mSessionStateCallback = new LoggingCallbacks.SessionStateCallback() {
        @Override
        public void onReady(CameraCaptureSession session) {
            Log.v(TAG, "capture session onReady().  HAL capture session took: (" + (SystemClock.elapsedRealtime() - CameraTimer.t_session_go) + " ms)");
            mCurrentCaptureSession = session;
            issuePreviewCaptureRequest(false);

            if (session.isReprocessable()) {
                mImageWriter = ImageWriter.newInstance(session.getInputSurface(), IMAGEWRITER_SIZE);
                mImageWriter.setOnImageReleasedListener(
                        new ImageWriter.OnImageReleasedListener() {
                            @Override
                            public void onImageReleased(ImageWriter writer) {
                                Log.v(TAG, "ImageWriter.OnImageReleasedListener onImageReleased()");
                            }
                        }, null);
                Log.v(TAG, "Created ImageWriter.");
            }
            super.onReady(session);
        }
    };

    // Variables to hold capture flow state.
    private boolean mCaptureYuv1 = false;
    private boolean mCaptureYuv2 = false;
    private boolean mCaptureRaw = false;
    private int mCaptureNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_FAST;
    private int mCaptureEdgeMode = CaptureRequest.EDGE_MODE_FAST;
    private boolean mCaptureFace = false;
    // Variables to hold reprocessing state.
    private int mReprocessingNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY;
    private int mReprocessingEdgeMode = CaptureRequest.EDGE_MODE_HIGH_QUALITY;

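    // Tri-state toggles: for yuv1/yuv2/raw10/face, null means "leave unchanged";
    // for nr/edge, true means "cycle to the next supported mode" (null or false
    // leaves the mode alone).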
    public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) {
        if (yuv1 != null) mCaptureYuv1 = yuv1;
        if (yuv2 != null) mCaptureYuv2 = yuv2;
        if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE;
        // Null-check before unboxing to avoid an NPE when nr/edge are not specified.
        if (nr != null && nr) {
            mCaptureNoiseMode = getNextMode(mCaptureNoiseMode, mCameraInfoCache.noiseModes);
        }
        if (edge != null && edge) {
            mCaptureEdgeMode = getNextMode(mCaptureEdgeMode, mCameraInfoCache.edgeModes);
        }
        if (face != null) mCaptureFace = face;
        mMyCameraCallback.setNoiseEdgeText(
                "NR " + noiseModeToString(mCaptureNoiseMode),
                "Edge " + edgeModeToString(mCaptureEdgeMode)
        );

        if (mCurrentCaptureSession != null) {
            issuePreviewCaptureRequest(false);
        }
    }

    public void setReprocessingFlow(Boolean nr, Boolean edge) {
        if (nr != null && nr) {
            mReprocessingNoiseMode = getNextMode(mReprocessingNoiseMode, mCameraInfoCache.noiseModes);
        }
        if (edge != null && edge) {
            mReprocessingEdgeMode = getNextMode(mReprocessingEdgeMode, mCameraInfoCache.edgeModes);
        }
        mMyCameraCallback.setNoiseEdgeTextForReprocessing(
                "NR " + noiseModeToString(mReprocessingNoiseMode),
                "Edge " + edgeModeToString(mReprocessingEdgeMode)
        );
    }

    public void issuePreviewCaptureRequest(boolean afTrigger) {
        CameraTimer.t_burst = SystemClock.elapsedRealtime();
        Log.v(TAG, "issuePreviewCaptureRequest...");
        try {
            CaptureRequest.Builder b1 = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            b1.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
            b1.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY);
            if (afTrigger) {
                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            } else {
                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            }

            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCaptureNoiseMode);
            b1.set(CaptureRequest.EDGE_MODE, mCaptureEdgeMode);
            b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);

            Log.v(TAG, "  .. NR=" + mCaptureNoiseMode + "  Edge=" + mCaptureEdgeMode + "  Face=" + mCaptureFace);

            if (mCaptureYuv1) {
                b1.addTarget(mYuv1ImageReader.getSurface());
                Log.v(TAG, "  .. YUV1 on");
            }

            if (mCaptureRaw) {
                b1.addTarget(mRawImageReader.getSurface());
            }

            b1.addTarget(mPreviewSurface);

            if (mIsDepthCloudSupported && !mCaptureYuv1 && !mCaptureYuv2 && !mCaptureRaw) {
                b1.addTarget(mDepthCloudImageReader.getSurface());
            }

            if (mCaptureYuv2) {
                if (SECOND_SURFACE_TEXTURE_STREAM) {
                    b1.addTarget(mSurfaceTextureSurface);
                }
                if (SECOND_YUV_IMAGEREADER_STREAM) {
                    b1.addTarget(mYuv2ImageReader.getSurface());
                }
                Log.v(TAG, "  .. YUV2 on");
            }

            if (afTrigger) {
                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                mCurrentCaptureSession.capture(b1.build(), mCaptureCallback, mOpsHandler);
                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
            }
            mCurrentCaptureSession.setRepeatingRequest(b1.build(), mCaptureCallback, mOpsHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for issuePreviewCaptureRequest.", e);
        }
    }

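    // ZSL-style still capture: the most recent YUV frame from mYuv1ImageReader
    // is queued into mImageWriter (which feeds the session's input surface),
    // then a reprocess request built from the matching TotalCaptureResult turns
    // it into a JPEG delivered to mJpegImageReader.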
    void runReprocessing() {
        if (mYuv1LastReceivedImage == null) {
            Log.e(TAG, "No YUV Image available.");
            return;
        }
        if (mImageWriter == null) {
            // Session was not created as reprocessable.
            Log.e(TAG, "No ImageWriter available; reprocessing not supported.");
            return;
        }
        mImageWriter.queueInputImage(mYuv1LastReceivedImage);
        Log.v(TAG, "  Sent YUV1 image to ImageWriter.queueInputImage()");
        try {
            CaptureRequest.Builder b1 = mCameraDevice.createReprocessCaptureRequest(mLastTotalCaptureResult);
            // TODO: Read the current orientation instead of just assuming the device is in native orientation.
            b1.set(CaptureRequest.JPEG_ORIENTATION, mCameraInfoCache.sensorOrientation());
            b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mReprocessingNoiseMode);
            b1.set(CaptureRequest.EDGE_MODE, mReprocessingEdgeMode);
            b1.addTarget(mJpegImageReader.getSurface());
            mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler);
            mReprocessingRequestNanoTime = System.nanoTime();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for runReprocessing.", e);
        }
        mYuv1LastReceivedImage = null;
        Log.v(TAG, "  Reprocessing request submitted.");
    }

    /*********************************
     * onImageAvailable() processing *
     *********************************/

    ImageReader.OnImageAvailableListener mYuv1ImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned YUV1");
                        return;
                    }
                    if (mYuv1LastReceivedImage != null) {
                        mYuv1LastReceivedImage.close();
                    }
                    // Hold on to the Image (instead of closing it) so takePicture() can reprocess it.
                    mYuv1LastReceivedImage = img;
                    if (++mYuv1ImageCounter % LOG_NTH_FRAME == 0) {
                        Log.v(TAG, "YUV1 buffer available, Frame #=" + mYuv1ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
                    }
                }
            };

    ImageReader.OnImageAvailableListener mDepthCloudImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned Depth");
                        return;
                    }
                    Plane[] planes = img.getPlanes();
                    if (0 < planes.length) {
                        if (DEPTH_CLOUD_STORE_ENABLED
                                && (mDepthCloudImageCounter % STORE_NTH_DEPTH_CLOUD) == 0) {
                            ByteBuffer b = planes[0].getBuffer();
                            MediaSaver.saveDepth(mContext, b);
                        }
                    } else {
                        Log.e(TAG, "Depth buffer with empty planes!");
                    }
                    img.close();
                    mDepthCloudImageCounter++;
                }
            };

    ImageReader.OnImageAvailableListener mJpegImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned JPEG");
                        return;
                    }
                    Image.Plane plane0 = img.getPlanes()[0];
                    final ByteBuffer buffer = plane0.getBuffer();
                    long dt = System.nanoTime() - mReprocessingRequestNanoTime;
                    Log.v(TAG, String.format("JPEG buffer available, w=%d h=%d time=%d size=%d dt=%.1f ms  ISO=%d",
                            img.getWidth(), img.getHeight(), img.getTimestamp(), buffer.capacity(), 0.000001 * dt, mLastIso));
                    // Copy the JPEG out of the Image; we are on the JPEG listener thread here.
                    final byte[] jpegBuf;
                    if (buffer.hasArray()) {
                        jpegBuf = buffer.array();
                    } else {
                        jpegBuf = new byte[buffer.capacity()];
                        buffer.get(jpegBuf);
                    }
                    mMyCameraCallback.jpegAvailable(jpegBuf, img.getWidth(), img.getHeight());
                    img.close();

                    // Take (reprocess) another picture right away if bursting.
                    if (mIsBursting) {
                        takePicture();
                    }
                }
            };

    ImageReader.OnImageAvailableListener mYuv2ImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned YUV2");
                    } else {
                        if (++mYuv2ImageCounter % LOG_NTH_FRAME == 0) {
                            Log.v(TAG, "YUV2 buffer available, Frame #=" + mYuv2ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
                        }
                        img.close();
                    }
                }
            };

    ImageReader.OnImageAvailableListener mRawImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    final Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned RAW");
                    } else {
                        if (++mRawImageCounter % LOG_NTH_FRAME == 0) {
                            Image.Plane plane0 = img.getPlanes()[0];
                            final ByteBuffer buffer = plane0.getBuffer();
                            Log.v(TAG, "Raw buffer available, Frame #=" + mRawImageCounter
                                    + " w=" + img.getWidth()
                                    + " h=" + img.getHeight()
                                    + " format=" + CameraDeviceReport.getFormatName(img.getFormat())
                                    + " time=" + img.getTimestamp()
                                    + " size=" + buffer.capacity()
                                    + " getRowStride()=" + plane0.getRowStride());
                        }
                        img.close();
                    }
                }
            };

    /*************************************
     * CaptureResult metadata processing *
     *************************************/

    private CameraCaptureSession.CaptureCallback mCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            if (!mFirstFrameArrived) {
                mFirstFrameArrived = true;
                long now = SystemClock.elapsedRealtime();
                long dt = now - CameraTimer.t0;
                long camera_dt = now - CameraTimer.t_session_go + CameraTimer.t_open_end - CameraTimer.t_open_start;
                long repeating_req_dt = now - CameraTimer.t_burst;
                Log.v(TAG, "App control to first frame: (" + dt + " ms)");
                Log.v(TAG, "HAL request to first frame: (" + repeating_req_dt + " ms)  Total HAL wait: (" + camera_dt + " ms)");
                mMyCameraCallback.receivedFirstFrame();
                mMyCameraCallback.performanceDataAvailable((int) dt, (int) camera_dt, null);
            }
            publishFrameData(result);
            // Used for reprocessing.
            mLastTotalCaptureResult = result;
            super.onCaptureCompleted(session, request, result);
        }
    };

    // Reprocessing capture completed.
    private CameraCaptureSession.CaptureCallback mReprocessingCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            Log.v(TAG, "Reprocessing onCaptureCompleted()");
        }
    };

    private static final double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second
    private static final double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second
    private static final int FPS_CALC_LOOKBACK = 15;
    private LinkedList<Long> mFrameTimes = new LinkedList<Long>();

    private void publishFrameData(TotalCaptureResult result) {
        // Faces. Guard against a null result when face detection is off.
        final Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        NormalizedFace[] newFaces = new NormalizedFace[faces != null ? faces.length : 0];
        if (faces != null && faces.length > 0) {
            int offX = mCameraInfoCache.faceOffsetX();
            int offY = mCameraInfoCache.faceOffsetY();
            int dX = mCameraInfoCache.activeAreaWidth() - 2 * offX;
            int dY = mCameraInfoCache.activeAreaHeight() - 2 * offY;
            if (mCameraInfoCache.IS_NEXUS_6 && mCameraIsFront) {
                // Front camera on Nexus 6 is currently 16 x 9 cropped to 4 x 3.
                // TODO: Generalize this.
                int cropOffset = dX / 8;
                dX -= 2 * cropOffset;
                offX += cropOffset;
            }
            int orientation = mCameraInfoCache.sensorOrientation();
            for (int i = 0; i < faces.length; ++i) {
                newFaces[i] = new NormalizedFace(faces[i], dX, dY, offX, offY);
                if (mCameraIsFront && orientation == 90) {
                    newFaces[i].mirrorInY();
                }
                if (mCameraIsFront && orientation == 270) {
                    newFaces[i].mirrorInX();
                }
                if (!mCameraIsFront && orientation == 270) {
                    newFaces[i].mirrorInX();
                    newFaces[i].mirrorInY();
                }
            }
        }

        // Normalized lens and exposure coordinates.
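        // Exposure time is mapped onto [0, 1] on a log10 scale between
        // 1/10000 s (SHORT_LOG_EXPOSURE = 5) and 1/10 s (LONG_LOG_EXPOSURE = 8);
        // e.g. a 1/1000 s exposure (10^6 ns, log10 = 6) maps to (6 - 5) / (8 - 5) = 0.33.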
        double rm = Math.log10(result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
        float normExposure = (float) ((rm - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE));
        float normLensPos = (mCameraInfoCache.getDiopterHi() - result.get(CaptureResult.LENS_FOCUS_DISTANCE)) / (mCameraInfoCache.getDiopterHi() - mCameraInfoCache.getDiopterLow());
        mLastIso = result.get(CaptureResult.SENSOR_SENSITIVITY);

        // Update frame arrival history.
        mFrameTimes.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
        if (mFrameTimes.size() > FPS_CALC_LOOKBACK) {
            mFrameTimes.removeFirst();
        }

        // Frame drop detector.
        {
            float frameDuration = result.get(CaptureResult.SENSOR_FRAME_DURATION);
            if (mFrameTimes.size() > 1) {
                long dt = result.get(CaptureResult.SENSOR_TIMESTAMP) - mFrameTimes.get(mFrameTimes.size() - 2);
                if (dt > 3 * frameDuration / 2 && LOG_DROPPED_FRAMES) {
                    float drops = (dt * 1f / frameDuration) - 1f;
                    Log.e(TAG, String.format("dropped %.2f frames", drops));
                    mMyCameraCallback.performanceDataAvailable(null, null, drops);
                }
            }
        }
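        // Example: at 30 fps (frameDuration ~= 33.3 ms), a 100 ms gap between
        // consecutive sensor timestamps gives drops = 100 / 33.3 - 1 ~= 2 missed frames.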

        // FPS calc.
        float fps = 0;
        if (mFrameTimes.size() > 1) {
            long dt = mFrameTimes.getLast() - mFrameTimes.getFirst();
            fps = (mFrameTimes.size() - 1) * 1000000000f / dt;
            fps = (float) Math.floor(fps + 0.1); // round to nearest whole number, ish.
        }
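        // Example: 15 timestamps spanning 466.7 ms yield fps = 14 * 1e9 / 466.7e6 ~= 30.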

        // Do callback.
        if (mMyCameraCallback != null) {
            mMyCameraCallback.frameDataAvailable(newFaces, normExposure, normLensPos, fps,
                    (int) mLastIso, result.get(CaptureResult.CONTROL_AF_STATE), result.get(CaptureResult.CONTROL_AE_STATE), result.get(CaptureResult.CONTROL_AWB_STATE));
        } else {
            Log.v(TAG, "mMyCameraCallback is null!");
        }
    }

    long mLastIso = 0;

    /*********************
     * UTILITY FUNCTIONS *
     *********************/

    /**
     * Return the next mode after currentMode in supportedModes, wrapping to the
     * start of the mode list if currentMode is last. Returns currentMode if it
     * is not found in supportedModes.
     *
     * @param currentMode    the mode currently in use
     * @param supportedModes the list of modes to cycle through
     * @return the next mode after currentMode in supportedModes
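     *
     * <p>Example: with supportedModes = {OFF, FAST, HIGH_QUALITY},
     * getNextMode(FAST, modes) returns HIGH_QUALITY and
     * getNextMode(HIGH_QUALITY, modes) wraps around to OFF.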
     */
    private int getNextMode(int currentMode, int[] supportedModes) {
        boolean getNext = false;
        for (int m : supportedModes) {
            if (getNext) {
                return m;
            }
            if (m == currentMode) {
                getNext = true;
            }
        }
        if (getNext) {
            // currentMode was last in the list; wrap around.
            return supportedModes[0];
        }
        // Can't find mode in list.
        return currentMode;
    }

    private static String edgeModeToString(int mode) {
        switch (mode) {
            case CaptureRequest.EDGE_MODE_OFF:
                return "OFF";
            case CaptureRequest.EDGE_MODE_FAST:
                return "FAST";
            case CaptureRequest.EDGE_MODE_HIGH_QUALITY:
                return "HiQ";
            case CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG:
                return "ZSL";
        }
        return Integer.toString(mode);
    }

    private static String noiseModeToString(int mode) {
        switch (mode) {
            case CaptureRequest.NOISE_REDUCTION_MODE_OFF:
                return "OFF";
            case CaptureRequest.NOISE_REDUCTION_MODE_FAST:
                return "FAST";
            case CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY:
                return "HiQ";
            case CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL:
                return "MIN";
            case CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG:
                return "ZSL";
        }
        return Integer.toString(mode);
    }
}