/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.hdrviewfinder;

import android.app.Activity;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.cam2.params.StreamConfigurationMap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.renderscript.RenderScript;
import android.util.Log;
import android.util.Size;
import android.view.GestureDetector;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;
/**
 * A small demo of advanced camera functionality with the Android camera2 API.
 *
 * <p>This demo implements a real-time high-dynamic-range camera viewfinder,
 * by alternating the sensor's exposure time between two exposure values on even and odd
 * frames, and then compositing together the latest two frames whenever a new frame is
 * captured.</p>
 *
 * <p>The demo has three modes: Regular auto-exposure viewfinder, split-screen manual exposure,
 * and the fused HDR viewfinder.  The latter two use manual exposure controlled by the user,
 * by swiping up/down on the right and left halves of the viewfinder.  The left half controls
 * the exposure time of even frames, and the right half controls the exposure time of odd frames.
 * </p>
 *
 * <p>In split-screen mode, the even frames are shown on the left and the odd frames on the right,
 * so the user can see two different exposures of the scene simultaneously.  In fused HDR mode,
 * the even/odd frames are merged together into a single image.  By selecting different exposure
 * values for the even/odd frames, the fused image has a higher dynamic range than the regular
 * viewfinder.</p>
 *
 * <p>The HDR fusion and the split-screen viewfinder processing are done with RenderScript, as is
 * the necessary YUV->RGB conversion. The camera subsystem outputs YUV images naturally, while the
 * GPU and display subsystems generally only accept RGB data.  Therefore, after the images are
 * fused/composited, a standard YUV->RGB color transform is applied before the data is written
 * to the output Allocation. The HDR fusion algorithm is very simple, and tends to result in
 * lower-contrast scenes, but has very few artifacts and can run very fast.</p>
 *
 * <p>Data is passed between the subsystems (camera, RenderScript, and display) using the
 * Android {@link android.view.Surface} class, which allows for zero-copy transport of large
 * buffers between processes and subsystems.</p>
 */
public class HdrViewfinderActivity extends Activity implements
        SurfaceHolder.Callback, CameraOps.ErrorDisplayer, CameraOps.CameraReadyListener {

    private static final String TAG = "HdrViewfinderDemo";

    private static final String FRAGMENT_DIALOG = "dialog";

    /**
     * View for the camera preview.
     */
    private FixedAspectSurfaceView mPreviewView;

    /**
     * This shows the current mode of the app.
     */
    private TextView mModeText;

    // These show the exposure times for even frames, odd frames, and auto-exposure mode.
    private TextView mEvenExposureText, mOddExposureText, mAutoExposureText;

    private Handler mUiHandler;

    private CameraCharacteristics mCameraInfo;

    private Surface mPreviewSurface;
    private Surface mProcessingHdrSurface;
    private Surface mProcessingNormalSurface;
    CaptureRequest.Builder mHdrBuilder;
    ArrayList<CaptureRequest> mHdrRequests = new ArrayList<CaptureRequest>(2);

    CaptureRequest mPreviewRequest;

    RenderScript mRS;
    ViewfinderProcessor mProcessor;
    CameraManager mCameraManager;
    CameraOps mCameraOps;

    private int mRenderMode = ViewfinderProcessor.MODE_NORMAL;

    // Durations in nanoseconds
    private static final long MICRO_SECOND = 1000;
    private static final long MILLI_SECOND = MICRO_SECOND * 1000;
    private static final long ONE_SECOND = MILLI_SECOND * 1000;

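    // Initial manual exposure times for the even/odd frames: ONE_SECOND / 33, roughly 30 ms.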
    private long mOddExposure = ONE_SECOND / 33;
    private long mEvenExposure = ONE_SECOND / 33;

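    // Tag objects attached to each capture request so that onCaptureCompleted() can tell
    // which exposure (even, odd, or auto) a given capture result belongs to.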
    private Object mOddExposureTag = new Object();
    private Object mEvenExposureTag = new Object();
    private Object mAutoExposureTag = new Object();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        mPreviewView = (FixedAspectSurfaceView) findViewById(R.id.preview);
        mPreviewView.getHolder().addCallback(this);
        mPreviewView.setGestureListener(this, mViewListener);

        Button helpButton = (Button) findViewById(R.id.help_button);
        helpButton.setOnClickListener(mHelpButtonListener);

        mModeText = (TextView) findViewById(R.id.mode_label);
        mEvenExposureText = (TextView) findViewById(R.id.even_exposure);
        mOddExposureText = (TextView) findViewById(R.id.odd_exposure);
        mAutoExposureText = (TextView) findViewById(R.id.auto_exposure);

        mUiHandler = new Handler(Looper.getMainLooper());

        mCameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        mCameraOps = new CameraOps(mCameraManager,
                /*errorDisplayer*/ this,
                /*readyListener*/ this,
                /*readyHandler*/ mUiHandler);

        mHdrRequests.add(null);
        mHdrRequests.add(null);

        mRS = RenderScript.create(this);
    }

    @Override
    protected void onResume() {
        super.onResume();

        findAndOpenCamera();
    }

    @Override
    protected void onPause() {
        super.onPause();

        // Wait until the camera is closed to ensure the next application can open it
        mCameraOps.closeCameraAndWait();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.info: {
                MessageDialogFragment.newInstance(R.string.intro_message)
                        .show(getFragmentManager(), FRAGMENT_DIALOG);
                break;
            }
        }
        return super.onOptionsItemSelected(item);
    }

    private GestureDetector.OnGestureListener mViewListener
            = new GestureDetector.SimpleOnGestureListener() {

        @Override
        public boolean onDown(MotionEvent e) {
            return true;
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            switchRenderMode(1);
            return true;
        }

        @Override
        public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
            if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) return false;

            float xPosition = e1.getAxisValue(MotionEvent.AXIS_X);
            float width = mPreviewView.getWidth();
            float height = mPreviewView.getHeight();

            float xPosNorm = xPosition / width;
            float yDistNorm = distanceY / height;

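            // Map the vertical swipe distance to an exponential exposure scale factor,
            // so a full-height swipe changes the exposure time by a factor of 2^8.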
            final float ACCELERATION_FACTOR = 8;
            double scaleFactor = Math.pow(2.f, yDistNorm * ACCELERATION_FACTOR);

            // Even on left, odd on right
            if (xPosNorm > 0.5) {
                mOddExposure *= scaleFactor;
            } else {
                mEvenExposure *= scaleFactor;
            }

            setHdrBurst();

            return true;
        }
    };

    // Show help dialog
    private View.OnClickListener mHelpButtonListener = new View.OnClickListener() {
        public void onClick(View v) {
            MessageDialogFragment.newInstance(R.string.help_text)
                    .show(getFragmentManager(), FRAGMENT_DIALOG);
        }
    };

    private void findAndOpenCamera() {

        String errorMessage = "Unknown error";
        boolean foundCamera = false;
        try {
            // Find the first back-facing camera that has the necessary capabilities
            String[] cameraIds = mCameraManager.getCameraIdList();
            for (String id : cameraIds) {
                CameraCharacteristics info = mCameraManager.getCameraCharacteristics(id);
                int facing = info.get(CameraCharacteristics.LENS_FACING);

                int level = info.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
                boolean hasFullLevel
                        = (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);

                int[] capabilities = info.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
                int syncLatency = info.get(CameraCharacteristics.SYNC_MAX_LATENCY);
                boolean hasManualControl = hasCapability(capabilities,
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
                boolean hasEnoughCapability = hasManualControl &&
                        syncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL;

                // All of these are guaranteed by
                // CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, but checking for only
                // the things we care about expands the range of devices we can run on.
                // We want:
                //  - Back-facing camera
                //  - Manual sensor control
                //  - Per-frame synchronization (so that exposure can be changed every frame)
                if (facing == CameraCharacteristics.LENS_FACING_BACK &&
                        (hasFullLevel || hasEnoughCapability)) {
                    // Found a suitable camera - get its info, open it, and set up outputs
                    mCameraInfo = info;
                    mCameraOps.openCamera(id);
                    configureSurfaces();
                    foundCamera = true;
                    break;
                }
            }
            if (!foundCamera) {
                errorMessage = getString(R.string.camera_no_good);
            }
        } catch (CameraAccessException e) {
            errorMessage = getErrorString(e);
        }

        if (!foundCamera) {
            showErrorDialog(errorMessage);
        }
    }

    private boolean hasCapability(int[] capabilities, int capability) {
        for (int c : capabilities) {
            if (c == capability) return true;
        }
        return false;
    }

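    /**
     * Cycle through the three viewfinder modes (regular, split-screen, fused HDR), update the
     * mode label, and apply the matching capture request: the auto-exposure preview request for
     * the regular mode, or the manual-exposure HDR burst otherwise. A direction of 0 re-applies
     * the current mode.
     */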
    private void switchRenderMode(int direction) {
        mRenderMode = (mRenderMode + direction) % 3;

        mModeText.setText(getResources().getStringArray(R.array.mode_label_array)[mRenderMode]);

        if (mProcessor != null) {
            mProcessor.setRenderMode(mRenderMode);
        }
        if (mRenderMode == ViewfinderProcessor.MODE_NORMAL) {
            mCameraOps.setRepeatingRequest(mPreviewRequest,
                    mCaptureCallback, mUiHandler);
        } else {
            setHdrBurst();
        }
    }

    /**
     * Configure the SurfaceView and the RenderScript processing
     */
    private void configureSurfaces() {
        // Find a good output size - the largest size with a roughly 16:9 aspect ratio that is
        // no wider than 720p
        final int MAX_WIDTH = 1280;
        final float TARGET_ASPECT = 16.f / 9.f;
        final float ASPECT_TOLERANCE = 0.1f;

        StreamConfigurationMap configs =
                mCameraInfo.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        Size[] outputSizes = configs.getOutputSizes(SurfaceHolder.class);

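        // Start with the first reported size, then switch to a candidate (no wider than
        // MAX_WIDTH) when it matches the target aspect ratio and the current pick does not,
        // or when it is wider than the current pick.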
        Size outputSize = outputSizes[0];
        float outputAspect = (float) outputSize.getWidth() / outputSize.getHeight();
        for (Size candidateSize : outputSizes) {
            if (candidateSize.getWidth() > MAX_WIDTH) continue;
            float candidateAspect = (float) candidateSize.getWidth() / candidateSize.getHeight();
            boolean goodCandidateAspect =
                    Math.abs(candidateAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
            boolean goodOutputAspect =
                    Math.abs(outputAspect - TARGET_ASPECT) < ASPECT_TOLERANCE;
            if ((goodCandidateAspect && !goodOutputAspect) ||
                    candidateSize.getWidth() > outputSize.getWidth()) {
                outputSize = candidateSize;
                outputAspect = candidateAspect;
            }
        }
        Log.i(TAG, "Resolution chosen: " + outputSize);

        // Configure processing
        mProcessor = new ViewfinderProcessor(mRS, outputSize);
        setupProcessor();

        // Configure the output view - this will fire surfaceChanged
        mPreviewView.setAspectRatio(outputAspect);
        mPreviewView.getHolder().setFixedSize(outputSize.getWidth(), outputSize.getHeight());
    }

    /**
     * Once the camera is open and the output surfaces are ready, configure the RenderScript
     * processing and the camera device inputs/outputs.
     */
    private void setupProcessor() {
        if (mProcessor == null || mPreviewSurface == null) return;

        mProcessor.setOutputSurface(mPreviewSurface);
        mProcessingHdrSurface = mProcessor.getInputHdrSurface();
        mProcessingNormalSurface = mProcessor.getInputNormalSurface();

        List<Surface> cameraOutputSurfaces = new ArrayList<Surface>();
        cameraOutputSurfaces.add(mProcessingHdrSurface);
        cameraOutputSurfaces.add(mProcessingNormalSurface);

        mCameraOps.setSurfaces(cameraOutputSurfaces);
    }

    /**
     * Start running an HDR burst on a configured camera session
     */
    public void setHdrBurst() {

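        // Fix the sensitivity and frame duration so that only the exposure time differs
        // between the even and odd requests in the burst.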
        mHdrBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 1600);
        mHdrBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, ONE_SECOND / 30);

        mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mEvenExposure);
        mHdrBuilder.setTag(mEvenExposureTag);
        mHdrRequests.set(0, mHdrBuilder.build());

        mHdrBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mOddExposure);
        mHdrBuilder.setTag(mOddExposureTag);
        mHdrRequests.set(1, mHdrBuilder.build());

        mCameraOps.setRepeatingBurst(mHdrRequests, mCaptureCallback, mUiHandler);
    }

    /**
     * Listener for completed captures; invoked on the UI thread.
     */
    private CameraCaptureSession.CaptureCallback mCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                       TotalCaptureResult result) {

            // Only update the UI every so many frames
            // Use an odd number here to ensure both even and odd exposures get an occasional update
            long frameNumber = result.getFrameNumber();
            if (frameNumber % 3 != 0) return;

            long exposureTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);

            // Format the exposure time nicely
            String exposureText;
            if (exposureTime > ONE_SECOND) {
                exposureText = String.format("%.2f s", exposureTime / 1e9);
            } else if (exposureTime > MILLI_SECOND) {
                exposureText = String.format("%.2f ms", exposureTime / 1e6);
            } else if (exposureTime > MICRO_SECOND) {
                exposureText = String.format("%.2f us", exposureTime / 1e3);
            } else {
                exposureText = String.format("%d ns", exposureTime);
            }

            Object tag = request.getTag();
            Log.i(TAG, "Exposure: " + exposureText);

            if (tag == mEvenExposureTag) {
                mEvenExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(true);
                mOddExposureText.setEnabled(true);
                mAutoExposureText.setEnabled(false);
            } else if (tag == mOddExposureTag) {
                mOddExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(true);
                mOddExposureText.setEnabled(true);
                mAutoExposureText.setEnabled(false);
            } else {
                mAutoExposureText.setText(exposureText);

                mEvenExposureText.setEnabled(false);
                mOddExposureText.setEnabled(false);
                mAutoExposureText.setEnabled(true);
            }
        }
    };

    /**
     * Callbacks for the FixedAspectSurfaceView
     */

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
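        // The preview surface is now available (or has changed size); hand it to the
        // RenderScript processor and reconnect the camera output surfaces.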
        mPreviewSurface = holder.getSurface();

        setupProcessor();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // ignored
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mPreviewSurface = null;
    }

    /**
     * Callbacks for CameraOps
     */
    @Override
    public void onCameraReady() {
        // Ready to send requests in, so set them up
        try {
            CaptureRequest.Builder previewBuilder =
                    mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            previewBuilder.addTarget(mProcessingNormalSurface);
            previewBuilder.setTag(mAutoExposureTag);
            mPreviewRequest = previewBuilder.build();

            mHdrBuilder =
                    mCameraOps.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
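            // Disable auto-exposure so the manual SENSOR_EXPOSURE_TIME values set in
            // setHdrBurst() take effect.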
            mHdrBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_OFF);
            mHdrBuilder.addTarget(mProcessingHdrSurface);

            switchRenderMode(0);

        } catch (CameraAccessException e) {
            String errorMessage = getErrorString(e);
            showErrorDialog(errorMessage);
        }
    }

    /**
     * Utility methods
     */
    @Override
    public void showErrorDialog(String errorMessage) {
        MessageDialogFragment.newInstance(errorMessage).show(getFragmentManager(), FRAGMENT_DIALOG);
    }

    @Override
    public String getErrorString(CameraAccessException e) {
        String errorMessage;
        switch (e.getReason()) {
            case CameraAccessException.CAMERA_DISABLED:
                errorMessage = getString(R.string.camera_disabled);
                break;
            case CameraAccessException.CAMERA_DISCONNECTED:
                errorMessage = getString(R.string.camera_disconnected);
                break;
            case CameraAccessException.CAMERA_ERROR:
                errorMessage = getString(R.string.camera_error);
                break;
            default:
                errorMessage = getString(R.string.camera_unknown, e.getReason());
                break;
        }
        return errorMessage;
    }

}