VideoCapture.java revision f2477e01787aa58f445919b809d89e252beef54f
1// Copyright (c) 2013 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package org.chromium.media;
6
7import android.content.Context;
8import android.graphics.ImageFormat;
9import android.graphics.SurfaceTexture;
10import android.graphics.SurfaceTexture.OnFrameAvailableListener;
11import android.hardware.Camera;
12import android.hardware.Camera.PreviewCallback;
13import android.opengl.GLES20;
14import android.util.Log;
15import android.view.Surface;
16import android.view.WindowManager;
17
18import java.io.IOException;
19import java.util.concurrent.locks.ReentrantLock;
20import java.util.Iterator;
21import java.util.List;
22
23import org.chromium.base.CalledByNative;
24import org.chromium.base.JNINamespace;
25
26@JNINamespace("media")
27public class VideoCapture implements PreviewCallback, OnFrameAvailableListener {
28    static class CaptureCapability {
29        public int mWidth = 0;
30        public int mHeight = 0;
31        public int mDesiredFps = 0;
32    }
33
34    // Some devices with OS older than JELLY_BEAN don't support YV12 format correctly.
35    // Some devices don't support YV12 format correctly even with JELLY_BEAN or newer OS.
36    // To work around the issues on those devices, we'd have to request NV21.
37    // This is a temporary hack till device manufacturers fix the problem or
38    // we don't need to support those devices any more.
39    private static class DeviceImageFormatHack {
40        private static final String[] sBUGGY_DEVICE_LIST = {
41            "SAMSUNG-SGH-I747",
42            "ODROID-U2",
43        };
44
45        static int getImageFormat() {
46            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.JELLY_BEAN) {
47                return ImageFormat.NV21;
48            }
49
50            for (String buggyDevice : sBUGGY_DEVICE_LIST) {
51                if (buggyDevice.contentEquals(android.os.Build.MODEL)) {
52                    return ImageFormat.NV21;
53                }
54            }
55            return ImageFormat.YV12;
56        }
57    }
58
    // The Android camera instance; null until allocate() succeeds and again
    // after deallocate().
    private Camera mCamera;
    // Guards mIsRunning between the camera callback thread and the threads
    // calling startCapture()/stopCapture().
    public ReentrantLock mPreviewBufferLock = new ReentrantLock();
    // Preview pixel format, chosen by calculateImageFormat().
    private int mImageFormat = ImageFormat.YV12;
    // NOTE(review): never read or written in this file — candidate for removal.
    private byte[] mColorPlane = null;
    private Context mContext = null;
    // True when native code has started capture.
    private boolean mIsRunning = false;

    // Number of preview buffers handed to the camera for round-robin reuse.
    private static final int NUM_CAPTURE_BUFFERS = 3;
    // Byte size of one full preview frame; frames of any other size are dropped.
    private int mExpectedFrameSize = 0;
    // Camera id passed to Camera.open().
    private int mId = 0;
    // Native callback context variable.
    private long mNativeVideoCaptureDeviceAndroid = 0;
    // GL texture backing mSurfaceTexture; the camera requires a preview
    // target even though frames are consumed via onPreviewFrame.
    private int[] mGlTextures = null;
    private SurfaceTexture mSurfaceTexture = null;
    // GL_TEXTURE_EXTERNAL_OES from the OES_EGL_image_external extension
    // (not exposed by the GLES20 class at this API level).
    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    // Sensor orientation and facing, from Camera.CameraInfo in allocate().
    private int mCameraOrientation = 0;
    private int mCameraFacing = 0;
    // Last observed display rotation in degrees; refreshed on each frame.
    private int mDeviceOrientation = 0;

    // Negotiated capture configuration; null until allocate() succeeds.
    CaptureCapability mCurrentCapability = null;
    private static final String TAG = "VideoCapture";
82
83    @CalledByNative
84    public static VideoCapture createVideoCapture(
85            Context context, int id, long nativeVideoCaptureDeviceAndroid) {
86        return new VideoCapture(context, id, nativeVideoCaptureDeviceAndroid);
87    }
88
89    public VideoCapture(
90            Context context, int id, long nativeVideoCaptureDeviceAndroid) {
91        mContext = context;
92        mId = id;
93        mNativeVideoCaptureDeviceAndroid = nativeVideoCaptureDeviceAndroid;
94    }
95
96    // Returns true on success, false otherwise.
97    @CalledByNative
98    public boolean allocate(int width, int height, int frameRate) {
99        Log.d(TAG, "allocate: requested width=" + width +
100              ", height=" + height + ", frameRate=" + frameRate);
101        try {
102            mCamera = Camera.open(mId);
103        } catch (RuntimeException ex) {
104            Log.e(TAG, "allocate:Camera.open: " + ex);
105            return false;
106        }
107
108        try {
109            Camera.CameraInfo camera_info = new Camera.CameraInfo();
110            Camera.getCameraInfo(mId, camera_info);
111            mCameraOrientation = camera_info.orientation;
112            mCameraFacing = camera_info.facing;
113            mDeviceOrientation = getDeviceOrientation();
114            Log.d(TAG, "allocate: device orientation=" + mDeviceOrientation +
115                  ", camera orientation=" + mCameraOrientation +
116                  ", facing=" + mCameraFacing);
117
118            Camera.Parameters parameters = mCamera.getParameters();
119
120            // Calculate fps.
121            List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
122            if (listFpsRange == null || listFpsRange.size() == 0) {
123                Log.e(TAG, "allocate: no fps range found");
124                return false;
125            }
126            int frameRateInMs = frameRate * 1000;
127            Iterator itFpsRange = listFpsRange.iterator();
128            int[] fpsRange = (int[])itFpsRange.next();
129            // Use the first range as default.
130            int fpsMin = fpsRange[0];
131            int fpsMax = fpsRange[1];
132            int newFrameRate = (fpsMin + 999) / 1000;
133            while (itFpsRange.hasNext()) {
134                fpsRange = (int[])itFpsRange.next();
135                if (fpsRange[0] <= frameRateInMs &&
136                    frameRateInMs <= fpsRange[1]) {
137                    fpsMin = fpsRange[0];
138                    fpsMax = fpsRange[1];
139                    newFrameRate = frameRate;
140                    break;
141                }
142            }
143            frameRate = newFrameRate;
144            Log.d(TAG, "allocate: fps set to " + frameRate);
145
146            mCurrentCapability = new CaptureCapability();
147            mCurrentCapability.mDesiredFps = frameRate;
148
149            // Calculate size.
150            List<Camera.Size> listCameraSize =
151                    parameters.getSupportedPreviewSizes();
152            int minDiff = Integer.MAX_VALUE;
153            int matchedWidth = width;
154            int matchedHeight = height;
155            Iterator itCameraSize = listCameraSize.iterator();
156            while (itCameraSize.hasNext()) {
157                Camera.Size size = (Camera.Size)itCameraSize.next();
158                int diff = Math.abs(size.width - width) +
159                           Math.abs(size.height - height);
160                Log.d(TAG, "allocate: support resolution (" +
161                      size.width + ", " + size.height + "), diff=" + diff);
162                // TODO(wjia): Remove this hack (forcing width to be multiple
163                // of 32) by supporting stride in video frame buffer.
164                // Right now, VideoCaptureController requires compact YV12
165                // (i.e., with no padding).
166                if (diff < minDiff && (size.width % 32 == 0)) {
167                    minDiff = diff;
168                    matchedWidth = size.width;
169                    matchedHeight = size.height;
170                }
171            }
172            if (minDiff == Integer.MAX_VALUE) {
173                Log.e(TAG, "allocate: can not find a resolution whose width " +
174                           "is multiple of 32");
175                return false;
176            }
177            mCurrentCapability.mWidth = matchedWidth;
178            mCurrentCapability.mHeight = matchedHeight;
179            Log.d(TAG, "allocate: matched width=" + matchedWidth +
180                  ", height=" + matchedHeight);
181
182            calculateImageFormat(matchedWidth, matchedHeight);
183
184            if (parameters.isVideoStabilizationSupported()){
185                Log.d(TAG, "Image stabilization supported, currently: "
186                      + parameters.getVideoStabilization() + ", setting it.");
187                parameters.setVideoStabilization(true);
188            } else {
189                Log.d(TAG, "Image stabilization not supported.");
190            }
191
192            parameters.setPreviewSize(matchedWidth, matchedHeight);
193            parameters.setPreviewFormat(mImageFormat);
194            parameters.setPreviewFpsRange(fpsMin, fpsMax);
195            mCamera.setParameters(parameters);
196
197            // Set SurfaceTexture.
198            mGlTextures = new int[1];
199            // Generate one texture pointer and bind it as an external texture.
200            GLES20.glGenTextures(1, mGlTextures, 0);
201            GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
202            // No mip-mapping with camera source.
203            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
204                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
205            GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
206                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
207            // Clamp to edge is only option.
208            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
209                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
210            GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
211                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
212
213            mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
214            mSurfaceTexture.setOnFrameAvailableListener(null);
215
216            mCamera.setPreviewTexture(mSurfaceTexture);
217
218            int bufSize = matchedWidth * matchedHeight *
219                          ImageFormat.getBitsPerPixel(mImageFormat) / 8;
220            for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
221                byte[] buffer = new byte[bufSize];
222                mCamera.addCallbackBuffer(buffer);
223            }
224            mExpectedFrameSize = bufSize;
225        } catch (IOException ex) {
226            Log.e(TAG, "allocate: " + ex);
227            return false;
228        }
229
230        return true;
231    }
232
    /** Returns the preview width negotiated by allocate(), in pixels. */
    @CalledByNative
    public int queryWidth() {
        return mCurrentCapability.mWidth;
    }

    /** Returns the preview height negotiated by allocate(), in pixels. */
    @CalledByNative
    public int queryHeight() {
        return mCurrentCapability.mHeight;
    }

    /** Returns the capture rate negotiated by allocate(), in frames per second. */
    @CalledByNative
    public int queryFrameRate() {
        return mCurrentCapability.mDesiredFps;
    }
247
248    @CalledByNative
249    public int getColorspace() {
250        switch (mImageFormat){
251        case ImageFormat.YV12:
252            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
253        case ImageFormat.NV21:
254            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV21;
255        case ImageFormat.YUY2:
256            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_YUY2;
257        case ImageFormat.NV16:
258            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_NV16;
259        case ImageFormat.JPEG:
260            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_JPEG;
261        case ImageFormat.RGB_565:
262            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_RGB_565;
263        case ImageFormat.UNKNOWN:
264        default:
265            return AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
266        }
267    }
268
269    @CalledByNative
270    public int startCapture() {
271        if (mCamera == null) {
272            Log.e(TAG, "startCapture: camera is null");
273            return -1;
274        }
275
276        mPreviewBufferLock.lock();
277        try {
278            if (mIsRunning) {
279                return 0;
280            }
281            mIsRunning = true;
282        } finally {
283            mPreviewBufferLock.unlock();
284        }
285        mCamera.setPreviewCallbackWithBuffer(this);
286        mCamera.startPreview();
287        return 0;
288    }
289
290    @CalledByNative
291    public int stopCapture() {
292        if (mCamera == null) {
293            Log.e(TAG, "stopCapture: camera is null");
294            return 0;
295        }
296
297        mPreviewBufferLock.lock();
298        try {
299            if (!mIsRunning) {
300                return 0;
301            }
302            mIsRunning = false;
303        } finally {
304            mPreviewBufferLock.unlock();
305        }
306
307        mCamera.stopPreview();
308        mCamera.setPreviewCallbackWithBuffer(null);
309        return 0;
310    }
311
312    @CalledByNative
313    public void deallocate() {
314        if (mCamera == null)
315            return;
316
317        stopCapture();
318        try {
319            mCamera.setPreviewTexture(null);
320            if (mGlTextures != null)
321                GLES20.glDeleteTextures(1, mGlTextures, 0);
322            mCurrentCapability = null;
323            mCamera.release();
324            mCamera = null;
325        } catch (IOException ex) {
326            Log.e(TAG, "deallocate: failed to deallocate camera, " + ex);
327            return;
328        }
329    }
330
331    @Override
    /**
     * Camera callback delivering one preview frame in {@code data} (format
     * mImageFormat). Forwards the frame to native code along with the rotation
     * needed to display it upright, then returns the buffer to the camera for
     * reuse. Runs on the camera callback thread; holds mPreviewBufferLock for
     * the duration of the native call.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        mPreviewBufferLock.lock();
        try {
            // Drop frames delivered after stopCapture() cleared mIsRunning.
            if (!mIsRunning) {
                return;
            }
            // Guard against partially filled or mismatched buffers.
            if (data.length == mExpectedFrameSize) {
                int rotation = getDeviceOrientation();
                if (rotation != mDeviceOrientation) {
                    mDeviceOrientation = rotation;
                    Log.d(TAG,
                          "onPreviewFrame: device orientation=" +
                          mDeviceOrientation + ", camera orientation=" +
                          mCameraOrientation);
                }
                boolean flipVertical = false;
                boolean flipHorizontal = false;
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    // Front camera: combine sensor and display rotation, then
                    // invert direction to compensate for the mirrored preview.
                    rotation = (mCameraOrientation + rotation) % 360;
                    rotation = (360 - rotation) % 360;
                    flipHorizontal = (rotation == 270 || rotation == 90);
                    flipVertical = flipHorizontal;
                } else {
                    // Back camera: sensor rotation relative to the display.
                    rotation = (mCameraOrientation - rotation + 360) % 360;
                }
                nativeOnFrameAvailable(mNativeVideoCaptureDeviceAndroid,
                        data, mExpectedFrameSize,
                        rotation, flipVertical, flipHorizontal);
            }
        } finally {
            mPreviewBufferLock.unlock();
            // Always recycle the buffer, even for dropped frames, so the
            // camera does not run out of callback buffers.
            if (camera != null) {
                camera.addCallbackBuffer(data);
            }
        }
    }
368
    // TODO(wjia): investigate whether reading from texture could give better
    // performance and frame rate.
    /** Intentionally empty: frames are consumed via onPreviewFrame instead. */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) { }
373
374    private static class ChromiumCameraInfo {
375        private final int mId;
376        private final Camera.CameraInfo mCameraInfo;
377
378        private ChromiumCameraInfo(int index) {
379            mId = index;
380            mCameraInfo = new Camera.CameraInfo();
381            Camera.getCameraInfo(index, mCameraInfo);
382        }
383
384        @CalledByNative("ChromiumCameraInfo")
385        private static int getNumberOfCameras() {
386            return Camera.getNumberOfCameras();
387        }
388
389        @CalledByNative("ChromiumCameraInfo")
390        private static ChromiumCameraInfo getAt(int index) {
391            return new ChromiumCameraInfo(index);
392        }
393
394        @CalledByNative("ChromiumCameraInfo")
395        private int getId() {
396            return mId;
397        }
398
399        @CalledByNative("ChromiumCameraInfo")
400        private String getDeviceName() {
401            return  "camera " + mId + ", facing " +
402                    (mCameraInfo.facing ==
403                     Camera.CameraInfo.CAMERA_FACING_FRONT ? "front" : "back");
404        }
405
406        @CalledByNative("ChromiumCameraInfo")
407        private int getOrientation() {
408            return mCameraInfo.orientation;
409        }
410    }
411
    /**
     * Delivers one captured frame to the native VideoCaptureDeviceAndroid.
     *
     * @param nativeVideoCaptureDeviceAndroid native callback context pointer.
     * @param data raw frame bytes in the format reported by getColorspace().
     * @param length number of valid bytes in {@code data}.
     * @param rotation clockwise degrees to rotate the frame for upright display.
     * @param flipVertical whether the frame must be flipped vertically.
     * @param flipHorizontal whether the frame must be flipped horizontally.
     */
    private native void nativeOnFrameAvailable(
            long nativeVideoCaptureDeviceAndroid,
            byte[] data,
            int length,
            int rotation,
            boolean flipVertical,
            boolean flipHorizontal);
419
420    private int getDeviceOrientation() {
421        int orientation = 0;
422        if (mContext != null) {
423            WindowManager wm = (WindowManager)mContext.getSystemService(
424                    Context.WINDOW_SERVICE);
425            switch(wm.getDefaultDisplay().getRotation()) {
426                case Surface.ROTATION_90:
427                    orientation = 90;
428                    break;
429                case Surface.ROTATION_180:
430                    orientation = 180;
431                    break;
432                case Surface.ROTATION_270:
433                    orientation = 270;
434                    break;
435                case Surface.ROTATION_0:
436                default:
437                    orientation = 0;
438                    break;
439            }
440        }
441        return orientation;
442    }
443
    // Selects the preview pixel format for the given size. The dimensions are
    // currently unused: the format depends only on OS version and device model
    // (see DeviceImageFormatHack).
    private void calculateImageFormat(int width, int height) {
        mImageFormat = DeviceImageFormatHack.getImageFormat();
    }
447}
448