Camera2Source.java revision db075afc85b6b50a5d3a988a17ed0d4e09ef0823
/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.media.filterfw.samples.simplecamera;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;
import android.view.Surface;
import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import androidx.media.filterfw.Filter;
import androidx.media.filterfw.Frame;
import androidx.media.filterfw.FrameImage2D;
import androidx.media.filterfw.FrameType;
import androidx.media.filterfw.FrameValue;
import androidx.media.filterfw.MffContext;
import androidx.media.filterfw.OutputPort;
import androidx.media.filterfw.Signature;

import java.util.ArrayList;
import java.util.List;

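/**
 * MFF source filter that streams preview frames from the camera2 API. Frames arrive as
 * YUV_420_888 buffers on a Surface backed by a RenderScript Allocation, are converted
 * to RGBA with ScriptIntrinsicYuvToRGB, and are pushed downstream as FrameImage2D
 * objects together with the sensor orientation.
 */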
public class Camera2Source extends Filter implements Allocation.OnBufferAvailableListener {

    private boolean mNewFrameAvailable = false;
    private FrameType mOutputType;
    private static final String TAG = "Camera2Source";
    private CameraManager mCameraManager;
    private CameraDevice mCamera;
    private RenderScript mRS;
    private Surface mSurface;
    private CameraCharacteristics mProperties;
    private CameraTestThread mLooperThread;
    private int mHeight = 480;
    private int mWidth = 640;
    private Allocation mAllocationIn;
    private ScriptIntrinsicYuvToRGB rgbConverter;
    private Allocation mAllocationOut;
    private Bitmap mBitmap;

    class MyCameraListener extends CameraManager.AvailabilityListener {

        @Override
        public void onCameraAvailable(String cameraId) {
            Log.v(TAG, "camera available to open");
        }

        @Override
        public void onCameraUnavailable(String cameraId) {
            Log.v(TAG, "camera unavailable to open");
        }

    }

    class MyCaptureListener extends CameraDevice.CaptureListener {

        @Override
        public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
                TotalCaptureResult result) {
            Log.v(TAG, "in onCaptureCompleted");
        }

        @Override
        public void onCaptureFailed(CameraDevice camera, CaptureRequest request,
                CaptureFailure failure) {
            Log.v(TAG, "onCaptureFailed is being called");
        }

    }

    public Camera2Source(MffContext context, String name) {
        super(context, name);
        mOutputType = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);

        Context ctx = context.getApplicationContext();
        mCameraManager = (CameraManager) ctx.getSystemService(Context.CAMERA_SERVICE);

        mRS = RenderScript.create(ctx);
    }

    @Override
    public Signature getSignature() {
        return new Signature()
                .addOutputPort("timestamp", Signature.PORT_OPTIONAL, FrameType.single(long.class))
                .addOutputPort("video", Signature.PORT_REQUIRED, mOutputType)
                .addOutputPort("orientation", Signature.PORT_REQUIRED,
                        FrameType.single(float.class))
                .disallowOtherPorts();
    }

    @Override
    protected void onClose() {
        Log.v(TAG, "onClose being called");
        try {
            mCamera.close();
            mSurface.release();
            mLooperThread.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

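    /*
     * onOpen wires up the capture pipeline: it starts a dedicated looper thread for
     * camera callbacks, opens camera "0" (assumed to be the back-facing camera) through
     * the BlockingCameraManager, points the camera at the Surface of a YUV Allocation,
     * and starts a repeating preview request so frames keep arriving until onClose().
     */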
    @Override
    protected void onOpen() {
        mLooperThread = new CameraTestThread();
        Handler mHandler;
        try {
            mHandler = mLooperThread.start();
        } catch (Exception e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        try {
            String backCameraId = "0";
            BlockingCameraManager blkManager = new BlockingCameraManager(mCameraManager);
            mCamera = blkManager.openCamera(backCameraId, /*listener*/null, mHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        } catch (BlockingOpenException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
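
        // Set up the YUV-to-RGBA conversion path: a YUV_420_888 Allocation created with
        // USAGE_IO_INPUT exposes a Surface the camera writes into, and the
        // ScriptIntrinsicYuvToRGB intrinsic converts each received buffer into the RGBA
        // Allocation that backs mBitmap.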
        Element ele = Element.createPixel(mRS, Element.DataType.UNSIGNED_8,
                Element.DataKind.PIXEL_YUV);

        rgbConverter = ScriptIntrinsicYuvToRGB.create(mRS, ele);
        Type.Builder yuvBuilder = new Type.Builder(mRS, ele);

        yuvBuilder.setYuvFormat(ImageFormat.YUV_420_888);
        yuvBuilder.setX(mWidth);
        yuvBuilder.setY(mHeight);
        mAllocationIn = Allocation.createTyped(mRS, yuvBuilder.create(),
                Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
        mSurface = mAllocationIn.getSurface();
        mAllocationIn.setOnBufferAvailableListener(this);
        rgbConverter.setInput(mAllocationIn);

        mBitmap = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
        mAllocationOut = Allocation.createFromBitmap(mRS, mBitmap);

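        // Route the camera's output to the Allocation's Surface and start a repeating
        // preview request; this revision of the camera2 API configures outputs directly
        // on the CameraDevice rather than through a capture session.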
        Log.v(TAG, "mcamera: " + mCamera);

        List<Surface> surfaces = new ArrayList<Surface>();
        surfaces.add(mSurface);
        CaptureRequest.Builder mCaptureRequest = null;
        try {
            mCamera.configureOutputs(surfaces);
            mCaptureRequest = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mCaptureRequest.addTarget(mSurface);
        } catch (CameraAccessException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

        try {
            mCamera.setRepeatingRequest(mCaptureRequest.build(), new MyCaptureListener(),
                    mHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
        mProperties = null;
        try {
            mProperties = mCameraManager.getCameraCharacteristics(mCamera.getId());
        } catch (CameraAccessException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }
    }

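    /*
     * onProcess is driven by the filter graph: it only produces output once
     * onBufferAvailable() has flagged a new camera buffer. The latest YUV buffer is
     * converted to RGBA, wrapped in a FrameImage2D pushed on the "video" port, and the
     * sensor orientation is reported on the "orientation" port.
     */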
    @Override
    protected void onProcess() {
        Log.v(TAG, "onProcess");
        if (nextFrame()) {
            OutputPort outPort = getConnectedOutputPort("video");

            // Create a 2D frame that will hold the output
            int[] dims = new int[] {
                    mWidth, mHeight
            };
            FrameImage2D outputFrame = Frame.create(mOutputType, dims).asFrameImage2D();
            rgbConverter.forEach(mAllocationOut);
            mAllocationOut.copyTo(mBitmap);
            outputFrame.setBitmap(mBitmap);
            outPort.pushFrame(outputFrame);
            outputFrame.release();

            OutputPort orientationPort = getConnectedOutputPort("orientation");
            FrameValue orientationFrame = orientationPort.fetchAvailableFrame(null).asFrameValue();

            // FIXME: Fall back to a hardcoded value because SENSOR_ORIENTATION returns
            // null on some devices (Qualcomm bug).
            Integer orientation = mProperties.get(CameraCharacteristics.SENSOR_ORIENTATION);
            float degrees;
            if (orientation != null) {
                degrees = orientation.floatValue();
            } else {
                degrees = 90.0f;
            }
            orientationFrame.setValue(degrees);
            orientationPort.pushFrame(orientationFrame);
        }
    }

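    /*
     * nextFrame() and onBufferAvailable() form the handshake between the camera thread
     * and the filter thread: the camera thread latches new buffers with ioReceive() and
     * calls wakeUp(), while the filter thread sleeps via enterSleepState() whenever no
     * new frame has arrived yet.
     */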
    private synchronized boolean nextFrame() {
        boolean frameAvailable = mNewFrameAvailable;
        if (frameAvailable) {
            mNewFrameAvailable = false;
        } else {
            enterSleepState();
        }
        return frameAvailable;
    }

    @Override
    public void onBufferAvailable(Allocation a) {
        Log.v(TAG, "onBufferAvailable");
        a.ioReceive();
        synchronized (this) {
            mNewFrameAvailable = true;
        }
        wakeUp();
    }

}