/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.media.filterfw;

import android.annotation.TargetApi;
import android.app.Activity;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.renderscript.RenderScript;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;

import java.util.HashSet;
import java.util.Set;

/**
 * The MffContext holds the state and resources of a Mobile Filter Framework processing instance.
 * Though it is possible to create multiple MffContext instances, typical applications will rely on
 * a single MffContext to perform all processing within the Mobile Filter Framework.
 *
 * The MffContext class declares two methods, {@link #onPause()} and {@link #onResume()}, which are
 * typically called when the application's activity is paused and resumed. These take care of
 * halting any processing in the context and releasing resources while the activity is paused.
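 *
 * <p>A minimal lifecycle sketch (illustrative; assumes a single MffContext owned by an
 * Activity):</p>
 *
 * <pre>{@code
 * // In the Activity's onCreate():
 * MffContext mffContext = new MffContext(this);
 *
 * // In the Activity's onPause() and onResume():
 * mffContext.onPause();
 * mffContext.onResume();
 *
 * // Once processing is no longer needed (e.g. in the Activity's onDestroy()):
 * mffContext.release();
 * }</pre>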
 */
public class MffContext {

    /**
     * Class to hold configuration information for MffContexts.
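     *
     * <p>A typical custom configuration might look like this (illustrative):</p>
     *
     * <pre>{@code
     * MffContext.Config config = new MffContext.Config();
     * config.requireCamera = false;   // this instance will not stream from the camera
     * config.requireOpenGL = false;   // do not fail if OpenGL ES 2 is unavailable
     * MffContext mffContext = new MffContext(context, config);
     * }</pre>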
     */
    public static class Config {
        /**
         * Set to true if this context will make use of the camera.
         * If this is set to false, the context does not guarantee that a camera is available for
         * streaming. That is, you may only use a CameraStreamer if the context's
         * {@link #isCameraStreamingSupported()} returns true.
         */
        public boolean requireCamera = true;

        /**
         * Set to true if this context requires OpenGL.
         * If this is set to false, the context does not guarantee that OpenGL is available. That
         * is, you may only use OpenGL (within filters running in this context) if the context's
         * {@link #isOpenGLSupported()} method returns true.
         */
        public boolean requireOpenGL = true;

        /**
         * On older Android versions the Camera may need a SurfaceView to render into in order to
         * function. You may specify a dummy SurfaceView here if you do not want the context to
         * create its own view. Note that your view may or may not be used. You cannot rely on
         * your dummy view to be used by the Camera. If you pass null, no dummy view will be used.
         * In this case your application may not run correctly on older devices if you use the
         * camera. This flag has no effect if you do not require the camera.
         */
        public SurfaceView dummySurface = null;

        /** Force MFF to not use OpenGL in its processing. */
        public boolean forceNoGL = false;
    }

    private static class State {
        public static final int STATE_RUNNING = 1;
        public static final int STATE_PAUSED = 2;
        public static final int STATE_DESTROYED = 3;

        public int current = STATE_RUNNING;
    }

    /** The application context. */
    private Context mApplicationContext = null;

    /** The set of filter graphs within this context. */
    private Set<FilterGraph> mGraphs = new HashSet<FilterGraph>();

    /** The set of graph runners within this context. */
    private Set<GraphRunner> mRunners = new HashSet<GraphRunner>();

    /** True, if the context preserves frames when paused. */
    private boolean mPreserveFramesOnPause = false;

    /** The shared CameraStreamer that streams camera frames to CameraSource filters. */
    private CameraStreamer mCameraStreamer = null;

    /** The current context state. */
    private State mState = new State();

    /** A dummy SurfaceView that is required for Camera operation on older devices. */
    private SurfaceView mDummySurfaceView = null;

    /** Handler to execute code in the context's thread, such as issuing callbacks. */
    private Handler mHandler = null;

    /** Flag whether OpenGL ES 2 is supported in this context. */
    private boolean mGLSupport;

    /** Flag whether camera streaming is supported in this context. */
    private boolean mCameraStreamingSupport;

    /** RenderScript base master class. */
    private RenderScript mRenderScript;

    /**
     * Creates a new MffContext with the default configuration.
     *
     * An MffContext must be attached to a Context object of an application. You may create
     * multiple MffContexts; however, data cannot be shared between them. The context must be
     * created in a thread with a Looper (such as the main/UI thread).
     *
     * On older versions of Android, the MffContext may create a visible dummy view for the
     * camera to render into. This is a 1x1 SurfaceView that is placed in the top-left corner.
     *
     * @param context The application context to attach the MffContext to.
     */
    public MffContext(Context context) {
        init(context, new Config());
    }

    /**
     * Creates a new MffContext with the specified configuration.
     *
     * An MffContext must be attached to a Context object of an application. You may create
     * multiple MffContexts; however, data cannot be shared between them. The context must be
     * created in a thread with a Looper (such as the main/UI thread).
     *
     * On older versions of Android, the MffContext may create a visible dummy view for the
     * camera to render into. This is a 1x1 SurfaceView that is placed in the top-left corner.
     * You may alternatively specify your own SurfaceView in the configuration.
     *
     * @param context The application context to attach the MffContext to.
     * @param config The configuration to use.
     *
     * @throws RuntimeException If no context for the requested configuration could be created.
     */
    public MffContext(Context context, Config config) {
        init(context, config);
    }

    /**
     * Put all processing in the context on hold.
     * This is typically called from your application's <code>onPause()</code> method, and will
     * stop all running graphs (closing their filters). If the context does not preserve frames on
     * pause (see {@link #setPreserveFramesOnPause(boolean)}), all frames attached to this context
     * are released.
     */
    public void onPause() {
        synchronized (mState) {
            if (mState.current == State.STATE_RUNNING) {
                if (mCameraStreamer != null) {
                    mCameraStreamer.halt();
                }
                stopRunners(true);
                mState.current = State.STATE_PAUSED;
            }
        }
    }

    /**
     * Resumes the processing in this context.
     * This is typically called from the application's <code>onResume()</code> method, and will
     * resume processing any of the previously stopped filter graphs.
     */
    public void onResume() {
        synchronized (mState) {
            if (mState.current == State.STATE_PAUSED) {
                resumeRunners();
                resumeCamera();
                mState.current = State.STATE_RUNNING;
            }
        }
    }

    /**
     * Release all resources associated with this context.
     * This will also stop any running graphs.
     */
    public void release() {
        synchronized (mState) {
            if (mState.current != State.STATE_DESTROYED) {
                if (mCameraStreamer != null) {
                    mCameraStreamer.stop();
                    mCameraStreamer.tearDown();
                }
                if (Build.VERSION.SDK_INT >= 11) {
                    maybeDestroyRenderScript();
                }
                stopRunners(false);
                waitUntilStopped();
                tearDown();
                mState.current = State.STATE_DESTROYED;
            }
        }
    }

    /**
     * Set whether frames are preserved when the context is paused.
     * When set to false, all Frames associated with this context are released when the context
     * is paused. The default value is false.
     *
     * @param preserve true, to preserve frames when the context is paused.
     *
     * @see #getPreserveFramesOnPause()
     */
    public void setPreserveFramesOnPause(boolean preserve) {
        mPreserveFramesOnPause = preserve;
    }

    /**
     * Returns whether frames are preserved when the context is paused.
     *
     * @return true, if frames are preserved when the context is paused.
     *
     * @see #setPreserveFramesOnPause(boolean)
     */
    public boolean getPreserveFramesOnPause() {
        return mPreserveFramesOnPause;
    }

    /**
     * Returns the application context that the MffContext is attached to.
     *
     * @return The application context for this context.
     */
    public Context getApplicationContext() {
        return mApplicationContext;
    }

    /**
     * Returns the context's shared CameraStreamer.
     * Use the CameraStreamer to control the Camera. Frames from the Camera are typically streamed
     * to CameraSource filters.
     *
     * @return The context's CameraStreamer instance.
     */
    public CameraStreamer getCameraStreamer() {
        if (mCameraStreamer == null) {
            mCameraStreamer = new CameraStreamer(this);
        }
        return mCameraStreamer;
    }

    /**
     * Set the default EGL config chooser.
     *
     * When an EGL context is required by the MFF, the channel sizes specified here are used. The
     * default sizes are 8 bits per R,G,B,A channel and 0 bits for depth and stencil channels.
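     *
     * <p>For example, to request a 16-bit depth buffer in addition to the default 8-bit RGBA
     * channels (illustrative):</p>
     *
     * <pre>{@code
     * MffContext.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
     * }</pre>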
     *
     * @param redSize The size of the red channel in bits.
     * @param greenSize The size of the green channel in bits.
     * @param blueSize The size of the blue channel in bits.
     * @param alphaSize The size of the alpha channel in bits.
     * @param depthSize The size of the depth channel in bits.
     * @param stencilSize The size of the stencil channel in bits.
     */
    public static void setEGLConfigChooser(int redSize,
                                           int greenSize,
                                           int blueSize,
                                           int alphaSize,
                                           int depthSize,
                                           int stencilSize) {
        RenderTarget.setEGLConfigChooser(redSize,
                                         greenSize,
                                         blueSize,
                                         alphaSize,
                                         depthSize,
                                         stencilSize);
    }

    /**
     * Returns true, if this context supports using OpenGL.
     * @return true, if this context supports using OpenGL.
     */
    public final boolean isOpenGLSupported() {
        return mGLSupport;
    }

    /**
     * Returns true, if this context supports camera streaming.
     * @return true, if this context supports camera streaming.
     */
    public final boolean isCameraStreamingSupported() {
        return mCameraStreamingSupport;
    }

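    /**
     * Returns the context's shared RenderScript instance, creating it lazily on first use.
     * Only available on API level 11 and above.
     *
     * @return The context's RenderScript instance.
     */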
    @TargetApi(11)
    public final RenderScript getRenderScript() {
        if (mRenderScript == null) {
            mRenderScript = RenderScript.create(mApplicationContext);
        }
        return mRenderScript;
    }

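    /** Throws a RuntimeException if OpenGL ES 2 is not supported in this context. */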
    final void assertOpenGLSupported() {
        if (!isOpenGLSupported()) {
            throw new RuntimeException("Attempting to use OpenGL ES 2 in a context that does not "
                    + "support it!");
        }
    }

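    /**
     * Registers a FilterGraph with this context so that it is torn down when the context is
     * released.
     */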
    void addGraph(FilterGraph graph) {
        synchronized (mGraphs) {
            mGraphs.add(graph);
        }
    }

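    /**
     * Registers a GraphRunner with this context so that it is stopped when the context is paused
     * and torn down when the context is released.
     */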
    void addRunner(GraphRunner runner) {
        synchronized (mRunners) {
            mRunners.add(runner);
        }
    }

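    /** Returns the dummy SurfaceView used for Camera operation on older devices, or null. */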
    SurfaceView getDummySurfaceView() {
        return mDummySurfaceView;
    }

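    /** Posts a Runnable to the Handler of the thread that this context was created on. */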
    void postRunnable(Runnable runnable) {
        mHandler.post(runnable);
    }

    private void init(Context context, Config config) {
        determineGLSupport(context, config);
        determineCameraSupport(config);
        createHandler();
        mApplicationContext = context.getApplicationContext();
        fetchDummySurfaceView(context, config);
    }

    private void fetchDummySurfaceView(Context context, Config config) {
        if (config.requireCamera && CameraStreamer.requireDummySurfaceView()) {
            mDummySurfaceView = config.dummySurface != null
                    ? config.dummySurface
                    : createDummySurfaceView(context);
        }
    }

    private void determineGLSupport(Context context, Config config) {
        if (config.forceNoGL) {
            mGLSupport = false;
        } else {
            mGLSupport = getPlatformSupportsGLES2(context);
            if (config.requireOpenGL && !mGLSupport) {
                throw new RuntimeException("Cannot create context that requires GL support on "
                        + "this platform!");
            }
        }
    }

    private void determineCameraSupport(Config config) {
        mCameraStreamingSupport = (CameraStreamer.getNumberOfCameras() > 0);
        if (config.requireCamera && !mCameraStreamingSupport) {
            throw new RuntimeException("Cannot create context that requires a camera on "
                    + "this platform!");
        }
    }

    private static boolean getPlatformSupportsGLES2(Context context) {
        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        ConfigurationInfo configurationInfo = am.getDeviceConfigurationInfo();
        return configurationInfo.reqGlEsVersion >= 0x20000;
    }

    private void createHandler() {
        if (Looper.myLooper() == null) {
            throw new RuntimeException("MffContext must be created in a thread with a Looper!");
        }
        mHandler = new Handler();
    }

    private void stopRunners(boolean haltOnly) {
        synchronized (mRunners) {
            // Halt all runners (does nothing if not running)
            for (GraphRunner runner : mRunners) {
                if (haltOnly) {
                    runner.halt();
                } else {
                    runner.stop();
                }
            }
            // Flush all graphs if requested (this is queued up after the call to halt)
            if (!mPreserveFramesOnPause) {
                for (GraphRunner runner : mRunners) {
                    runner.flushFrames();
                }
            }
        }
    }

    private void resumeRunners() {
        synchronized (mRunners) {
            for (GraphRunner runner : mRunners) {
                runner.restart();
            }
        }
    }

    private void resumeCamera() {
        // Restart only affects previously halted cameras that were running.
        if (mCameraStreamer != null) {
            mCameraStreamer.restart();
        }
    }

    private void waitUntilStopped() {
        for (GraphRunner runner : mRunners) {
            runner.waitUntilStop();
        }
    }

    private void tearDown() {
        // Tear down graphs
        for (FilterGraph graph : mGraphs) {
            graph.tearDown();
        }

        // Tear down runners
        for (GraphRunner runner : mRunners) {
            runner.tearDown();
        }
    }

    @SuppressWarnings("deprecation")
    private SurfaceView createDummySurfaceView(Context context) {
        // This is only called on Gingerbread devices, so the deprecation warning is unnecessary.
        SurfaceView dummySurfaceView = new SurfaceView(context);
        dummySurfaceView.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        // If we have an activity for this context, we'll add the SurfaceView to it (as a 1x1 view
        // in the top-left corner). If not, we warn the user that they may need to add one manually.
        Activity activity = findActivityForContext(context);
        if (activity != null) {
            ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(1, 1);
            activity.addContentView(dummySurfaceView, params);
        } else {
            Log.w("MffContext", "Could not find activity for dummy surface! Consider specifying "
                    + "your own SurfaceView!");
        }
        return dummySurfaceView;
    }

    private Activity findActivityForContext(Context context) {
        return (context instanceof Activity) ? (Activity) context : null;
    }

    @TargetApi(11)
    private void maybeDestroyRenderScript() {
        if (mRenderScript != null) {
            mRenderScript.destroy();
            mRenderScript = null;
        }
    }

}