EffectsRecorder.java revision 967e964bba87acf4b6d1e9f66446fbd1a25a7b76
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import com.android.gallery3d.common.ApiHelper;

import java.io.FileDescriptor;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

/**
 * Encapsulates the mobile filter framework components needed to record video
 * with effects applied. Modeled after MediaRecorder.
 */
public class EffectsRecorder {
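    /*
     * Illustrative call sequence, sketched from this class's public API (the
     * variable names below are placeholders; VideoCamera is the real caller
     * and remains the authoritative example):
     *
     *   EffectsRecorder recorder = new EffectsRecorder(context);
     *   recorder.setCamera(camera);
     *   recorder.setProfile(profile);
     *   recorder.setPreviewSurfaceTexture(texture, width, height);
     *   recorder.setEffect(EffectsRecorder.EFFECT_BACKDROPPER, backgroundVideoUri);
     *   recorder.startPreview();
     *   recorder.startRecording();
     *   // ...
     *   recorder.stopRecording();
     *   recorder.stopPreview();
     *   recorder.release();
     */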
    private static final String TAG = "EffectsRecorder";

    private static Class<?> sClassFilter;
    private static Method sFilterIsAvailable;
    private static EffectsRecorder sEffectsRecorder;

    private static boolean sReflectionInited = false;

    private static Class<?> sClsLearningDoneListener;
    private static Class<?> sClsOnRunnerDoneListener;
    private static Class<?> sClsOnRecordingDoneListener;
    private static Class<?> sClsSurfaceTextureSourceListener;

    private static Method sFilterSetInputValue;

    private static Constructor<?> sCtPoint;
    private static Constructor<?> sCtQuad;

    private static Method sLearningDoneListenerOnLearningDone;

    private static Method sObjectEquals;
    private static Method sObjectToString;

    private static Class<?> sClsGraphRunner;
    private static Method sGraphRunnerGetGraph;
    private static Method sGraphRunnerSetDoneCallback;
    private static Method sGraphRunnerRun;
    private static Method sGraphRunnerGetError;
    private static Method sGraphRunnerStop;

    private static Method sFilterGraphGetFilter;
    private static Method sFilterGraphTearDown;

    private static Method sOnRunnerDoneListenerOnRunnerDone;

    private static Class<?> sClsGraphEnvironment;
    private static Constructor<?> sCtGraphEnvironment;
    private static Method sGraphEnvironmentCreateGLEnvironment;
    private static Method sGraphEnvironmentGetRunner;
    private static Method sGraphEnvironmentAddReferences;
    private static Method sGraphEnvironmentLoadGraph;
    private static Method sGraphEnvironmentGetContext;

    private static Method sFilterContextGetGLEnvironment;
    private static Method sGLEnvironmentIsActive;
    private static Method sGLEnvironmentActivate;
    private static Method sGLEnvironmentDeactivate;
    private static Method sSurfaceTextureTargetDisconnect;
    private static Method sOnRecordingDoneListenerOnRecordingDone;
    private static Method sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady;

    private Object mLearningDoneListener;
    private Object mRunnerDoneCallback;
    private Object mSourceReadyCallback;
    // A callback to finalize the media after the recording is done.
    private Object mRecordingDoneListener;

    static {
        try {
            sClassFilter = Class.forName("android.filterfw.core.Filter");
            sFilterIsAvailable = sClassFilter.getMethod("isAvailable",
                    String.class);
        } catch (ClassNotFoundException ex) {
            Log.v(TAG, "Can't find the class android.filterfw.core.Filter");
        } catch (NoSuchMethodException e) {
            Log.v(TAG, "Can't find the method Filter.isAvailable");
        }
    }

    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

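    // Status codes delivered to the registered EffectsListener through
    // onEffectsUpdate(). Delivery is asynchronous: sendMessage() posts the
    // callback to the main looper, so listeners always run on the UI thread.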
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
    public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;

    private Context mContext;
    private Handler mHandler;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    private double mCaptureRate = 0;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;
    private int mMaxDurationMs = 0;
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private int mCameraDisplayOrientation;

    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private Object mGraphEnv;
    private int mGraphId;
    private Object mRunner = null;
    private Object mOldRunner = null;

    private SurfaceTexture mTextureSource;

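    // Recorder lifecycle, as implemented by the methods below (a summary of
    // this file's state transitions, not an external contract):
    //   CONFIGURE -> STARTING_PREVIEW -> PREVIEW <-> RECORD
    // startPreview() parks in WAITING_FOR_SURFACE when no preview
    // SurfaceTexture has been set yet, and release() moves any state to
    // RELEASED, after which most public calls throw.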
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private SoundClips.Player mSoundPlayer;

    /**
     * Determines whether a given effect is supported at runtime.
     * Some effects require libraries that are not available on all devices.
     */
    public static boolean isEffectSupported(int effectId) {
        if (sFilterIsAvailable == null) return false;

        try {
            switch (effectId) {
                case EFFECT_GOOFY_FACE:
                    return (Boolean) sFilterIsAvailable.invoke(null,
                            "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
                case EFFECT_BACKDROPPER:
                    return (Boolean) sFilterIsAvailable.invoke(null,
                            "android.filterpacks.videoproc.BackDropperFilter");
                default:
                    return false;
            }
        } catch (Exception ex) {
            Log.e(TAG, "Failed to check filter availability", ex);
        }
        return false;
    }
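    // Illustrative caller-side guard (hideBackdropperOption() is a
    // hypothetical helper): skip the effect UI entirely on devices that lack
    // the required filter packs.
    //
    //   if (!EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_BACKDROPPER)) {
    //       hideBackdropperOption();
    //   }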

    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");

        if (!sReflectionInited) {
            try {
                sFilterSetInputValue = sClassFilter.getMethod("setInputValue",
                        new Class[] {String.class, Object.class});

                Class<?> clsPoint = Class.forName("android.filterfw.geometry.Point");
                sCtPoint = clsPoint.getConstructor(new Class[] {float.class,
                        float.class});

                Class<?> clsQuad = Class.forName("android.filterfw.geometry.Quad");
                sCtQuad = clsQuad.getConstructor(new Class[] {clsPoint, clsPoint,
                        clsPoint, clsPoint});

                Class<?> clsBackDropperFilter = Class.forName(
                        "android.filterpacks.videoproc.BackDropperFilter");
                sClsLearningDoneListener = Class.forName(
                        "android.filterpacks.videoproc.BackDropperFilter$LearningDoneListener");
                sLearningDoneListenerOnLearningDone = sClsLearningDoneListener
                        .getMethod("onLearningDone", new Class[] {clsBackDropperFilter});

                sObjectEquals = Object.class.getMethod("equals", new Class[] {Object.class});
                sObjectToString = Object.class.getMethod("toString");

                sClsOnRunnerDoneListener = Class.forName(
                        "android.filterfw.core.GraphRunner$OnRunnerDoneListener");
                sOnRunnerDoneListenerOnRunnerDone = sClsOnRunnerDoneListener.getMethod(
                        "onRunnerDone", new Class[] {int.class});

                sClsGraphRunner = Class.forName("android.filterfw.core.GraphRunner");
                sGraphRunnerGetGraph = sClsGraphRunner.getMethod("getGraph");
                sGraphRunnerSetDoneCallback = sClsGraphRunner.getMethod(
                        "setDoneCallback", new Class[] {sClsOnRunnerDoneListener});
                sGraphRunnerRun = sClsGraphRunner.getMethod("run");
                sGraphRunnerGetError = sClsGraphRunner.getMethod("getError");
                sGraphRunnerStop = sClsGraphRunner.getMethod("stop");

                Class<?> clsFilterContext = Class.forName("android.filterfw.core.FilterContext");
                sFilterContextGetGLEnvironment = clsFilterContext.getMethod(
                        "getGLEnvironment");

                Class<?> clsFilterGraph = Class.forName("android.filterfw.core.FilterGraph");
                sFilterGraphGetFilter = clsFilterGraph.getMethod("getFilter",
                        new Class[] {String.class});
                sFilterGraphTearDown = clsFilterGraph.getMethod("tearDown",
                        new Class[] {clsFilterContext});

                sClsGraphEnvironment = Class.forName("android.filterfw.GraphEnvironment");
                sCtGraphEnvironment = sClsGraphEnvironment.getConstructor();
                sGraphEnvironmentCreateGLEnvironment = sClsGraphEnvironment.getMethod(
                        "createGLEnvironment");
                sGraphEnvironmentGetRunner = sClsGraphEnvironment.getMethod(
                        "getRunner", new Class[] {int.class, int.class});
                sGraphEnvironmentAddReferences = sClsGraphEnvironment.getMethod(
                        "addReferences", new Class[] {Object[].class});
                sGraphEnvironmentLoadGraph = sClsGraphEnvironment.getMethod(
                        "loadGraph", new Class[] {Context.class, int.class});
                sGraphEnvironmentGetContext = sClsGraphEnvironment.getMethod(
                        "getContext");

                Class<?> clsGLEnvironment = Class.forName("android.filterfw.core.GLEnvironment");
                sGLEnvironmentIsActive = clsGLEnvironment.getMethod("isActive");
                sGLEnvironmentActivate = clsGLEnvironment.getMethod("activate");
                sGLEnvironmentDeactivate = clsGLEnvironment.getMethod("deactivate");

                Class<?> clsSurfaceTextureTarget = Class.forName(
                        "android.filterpacks.videosrc.SurfaceTextureTarget");
                sSurfaceTextureTargetDisconnect = clsSurfaceTextureTarget.getMethod(
                        "disconnect", new Class[] {clsFilterContext});

                sClsOnRecordingDoneListener = Class.forName(
                        "android.filterpacks.videosink.MediaEncoderFilter$OnRecordingDoneListener");
                sOnRecordingDoneListenerOnRecordingDone =
                        sClsOnRecordingDoneListener.getMethod("onRecordingDone");

                sClsSurfaceTextureSourceListener = Class.forName(
                        "android.filterpacks.videosrc.SurfaceTextureSource$SurfaceTextureSourceListener");
                sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady =
                        sClsSurfaceTextureSourceListener.getMethod(
                                "onSurfaceTextureSourceReady",
                                new Class[] {SurfaceTexture.class});
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }

            sReflectionInited = true;
        }

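        // The filter framework's listener interfaces are only known through
        // reflection, so each one is implemented with a dynamic proxy. Every
        // proxy funnels into a SerializableInvocationHandler, which dispatches
        // on the reflected Method back to the invoke*() callbacks below.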
        sEffectsRecorder = this;
        mLearningDoneListener = Proxy.newProxyInstance(
                sClsLearningDoneListener.getClassLoader(),
                new Class[] {sClsLearningDoneListener},
                new SerializableInvocationHandler());
        mRunnerDoneCallback = Proxy.newProxyInstance(
                sClsOnRunnerDoneListener.getClassLoader(),
                new Class[] {sClsOnRunnerDoneListener},
                new SerializableInvocationHandler());
        mSourceReadyCallback = Proxy.newProxyInstance(
                sClsSurfaceTextureSourceListener.getClassLoader(),
                new Class[] {sClsSurfaceTextureSourceListener},
                new SerializableInvocationHandler());
        mRecordingDoneListener = Proxy.newProxyInstance(
                sClsOnRecordingDoneListener.getClassLoader(),
                new Class[] {sClsOnRecordingDoneListener},
                new SerializableInvocationHandler());

        mContext = context;
        mHandler = new Handler(Looper.getMainLooper());
        mSoundPlayer = SoundClips.getPlayer(context);
    }

    public synchronized void setCamera(Camera cameraDevice) {
        switch (mState) {
            case STATE_PREVIEW:
                throw new RuntimeException("setCamera cannot be called while previewing!");
            case STATE_RECORD:
                throw new RuntimeException("setCamera cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setCamera called on an already released recorder!");
            default:
                break;
        }

        mCameraDevice = cameraDevice;
    }

    public void setProfile(CamcorderProfile profile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setProfile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setProfile called on an already released recorder!");
            default:
                break;
        }
        mProfile = profile;
    }

    public void setOutputFile(String outputFile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = outputFile;
        mFd = null;
    }

    public void setOutputFile(FileDescriptor fd) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = null;
        mFd = fd;
    }

    /**
     * Sets the maximum file size (in bytes) of the recording session.
     * This will be passed on to the MediaEncoderFilter and ultimately to the
     * MediaRecorder. If zero or negative, the MediaRecorder will disable the
     * limit.
     */
    public synchronized void setMaxFileSize(long maxFileSize) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setMaxFileSize called on an already released recorder!");
            default:
                break;
        }
        mMaxFileSize = maxFileSize;
    }

    /**
     * Sets the maximum recording duration (in ms) for the next recording
     * session. Setting it to zero (the default) disables the limit.
     */
    public synchronized void setMaxDuration(int maxDurationMs) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxDuration cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setMaxDuration called on an already released recorder!");
            default:
                break;
        }
        mMaxDurationMs = maxDurationMs;
    }

    public void setCaptureRate(double fps) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setCaptureRate cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setCaptureRate called on an already released recorder!");
            default:
                break;
        }

        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
        mCaptureRate = fps;
    }

    public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
                                  int previewWidth,
                                  int previewHeight) {
        if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException(
                    "setPreviewSurfaceTexture cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setPreviewSurfaceTexture called on an already released recorder!");
            default:
                break;
        }

        mPreviewSurfaceTexture = previewSurfaceTexture;
        mPreviewWidth = previewWidth;
        mPreviewHeight = previewHeight;

        switch (mState) {
            case STATE_WAITING_FOR_SURFACE:
                startPreview();
                break;
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                initializeEffect(true);
                break;
        }
    }

    public void setEffect(int effect, Object effectParameter) {
        // Avoid calling toString() here: string concatenation renders a null
        // parameter safely instead of throwing.
        if (mLogVerbose) Log.v(TAG,
                               "setEffect: effect ID " + effect +
                               ", parameter " + effectParameter);
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setEffect cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setEffect called on an already released recorder!");
            default:
                break;
        }

        mEffect = effect;
        mEffectParameter = effectParameter;

        if (mState == STATE_PREVIEW ||
                mState == STATE_STARTING_PREVIEW) {
            initializeEffect(false);
        }
    }

    public interface EffectsListener {
        public void onEffectsUpdate(int effectId, int effectMsg);
        public void onEffectsError(Exception exception, String filePath);
    }

    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }

    private void setFaceDetectOrientation() {
        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
            Object rotateFilter = getGraphFilter(mRunner, "rotate");
            Object metaRotateFilter = getGraphFilter(mRunner, "metarotate");
            setInputValue(rotateFilter, "rotation", mOrientationHint);
            int reverseDegrees = (360 - mOrientationHint) % 360;
            setInputValue(metaRotateFilter, "rotation", reverseDegrees);
        }
    }

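    // The recording region is a unit-square Quad handed to the "recorder"
    // filter. Worked example: for the front camera with mOrientationHint == 0,
    // the quad below is {br, bl, tr, tl}, swapping the x-coordinates of each
    // corner pair, which undoes the horizontal mirroring that the front-camera
    // preview applies.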
    private void setRecordingOrientation() {
        if (mState != STATE_RECORD && mRunner != null) {
            Object bl = newInstance(sCtPoint, new Object[] {0, 0});
            Object br = newInstance(sCtPoint, new Object[] {1, 0});
            Object tl = newInstance(sCtPoint, new Object[] {0, 1});
            Object tr = newInstance(sCtPoint, new Object[] {1, 1});
            Object recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use an identity transform
                recordingRegion = newInstance(sCtQuad, new Object[] {bl, br, tl, tr});
            } else {
                // The recording region needs to be tweaked for front cameras,
                // since they mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = newInstance(sCtQuad, new Object[] {br, bl, tr, tl});
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = newInstance(sCtQuad, new Object[] {tl, tr, bl, br});
                }
            }
            Object recorder = getGraphFilter(mRunner, "recorder");
            setInputValue(recorder, "inputRegion", recordingRegion);
        }
    }

    public void setOrientationHint(int degrees) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setOrientationHint called on an already released recorder!");
            default:
                break;
        }
        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
        mOrientationHint = degrees;
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    public void setCameraDisplayOrientation(int orientation) {
        if (mState != STATE_CONFIGURE) {
            throw new RuntimeException(
                "setCameraDisplayOrientation called after configuration!");
        }
        mCameraDisplayOrientation = orientation;
    }

    public void setCameraFacing(int facing) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setCameraFacing called on an already released recorder!");
            default:
                break;
        }
        mCameraFacing = facing;
        setRecordingOrientation();
    }

    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOnInfoListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setOnInfoListener called on an already released recorder!");
            default:
                break;
        }
        mInfoListener = infoListener;
    }

    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOnErrorListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setOnErrorListener called on an already released recorder!");
            default:
                break;
        }
        mErrorListener = errorListener;
    }

    private void initializeFilterFramework() {
        mGraphEnv = newInstance(sCtGraphEnvironment);
        invoke(mGraphEnv, sGraphEnvironmentCreateGLEnvironment);

        int videoFrameWidth = mProfile.videoFrameWidth;
        int videoFrameHeight = mProfile.videoFrameHeight;
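        // When the camera is mounted sideways relative to the display (90 or
        // 270 degrees), swap the recording dimensions so the graph's recording
        // size matches the rotated preview.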
        if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) {
            int tmp = videoFrameWidth;
            videoFrameWidth = videoFrameHeight;
            videoFrameHeight = tmp;
        }

        invoke(mGraphEnv, sGraphEnvironmentAddReferences,
                new Object[] {new Object[] {
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", videoFrameWidth,
                "recordingHeight", videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener}});
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }

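    // Switching effects while the preview is live is a two-step handshake:
    // initializeEffect() loads the new graph and stops the old runner, then
    // the runner-done callback (invokeOnRunnerDone) tears down the old graph
    // and starts the new runner.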
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {

            invoke(mGraphEnv, sGraphEnvironmentAddReferences,
                    new Object[] {new Object[] {
                    "previewSurfaceTexture", mPreviewSurfaceTexture,
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint}});
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = (Integer) invoke(mGraphEnv,
                            sGraphEnvironmentLoadGraph,
                            new Object[] {mContext, R.raw.goofy_face});
                    break;
                case EFFECT_BACKDROPPER:
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = (Integer) invoke(mGraphEnv,
                            sGraphEnvironmentLoadGraph,
                            new Object[] {mContext, R.raw.backdropper});
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID " + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            mOldRunner = mRunner;
            mRunner = invoke(mGraphEnv, sGraphEnvironmentGetRunner,
                    new Object[] {mGraphId,
                    getConstant(sClsGraphEnvironment, "MODE_ASYNCHRONOUS")});
            invoke(mRunner, sGraphRunnerSetDoneCallback, new Object[] {mRunnerDoneCallback});
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop the existing runner; the
                // stop callback will take care of starting the new runner.
                mCameraDevice.stopPreview();
                try {
                    mCameraDevice.setPreviewTexture(null);
                } catch (IOException e) {
                    throw new RuntimeException(
                        "Unable to disconnect camera from effect input", e);
                }
                invoke(mOldRunner, sGraphRunnerStop);
            }
        }

        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Object goofyFilter = getGraphFilter(mRunner, "goofyrenderer");
                setInputValue(goofyFilter, "currentEffect",
                        ((Integer) mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                tryEnableVideoStabilization(false);
                Object backgroundSrc = getGraphFilter(mRunner, "background");
                setInputValue(backgroundSrc, "sourceUrl", mEffectParameter);
                // For the front camera, the background video needs to be mirrored
                // in the backdropper filter
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    Object replacer = getGraphFilter(mRunner, "replacer");
                    setInputValue(replacer, "mirrorBg", true);
                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
                }
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    public synchronized void startPreview() {
        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");

        switch (mState) {
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                // Already running preview
                Log.w(TAG, "startPreview called when already running preview");
                return;
            case STATE_RECORD:
                throw new RuntimeException("Cannot start preview when already recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "startPreview called on an already released recorder!");
            default:
                break;
        }

        if (mEffect == EFFECT_NONE) {
            throw new RuntimeException("No effect selected!");
        }
        if (mEffectParameter == null) {
            throw new RuntimeException("No effect parameter provided!");
        }
        if (mProfile == null) {
            throw new RuntimeException("No recording profile provided!");
        }
        if (mPreviewSurfaceTexture == null) {
            if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one");
            mState = STATE_WAITING_FOR_SURFACE;
            return;
        }
        if (mCameraDevice == null) {
            throw new RuntimeException("No camera to record from!");
        }

        if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph.");
        initializeFilterFramework();

        initializeEffect(true);

        mState = STATE_STARTING_PREVIEW;
        invoke(mRunner, sGraphRunnerRun);
        // Rest of preview startup handled in mSourceReadyCallback
    }

    private Object invokeObjectEquals(Object proxy, Object[] args) {
        return Boolean.valueOf(proxy == args[0]);
    }

    private Object invokeObjectToString() {
        return "Proxy-" + toString();
    }

    private void invokeOnLearningDone() {
        if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
        // Called in a processing thread, so have to post message back to UI
        // thread
        sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
        enable3ALocks(true);
    }

    private void invokeOnRunnerDone(Object[] args) {
        int runnerDoneResult = (Integer) args[0];
        synchronized (EffectsRecorder.this) {
            if (mLogVerbose) {
                Log.v(TAG,
                      "Graph runner done (" + EffectsRecorder.this
                      + ", mRunner " + mRunner
                      + ", mOldRunner " + mOldRunner + ")");
            }
            if (runnerDoneResult ==
                    (Integer) getConstant(sClsGraphRunner, "RESULT_ERROR")) {
                // Handle error case
                Log.e(TAG, "Error running filter graph!");
                Exception e = null;
                if (mRunner != null) {
                    e = (Exception) invoke(mRunner, sGraphRunnerGetError);
                } else if (mOldRunner != null) {
                    e = (Exception) invoke(mOldRunner, sGraphRunnerGetError);
                }
                raiseError(e);
            }
            if (mOldRunner != null) {
                // Tear down old graph if available
                if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                Object glEnv = getContextGLEnvironment(mGraphEnv);
                if (glEnv != null && !(Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
                    invoke(glEnv, sGLEnvironmentActivate);
                }
                getGraphTearDown(mOldRunner,
                        invoke(mGraphEnv, sGraphEnvironmentGetContext));
                if (glEnv != null && (Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
                    invoke(glEnv, sGLEnvironmentDeactivate);
                }
                mOldRunner = null;
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects, start up the new runner
                if (mLogVerbose) {
                    Log.v(TAG, "Previous effect halted. Running graph again. state: "
                            + mState);
                }
                tryEnable3ALocks(false);
                // On an error, the graph restarts from the beginning, and for the
                // BACKDROPPER effect the learner re-learns the background. We
                // therefore show the learning dialog again, so the user does not
                // start recording before learning is done; otherwise the new
                // background could come up mid-recording, leaving the end result
                // video with a heterogeneous background.
                // For the BACKDROPPER effect, this path is also sometimes executed
                // at the end of a normal recording session. In that case the graph
                // does not restart and the learner does not re-learn, so we do not
                // show the learning dialog.
                if (runnerDoneResult == (Integer) getConstant(
                        sClsGraphRunner, "RESULT_ERROR")
                        && mCurrentEffect == EFFECT_BACKDROPPER) {
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                }
                invoke(mRunner, sGraphRunnerRun);
            } else if (mState != STATE_RELEASED) {
                // Shutting down effects
                if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                tryEnable3ALocks(false);
                sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
            } else {
                // STATE_RELEASED - camera will be/has been released as well, do nothing.
            }
        }
    }

    private void invokeOnSurfaceTextureSourceReady(Object[] args) {
        SurfaceTexture source = (SurfaceTexture) args[0];
        if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
        synchronized (EffectsRecorder.this) {
            mTextureSource = source;

            if (mState == STATE_CONFIGURE) {
                // Stop preview happened while the runner was doing startup tasks.
                // Since we haven't started anything up, don't do anything;
                // the rest of the cleanup will happen in onRunnerDone.
                if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                return;
            }
            if (mState == STATE_RELEASED) {
                // EffectsRecorder has been released, so don't touch the camera device
                // or anything else
                if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                return;
            }
            if (source == null) {
                if (mLogVerbose) {
                    Log.v(TAG, "Ready callback: source null! Looks like graph was closed!");
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW ||
                        mState == STATE_RECORD) {
                    // A null source here means the graph is shutting down
                    // unexpectedly, so we need to turn off preview before
                    // the surface texture goes away.
                    if (mLogVerbose) {
                        Log.v(TAG, "Ready callback: State: " + mState
                                + ". stopCameraPreview");
                    }

                    stopCameraPreview();
                }
                return;
            }

            // Lock AE/AWB to reduce transition flicker
            tryEnable3ALocks(true);

            mCameraDevice.stopPreview();
            if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
            try {
                mCameraDevice.setPreviewTexture(mTextureSource);
            } catch (IOException e) {
                throw new RuntimeException("Unable to connect camera to effect input", e);
            }

            mCameraDevice.startPreview();

            // Unlock AE/AWB after preview started
            tryEnable3ALocks(false);

            mState = STATE_PREVIEW;

            if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

            // Notify the listener that the preview is running
            sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
        }
    }

    private void invokeOnRecordingDone() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
        sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
    }

    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            startPreview();
        }

        Object recorder = getGraphFilter(mRunner, "recorder");
        if (mFd != null) {
            setInputValue(recorder, "outputFileDescriptor", mFd);
        } else {
            setInputValue(recorder, "outputFile", mOutputFile);
        }
        // It is OK to set the audio source without checking for time lapse here,
        // since that check is done in the MediaEncoderFilter itself
        setInputValue(recorder, "audioSource", MediaRecorder.AudioSource.CAMCORDER);
        setInputValue(recorder, "recordingProfile", mProfile);
        setInputValue(recorder, "orientationHint", mOrientationHint);
        // It is important to set the time lapse interval to 0 when the capture
        // rate is not > 0, since the recorder is not recreated for every
        // recording session; it infers whether the capture is time lapsed from
        // the value of this interval.
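        // Worked example: mCaptureRate = 0.5 fps gives an interval of
        // 1 / 0.5 = 2 s per frame, i.e. 2,000,000 us passed to the recorder.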
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            setInputValue(recorder, "timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            setInputValue(recorder, "timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            setInputValue(recorder, "infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            setInputValue(recorder, "errorListener", mErrorListener);
        }
        setInputValue(recorder, "maxFileSize", mMaxFileSize);
        setInputValue(recorder, "maxDurationMs", mMaxDurationMs);
        setInputValue(recorder, "recording", true);
        mSoundPlayer.play(SoundClips.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }

    public synchronized void stopRecording() {
        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                Log.w(TAG, "stopRecording called when recording not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
            default:
                break;
        }
        Object recorder = getGraphFilter(mRunner, "recorder");
        setInputValue(recorder, "recording", false);
        mSoundPlayer.play(SoundClips.STOP_VIDEO_RECORDING);
        mState = STATE_PREVIEW;
    }

    // Tells the filter graph that the display SurfaceTexture is no longer valid,
    // so the graph must not hold any reference to surfaces created from it.
    public synchronized void disconnectDisplay() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the SurfaceTexture");
        Object display = getGraphFilter(mRunner, "display");
        invoke(display, sSurfaceTextureTargetDisconnect, new Object[] {
                invoke(mGraphEnv, sGraphEnvironmentGetContext)});
    }

    // The VideoCamera calls this to notify us that the camera is being released
    // to the outside world. This call should happen after stopRecording;
    // otherwise the effects may throw an exception. With recording stopped,
    // the stopPreview call will not try to release the camera again.
    // This must be called in onPause() if the effects are ON.
    public synchronized void disconnectCamera() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
        stopCameraPreview();
        mCameraDevice = null;
    }

    // In the normal case, when disconnectCamera is not called, we should not
    // set the camera device to null, since the runner-done callback tries to
    // enable the 3A locks, which needs the camera device.
    public synchronized void stopCameraPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping camera preview.");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Nothing to disconnect");
            return;
        }
        mCameraDevice.stopPreview();
        try {
            mCameraDevice.setPreviewTexture(null);
        } catch (IOException e) {
            // Preserve the cause so callers can see why the disconnect failed
            throw new RuntimeException("Unable to disconnect camera", e);
        }
    }

    // Stop and release effect resources
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "stopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        // This will not do anything if the camera has already been disconnected.
        stopCameraPreview();

        mState = STATE_CONFIGURE;
        mOldRunner = mRunner;
        invoke(mRunner, sGraphRunnerStop);
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }

    // Try to enable/disable video stabilization if supported; otherwise return false.
    // Called from a synchronized block.
    boolean tryEnableVideoStabilization(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization.");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling video stabilization.");
            return false;
        }
        Camera.Parameters params = mCameraDevice.getParameters();

        String vstabSupported = params.get("video-stabilization-supported");
        if ("true".equals(vstabSupported)) {
            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
            params.set("video-stabilization", toggle ? "true" : "false");
            mCameraDevice.setParameters(params);
            return true;
        }
        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
        return false;
    }

    // Try to enable/disable 3A locks if supported; otherwise return false
    @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
    synchronized boolean tryEnable3ALocks(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling 3A locks.");
            return false;
        }
        Camera.Parameters params = mCameraDevice.getParameters();
        if (Util.isAutoExposureLockSupported(params) &&
            Util.isAutoWhiteBalanceLockSupported(params)) {
            params.setAutoExposureLock(toggle);
            params.setAutoWhiteBalanceLock(toggle);
            mCameraDevice.setParameters(params);
            return true;
        }
        return false;
    }

    // Enable/disable 3A locks when locking is essential to success;
    // throws if the camera does not support locking.
    synchronized void enable3ALocks(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "Enable3ALocks");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling 3A locks.");
            return;
        }
        // No need to fetch parameters here; tryEnable3ALocks does its own query.
        if (!tryEnable3ALocks(toggle)) {
            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
        }
    }

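    // Single handler behind all the reflected listener proxies created in the
    // constructor. Dispatch compares the incoming Method against the Method
    // objects cached during reflection init. It implements Serializable
    // presumably so the filter framework can serialize listener references
    // with graph state (an inference; the framework is only visible here
    // through reflection).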
    static class SerializableInvocationHandler
            implements InvocationHandler, Serializable {
        @Override
        public Object invoke(Object proxy, Method method, Object[] args)
                throws Throwable {
            if (sEffectsRecorder == null) return null;
            if (method.equals(sObjectEquals)) {
                return sEffectsRecorder.invokeObjectEquals(proxy, args);
            } else if (method.equals(sObjectToString)) {
                return sEffectsRecorder.invokeObjectToString();
            } else if (method.equals(sLearningDoneListenerOnLearningDone)) {
                sEffectsRecorder.invokeOnLearningDone();
            } else if (method.equals(sOnRunnerDoneListenerOnRunnerDone)) {
                sEffectsRecorder.invokeOnRunnerDone(args);
            } else if (method.equals(
                    sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady)) {
                sEffectsRecorder.invokeOnSurfaceTextureSourceReady(args);
            } else if (method.equals(sOnRecordingDoneListenerOnRecordingDone)) {
                sEffectsRecorder.invokeOnRecordingDone();
            }
            return null;
        }
    }

    // Indicates that all camera/recording activity needs to halt
    public synchronized void release() {
        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                stopPreview();
                // Fall-through
            default:
                if (mSoundPlayer != null) {
                    mSoundPlayer.release();
                    mSoundPlayer = null;
                }
                mState = STATE_RELEASED;
                break;
        }
        sEffectsRecorder = null;
    }

    private void sendMessage(final int effect, final int msg) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mEffectsListener.onEffectsUpdate(effect, msg);
                }
            });
        }
    }

    private void raiseError(final Exception exception) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (mFd != null) {
                        mEffectsListener.onEffectsError(exception, null);
                    } else {
                        mEffectsListener.onEffectsError(exception, mOutputFile);
                    }
                }
            });
        }
    }

    // Invoke a method on a receiver with no arguments
    private Object invoke(Object receiver, Method method) {
        try {
            return method.invoke(receiver);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    // Invoke a method on a receiver with arguments
    private Object invoke(Object receiver, Method method, Object[] args) {
        try {
            return method.invoke(receiver, args);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private void setInputValue(Object receiver, String key, Object value) {
        try {
            sFilterSetInputValue.invoke(receiver, new Object[] {key, value});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object newInstance(Constructor<?> ct, Object[] initArgs) {
        try {
            return ct.newInstance(initArgs);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object newInstance(Constructor<?> ct) {
        try {
            return ct.newInstance();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object getGraphFilter(Object receiver, String name) {
        try {
            return sFilterGraphGetFilter.invoke(sGraphRunnerGetGraph
                    .invoke(receiver), new Object[] {name});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object getContextGLEnvironment(Object receiver) {
        try {
            return sFilterContextGetGLEnvironment
                    .invoke(sGraphEnvironmentGetContext.invoke(receiver));
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private void getGraphTearDown(Object receiver, Object filterContext) {
        try {
            sFilterGraphTearDown.invoke(sGraphRunnerGetGraph.invoke(receiver),
                    new Object[]{filterContext});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object getConstant(Class<?> cls, String name) {
        try {
            return cls.getDeclaredField(name).get(null);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}