EffectsRecorder.java revision 2c2550bd42c614330a3c968bda005ac23d12f8d1
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera;

import android.content.Context;
import android.filterfw.GraphEnvironment;
import android.filterfw.core.Filter;
import android.filterfw.core.GLEnvironment;
import android.filterfw.core.GraphRunner;
import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
import android.filterfw.geometry.Point;
import android.filterfw.geometry.Quad;
import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
import android.filterpacks.videoproc.BackDropperFilter;
import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.media.CamcorderProfile;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.io.IOException;
import java.lang.Runnable;

/**
 * Encapsulates the mobile filter framework components needed to record video with
 * effects applied. Modeled after MediaRecorder.
 */
public class EffectsRecorder {
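    /*
     * A minimal usage sketch, assuming a caller that already holds an open
     * android.hardware.Camera, a CamcorderProfile, and the preview SurfaceHolder;
     * the output path and background URL below are illustrative only. The call
     * order mirrors MediaRecorder:
     *
     *   EffectsRecorder recorder = new EffectsRecorder(context);
     *   recorder.setCamera(camera);
     *   recorder.setProfile(profile);
     *   recorder.setOutputFile("/sdcard/DCIM/video.mp4");
     *   recorder.setEffectsListener(listener);
     *   recorder.setEffect(EFFECT_BACKDROPPER, "file:///sdcard/background.3gp");
     *   recorder.setPreviewDisplay(holder, previewWidth, previewHeight);
     *   recorder.startPreview();
     *   recorder.startRecording();
     *   // ... record ...
     *   recorder.stopRecording();
     *   recorder.stopPreview();
     *   recorder.release();
     */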

    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

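    /* Status codes passed to EffectsListener.onEffectsUpdate(), always posted to
     * the main looper via mHandler:
     * STARTED_LEARNING  - the backdropper graph is being loaded and will learn the background
     * DONE_LEARNING     - the backdropper's LearningDoneListener has fired
     * SWITCHING_EFFECT  - an effect is being re-initialized while preview is running
     * EFFECTS_STOPPED   - the graph runner halted and effects are no longer active
     */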
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;

    private Context mContext;
    private Handler mHandler;
    private boolean mReleased;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    private String mOutputFile;
    private int mOrientationHint = 0;
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;

    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;

    private SurfaceTexture mTextureSource;

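    /* Recorder lifecycle: CONFIGURE is the idle state; startPreview() parks in
     * WAITING_FOR_SURFACE until a preview surface is set, and reaches PREVIEW once
     * the SurfaceTexture-ready callback connects the camera; startRecording() moves
     * to RECORD; stopRecording()/stopPreview() walk back down; release() ends in
     * RELEASED from any state. */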
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_PREVIEW                = 2;
    private static final int STATE_RECORD                 = 3;
    private static final int STATE_RELEASED               = 4;
    private int mState = STATE_CONFIGURE;

    private boolean mLogVerbose = true; //Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";

    /** Determine if a given effect is supported at runtime.
     * Some effects require libraries not available on all devices.
     */
    public static boolean isEffectSupported(int effectId) {
        switch (effectId) {
            case EFFECT_GOOFY_FACE:
                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
            case EFFECT_BACKDROPPER:
                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
            default:
                return false;
        }
    }
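    /* A typical caller-side guard, assuming the caller falls back to plain
     * (effect-free) recording when the required filter library is missing;
     * "backgroundUri" stands in for a caller-supplied value:
     *
     *   if (EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_BACKDROPPER)) {
     *       recorder.setEffect(EffectsRecorder.EFFECT_BACKDROPPER, backgroundUri);
     *   }
     */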

    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
        mContext = context;
        mHandler = new Handler(Looper.getMainLooper());
    }

    public void setCamera(Camera cameraDevice) {
        switch (mState) {
            case STATE_PREVIEW:
                throw new RuntimeException("setCamera cannot be called while previewing!");
            case STATE_RECORD:
                throw new RuntimeException("setCamera cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setCamera called on an already released recorder!");
            default:
                break;
        }

        mCameraDevice = cameraDevice;
    }

    public void setProfile(CamcorderProfile profile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setProfile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setProfile called on an already released recorder!");
            default:
                break;
        }
        mProfile = profile;
    }

    public void setOutputFile(String outputFile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = outputFile;
    }

    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
                                  int previewWidth,
                                  int previewHeight) {
        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
            default:
                break;
        }

        mPreviewSurfaceHolder = previewSurfaceHolder;
        mPreviewWidth = previewWidth;
        mPreviewHeight = previewHeight;

        switch (mState) {
            case STATE_WAITING_FOR_SURFACE:
                startPreview();
                break;
            case STATE_PREVIEW:
                initializeEffect(true);
                break;
        }
    }

    public void setEffect(int effect, Object effectParameter) {
        if (mLogVerbose) Log.v(TAG,
                               "setEffect: effect ID " + effect +
                               ", parameter " + effectParameter.toString() );
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setEffect cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setEffect called on an already released recorder!");
            default:
                break;
        }

        mEffect = effect;
        mEffectParameter = effectParameter;

        if (mState == STATE_PREVIEW) {
            initializeEffect(false);
        }
    }

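    /** Receives effect status updates and errors. Both callbacks are posted to the
     * main looper (see sendMessage() and raiseError()), so implementations may touch
     * UI state directly; effectMsg is one of the EFFECT_MSG_* constants above, and
     * filePath is the output file that was being written, if any. */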
    public interface EffectsListener {
        public void onEffectsUpdate(int effectId, int effectMsg);
        public void onEffectsError(Exception exception, String filePath);
    }

    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }

    private void setFaceDetectOrientation(int degrees) {
        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
            rotateFilter.setInputValue("rotation", degrees);
            int reverseDegrees = (360 - degrees) % 360;
            metaRotateFilter.setInputValue("rotation", reverseDegrees);
        }
    }

    private void setRecordingOrientation() {
        if ( mState <= STATE_PREVIEW && mRunner != null ) {
            Point bl = new Point(0, 0);
            Point br = new Point(1, 0);
            Point tl = new Point(0, 1);
            Point tr = new Point(1, 1);
            Quad recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use an identity transform
                recordingRegion = new Quad(bl, br, tl, tr);
            } else {
                // Recording region needs to be tweaked for front cameras, since they
                // mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = new Quad(br, bl, tr, tl);
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = new Quad(tl, tr, bl, br);
                }
            }
            Filter recorder = mRunner.getGraph().getFilter("recorder");
            recorder.setInputValue("inputRegion", recordingRegion);
        }
    }

    public void setOrientationHint(int degrees) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setOrientationHint called on an already released recorder!");
            default:
                break;
        }
        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);

        mOrientationHint = degrees;
        setFaceDetectOrientation(degrees);
        setRecordingOrientation();
    }

    public void setCameraFacing(int facing) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setCameraFacing called on an already released recorder!");
            default:
                break;
        }
        mCameraFacing = facing;
        setRecordingOrientation();
    }

    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setInfoListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setInfoListener called on an already released recorder!");
            default:
                break;
        }
        mInfoListener = infoListener;
    }

    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setErrorListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setErrorListener called on an already released recorder!");
            default:
                break;
        }
        mErrorListener = errorListener;
    }

    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }

        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "audioSource", MediaRecorder.AudioSource.CAMCORDER,
                "learningDoneListener", mLearningDoneListener);

        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }

    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {
            if (mLogVerbose) {
                Log.v(TAG, "Effect initializing. Preview size "
                       + mPreviewWidth + ", " + mPreviewHeight);
            }

            mGraphEnv.addReferences(
                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID " + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    mCameraDevice.setPreviewTexture(null);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to disconnect camera from effect input", e);
                }
                mOldRunner.stop();
            }
        }

        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                                          ((Integer)mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl",
                                            (String)mEffectParameter);
                break;
            default:
                break;
        }
        setFaceDetectOrientation(mOrientationHint);
        setRecordingOrientation();
    }

    public synchronized void startPreview() {
        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");

        switch (mState) {
            case STATE_PREVIEW:
                // Already running preview
                Log.w(TAG, "startPreview called when already running preview");
                return;
            case STATE_RECORD:
                throw new RuntimeException("Cannot start preview when already recording!");
            case STATE_RELEASED:
                throw new RuntimeException("startPreview called on an already released recorder!");
            default:
                break;
        }

        if (mEffect == EFFECT_NONE) {
            throw new RuntimeException("No effect selected!");
        }
        if (mEffectParameter == null) {
            throw new RuntimeException("No effect parameter provided!");
        }
        if (mProfile == null) {
            throw new RuntimeException("No recording profile provided!");
        }
        if (mPreviewSurfaceHolder == null) {
            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
            mState = STATE_WAITING_FOR_SURFACE;
            return;
        }
        if (mCameraDevice == null) {
            throw new RuntimeException("No camera to record from!");
        }

        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");

        initializeFilterFramework();

        initializeEffect(true);

        if (mLogVerbose) Log.v(TAG, "Starting filter graph");

        mRunner.run();
        // Rest of preview startup handled in mSourceReadyCallback
    }

    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                // When shutting down a graph, we receive a null SurfaceTexture to
                // indicate that. Don't want to connect up the camera in that case.
                if (source == null) return;

                if (mState == STATE_RELEASED) return;

                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);
                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
            }
        }
    };

    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            enable3ALocks(true);
        }
    };

    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException("startRecording called on an already released recorder!");
            default:
                break;
        }

        if (mOutputFile == null) {
            throw new RuntimeException("No output file name provided!");
        }

        if (mState == STATE_CONFIGURE) {
            startPreview();
        }
        Filter recorder = mRunner.getGraph().getFilter("recorder");
        recorder.setInputValue("outputFile", mOutputFile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("recording", true);
        mState = STATE_RECORD;
    }

    public synchronized void stopRecording() {
        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
            case STATE_PREVIEW:
                Log.w(TAG, "stopRecording called when recording not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
            default:
                break;
        }
        Filter recorder = mRunner.getGraph().getFilter("recorder");
        recorder.setInputValue("recording", false);
        mState = STATE_PREVIEW;
    }

    // Stop and release effect resources
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "stopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        mCameraDevice.stopPreview();
        try {
            mCameraDevice.setPreviewTexture(null);
        } catch(IOException e) {
            throw new RuntimeException("Unable to disconnect camera from effect input", e);
        }

        mState = STATE_CONFIGURE;
        mOldRunner = mRunner;
        mRunner.stop();
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }

    // Try to enable/disable 3A locks if supported; otherwise return false
    boolean tryEnable3ALocks(boolean toggle) {
        Camera.Parameters params = mCameraDevice.getParameters();
        if (params.isAutoExposureLockSupported() &&
            params.isAutoWhiteBalanceLockSupported() ) {
            params.setAutoExposureLock(toggle);
            params.setAutoWhiteBalanceLock(toggle);
            mCameraDevice.setParameters(params);
            return true;
        }
        return false;
    }

    // Try to enable/disable 3A locks if supported; otherwise, throw error
    // Use this when locks are essential to success
    void enable3ALocks(boolean toggle) {
        Camera.Parameters params = mCameraDevice.getParameters();
        if (!tryEnable3ALocks(toggle)) {
            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
        }
    }

    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        public void onRunnerDone(int result) {
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    raiseError(mRunner == null ? null : mRunner.getError());
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
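                    // Tearing down the old graph touches GL state, so make the GL
                    // environment current on this thread first (if no one else has it
                    // active), and release it again once teardown is done.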
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };

    // Indicates that all camera/recording activity needs to halt
    public synchronized void release() {
        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
            case STATE_PREVIEW:
                stopPreview();
                // Fall-through
            default:
                mState = STATE_RELEASED;
                break;
        }
    }

    private void sendMessage(final int effect, final int msg) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                public void run() {
                    mEffectsListener.onEffectsUpdate(effect, msg);
                }
            });
        }
    }

    private void raiseError(final Exception exception) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                public void run() {
                    mEffectsListener.onEffectsError(exception, mOutputFile);
                }
            });
        }
    }
}