EffectsRecorder.java revision e7b26d26647188c803e0b4b929f44effc6554aec
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.filterfw.GraphEnvironment;
21import android.filterfw.core.Filter;
22import android.filterfw.core.GLEnvironment;
23import android.filterfw.core.GraphRunner;
24import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
25import android.filterfw.geometry.Point;
26import android.filterfw.geometry.Quad;
27import android.filterpacks.videoproc.BackDropperFilter;
28import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
29import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
30import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
31
32import android.graphics.SurfaceTexture;
33import android.hardware.Camera;
34import android.media.MediaRecorder;
35import android.media.MediaActionSound;
36import android.media.CamcorderProfile;
37import android.os.Handler;
38import android.os.Looper;
39import android.util.Log;
40import android.view.SurfaceHolder;
41
42import java.io.IOException;
43import java.lang.Runnable;
44import java.io.FileDescriptor;
45
46
47/**
48 * Encapsulates the mobile filter framework components needed to record video with
49 * effects applied. Modeled after MediaRecorder.
50 */
51public class EffectsRecorder {
52
    // Effect identifiers accepted by setEffect().
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    // Sub-effects for EFFECT_GOOFY_FACE, passed as the effect parameter.
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Status codes delivered to EffectsListener.onEffectsUpdate().
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
    public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;

    private Context mContext;
    private Handler mHandler;   // bound to the main looper in the constructor

    // Camera / recording configuration mirroring MediaRecorder's setters.
    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    private double mCaptureRate = 0;            // >0 enables time-lapse capture
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Output destination: exactly one of mOutputFile / mFd is non-null.
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;           // clockwise degrees, as MediaRecorder
    private long mMaxFileSize = 0;              // bytes; <=0 disables the limit
    private int mMaxDurationMs = 0;             // ms; 0 disables the limit
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private boolean mAppIsLandscape;

    // mEffect is the requested effect; mCurrentEffect is the one the graph
    // is actually running (they differ during a switch).
    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    // Effect-specific argument (Integer sub-effect for goofy face,
    // background source for backdropper).
    private Object mEffectParameter;

    // Mobile filter framework state. mOldRunner holds the previous graph
    // while an effect switch is in flight.
    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;

    // Texture the camera renders preview frames into; owned by the graph.
    private SurfaceTexture mTextureSource;

    // Lifecycle state machine; transitions are guarded in each public method.
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";
    private MediaActionSound mCameraSound;      // start/stop recording cues
116
117    /** Determine if a given effect is supported at runtime
118     * Some effects require libraries not available on all devices
119     */
120    public static boolean isEffectSupported(int effectId) {
121        switch (effectId) {
122            case EFFECT_GOOFY_FACE:
123                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
124            case EFFECT_BACKDROPPER:
125                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
126            default:
127                return false;
128        }
129    }
130
131    public EffectsRecorder(Context context) {
132        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
133        mContext = context;
134        mHandler = new Handler(Looper.getMainLooper());
135        mCameraSound = new MediaActionSound();
136        mCameraSound.load(MediaActionSound.START_VIDEO_RECORDING);
137        mCameraSound.load(MediaActionSound.STOP_VIDEO_RECORDING);
138    }
139
140    public void setCamera(Camera cameraDevice) {
141        switch (mState) {
142            case STATE_PREVIEW:
143                throw new RuntimeException("setCamera cannot be called while previewing!");
144            case STATE_RECORD:
145                throw new RuntimeException("setCamera cannot be called while recording!");
146            case STATE_RELEASED:
147                throw new RuntimeException("setCamera called on an already released recorder!");
148            default:
149                break;
150        }
151
152        mCameraDevice = cameraDevice;
153    }
154
155    public void setProfile(CamcorderProfile profile) {
156        switch (mState) {
157            case STATE_RECORD:
158                throw new RuntimeException("setProfile cannot be called while recording!");
159            case STATE_RELEASED:
160                throw new RuntimeException("setProfile called on an already released recorder!");
161            default:
162                break;
163        }
164        mProfile = profile;
165    }
166
167    public void setOutputFile(String outputFile) {
168        switch (mState) {
169            case STATE_RECORD:
170                throw new RuntimeException("setOutputFile cannot be called while recording!");
171            case STATE_RELEASED:
172                throw new RuntimeException("setOutputFile called on an already released recorder!");
173            default:
174                break;
175        }
176
177        mOutputFile = outputFile;
178        mFd = null;
179    }
180
181    public void setOutputFile(FileDescriptor fd) {
182        switch (mState) {
183            case STATE_RECORD:
184                throw new RuntimeException("setOutputFile cannot be called while recording!");
185            case STATE_RELEASED:
186                throw new RuntimeException("setOutputFile called on an already released recorder!");
187            default:
188                break;
189        }
190
191        mOutputFile = null;
192        mFd = fd;
193    }
194
195    /**
196     * Sets the maximum filesize (in bytes) of the recording session.
197     * This will be passed on to the MediaEncoderFilter and then to the
198     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
199     * disable the limit
200    */
201    public synchronized void setMaxFileSize(long maxFileSize) {
202        switch (mState) {
203            case STATE_RECORD:
204                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
205            case STATE_RELEASED:
206                throw new RuntimeException("setMaxFileSize called on an already released recorder!");
207            default:
208                break;
209        }
210        mMaxFileSize = maxFileSize;
211    }
212
213    /**
214    * Sets the maximum recording duration (in ms) for the next recording session
215    * Setting it to zero (the default) disables the limit.
216    */
217    public synchronized void setMaxDuration(int maxDurationMs) {
218        switch (mState) {
219            case STATE_RECORD:
220                throw new RuntimeException("setMaxDuration cannot be called while recording!");
221            case STATE_RELEASED:
222                throw new RuntimeException("setMaxDuration called on an already released recorder!");
223            default:
224                break;
225        }
226        mMaxDurationMs = maxDurationMs;
227    }
228
229
230    public void setCaptureRate(double fps) {
231        switch (mState) {
232            case STATE_RECORD:
233                throw new RuntimeException("setCaptureRate cannot be called while recording!");
234            case STATE_RELEASED:
235                throw new RuntimeException("setCaptureRate called on an already released recorder!");
236            default:
237                break;
238        }
239
240        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
241        mCaptureRate = fps;
242    }
243
244    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
245                                  int previewWidth,
246                                  int previewHeight) {
247        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
248        switch (mState) {
249            case STATE_RECORD:
250                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
251            case STATE_RELEASED:
252                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
253            default:
254                break;
255        }
256
257        mPreviewSurfaceHolder = previewSurfaceHolder;
258        mPreviewWidth = previewWidth;
259        mPreviewHeight = previewHeight;
260
261        switch (mState) {
262            case STATE_WAITING_FOR_SURFACE:
263                startPreview();
264                break;
265            case STATE_STARTING_PREVIEW:
266            case STATE_PREVIEW:
267                initializeEffect(true);
268                break;
269        }
270    }
271
272    public void setEffect(int effect, Object effectParameter) {
273        if (mLogVerbose) Log.v(TAG,
274                               "setEffect: effect ID " + effect +
275                               ", parameter " + effectParameter.toString() );
276        switch (mState) {
277            case STATE_RECORD:
278                throw new RuntimeException("setEffect cannot be called while recording!");
279            case STATE_RELEASED:
280                throw new RuntimeException("setEffect called on an already released recorder!");
281            default:
282                break;
283        }
284
285        mEffect = effect;
286        mEffectParameter = effectParameter;
287
288        if (mState == STATE_PREVIEW ||
289                mState == STATE_STARTING_PREVIEW) {
290            initializeEffect(false);
291        }
292    }
293
    /**
     * Receives effect status updates and errors from this recorder.
     * NOTE(review): the delivery thread is not visible in this file (events
     * are routed through sendMessage()); confirm before assuming UI thread.
     */
    public interface EffectsListener {
        // effectId is one of EFFECT_*, effectMsg one of EFFECT_MSG_*.
        public void onEffectsUpdate(int effectId, int effectMsg);
        // filePath identifies the output associated with the failure, if any.
        public void onEffectsError(Exception exception, String filePath);
    }
298
    /** Registers the listener that receives effect status and error events. */
    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }
302
303    private void setFaceDetectOrientation() {
304        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
305            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
306            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
307            rotateFilter.setInputValue("rotation", mOrientationHint);
308            int reverseDegrees = (360 - mOrientationHint) % 360;
309            metaRotateFilter.setInputValue("rotation", reverseDegrees);
310        }
311    }
312
313    private void setRecordingOrientation() {
314        if ( mState != STATE_RECORD && mRunner != null) {
315            Point bl = new Point(0, 0);
316            Point br = new Point(1, 0);
317            Point tl = new Point(0, 1);
318            Point tr = new Point(1, 1);
319            Quad recordingRegion;
320            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
321                // The back camera is not mirrored, so use a identity transform
322                recordingRegion = new Quad(bl, br, tl, tr);
323            } else {
324                // Recording region needs to be tweaked for front cameras, since they
325                // mirror their preview
326                if (mOrientationHint == 0 || mOrientationHint == 180) {
327                    // Horizontal flip in landscape
328                    recordingRegion = new Quad(br, bl, tr, tl);
329                } else {
330                    // Horizontal flip in portrait
331                    recordingRegion = new Quad(tl, tr, bl, br);
332                }
333            }
334            Filter recorder = mRunner.getGraph().getFilter("recorder");
335            recorder.setInputValue("inputRegion", recordingRegion);
336        }
337    }
338    public void setOrientationHint(int degrees) {
339        switch (mState) {
340            case STATE_RELEASED:
341                throw new RuntimeException(
342                        "setOrientationHint called on an already released recorder!");
343            default:
344                break;
345        }
346        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
347        mOrientationHint = degrees;
348        setFaceDetectOrientation();
349        setRecordingOrientation();
350    }
351
352    /** Passes the native orientation of the Camera app (device dependent)
353     * to allow for correct output aspect ratio. Defaults to portrait */
354    public void setAppToLandscape(boolean landscape) {
355        if (mState != STATE_CONFIGURE) {
356            throw new RuntimeException(
357                "setAppToLandscape called after configuration!");
358        }
359        mAppIsLandscape = landscape;
360    }
361
362    public void setCameraFacing(int facing) {
363        switch (mState) {
364            case STATE_RELEASED:
365                throw new RuntimeException(
366                    "setCameraFacing called on alrady released recorder!");
367            default:
368                break;
369        }
370        mCameraFacing = facing;
371        setRecordingOrientation();
372    }
373
374    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
375        switch (mState) {
376            case STATE_RECORD:
377                throw new RuntimeException("setInfoListener cannot be called while recording!");
378            case STATE_RELEASED:
379                throw new RuntimeException("setInfoListener called on an already released recorder!");
380            default:
381                break;
382        }
383        mInfoListener = infoListener;
384    }
385
386    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
387        switch (mState) {
388            case STATE_RECORD:
389                throw new RuntimeException("setErrorListener cannot be called while recording!");
390            case STATE_RELEASED:
391                throw new RuntimeException("setErrorListener called on an already released recorder!");
392            default:
393                break;
394        }
395        mErrorListener = errorListener;
396    }
397
398    private void initializeFilterFramework() {
399        mGraphEnv = new GraphEnvironment();
400        mGraphEnv.createGLEnvironment();
401
402        if (mLogVerbose) {
403            Log.v(TAG, "Effects framework initializing. Recording size "
404                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
405        }
406        if (!mAppIsLandscape) {
407            int tmp;
408            tmp = mProfile.videoFrameWidth;
409            mProfile.videoFrameWidth = mProfile.videoFrameHeight;
410            mProfile.videoFrameHeight = tmp;
411        }
412        mGraphEnv.addReferences(
413                "textureSourceCallback", mSourceReadyCallback,
414                "recordingWidth", mProfile.videoFrameWidth,
415                "recordingHeight", mProfile.videoFrameHeight,
416                "recordingProfile", mProfile,
417                "learningDoneListener", mLearningDoneListener,
418                "recordingDoneListener", mRecordingDoneListener);
419        mRunner = null;
420        mGraphId = -1;
421        mCurrentEffect = EFFECT_NONE;
422    }
423
424    private synchronized void initializeEffect(boolean forceReset) {
425        if (forceReset ||
426            mCurrentEffect != mEffect ||
427            mCurrentEffect == EFFECT_BACKDROPPER) {
428            if (mLogVerbose) {
429                Log.v(TAG, "Effect initializing. Preview size "
430                       + mPreviewWidth + ", " + mPreviewHeight);
431            }
432
433            mGraphEnv.addReferences(
434                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
435                    "previewWidth", mPreviewWidth,
436                    "previewHeight", mPreviewHeight,
437                    "orientation", mOrientationHint);
438            if (mState == STATE_PREVIEW ||
439                    mState == STATE_STARTING_PREVIEW) {
440                // Switching effects while running. Inform video camera.
441                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
442            }
443
444            switch (mEffect) {
445                case EFFECT_GOOFY_FACE:
446                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
447                    break;
448                case EFFECT_BACKDROPPER:
449                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
450                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
451                    break;
452                default:
453                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
454            }
455            mCurrentEffect = mEffect;
456
457            mOldRunner = mRunner;
458            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
459            mRunner.setDoneCallback(mRunnerDoneCallback);
460            if (mLogVerbose) {
461                Log.v(TAG, "New runner: " + mRunner
462                      + ". Old runner: " + mOldRunner);
463            }
464            if (mState == STATE_PREVIEW ||
465                    mState == STATE_STARTING_PREVIEW) {
466                // Switching effects while running. Stop existing runner.
467                // The stop callback will take care of starting new runner.
468                mCameraDevice.stopPreview();
469                try {
470                    mCameraDevice.setPreviewTexture(null);
471                } catch(IOException e) {
472                    throw new RuntimeException("Unable to connect camera to effect input", e);
473                }
474                mOldRunner.stop();
475            }
476        }
477
478        switch (mCurrentEffect) {
479            case EFFECT_GOOFY_FACE:
480                tryEnableVideoStabilization(true);
481                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
482                goofyFilter.setInputValue("currentEffect",
483                                          ((Integer)mEffectParameter).intValue());
484                break;
485            case EFFECT_BACKDROPPER:
486                tryEnableVideoStabilization(false);
487                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
488                backgroundSrc.setInputValue("sourceUrl", mEffectParameter);
489                // For front camera, the background video needs to be mirrored in the
490                // backdropper filter
491                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
492                    Filter replacer = mRunner.getGraph().getFilter("replacer");
493                    replacer.setInputValue("mirrorBg", true);
494                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
495                }
496                break;
497            default:
498                break;
499        }
500        setFaceDetectOrientation();
501        setRecordingOrientation();
502    }
503
504    public synchronized void startPreview() {
505        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
506
507        switch (mState) {
508            case STATE_STARTING_PREVIEW:
509            case STATE_PREVIEW:
510                // Already running preview
511                Log.w(TAG, "startPreview called when already running preview");
512                return;
513            case STATE_RECORD:
514                throw new RuntimeException("Cannot start preview when already recording!");
515            case STATE_RELEASED:
516                throw new RuntimeException("setEffect called on an already released recorder!");
517            default:
518                break;
519        }
520
521        if (mEffect == EFFECT_NONE) {
522            throw new RuntimeException("No effect selected!");
523        }
524        if (mEffectParameter == null) {
525            throw new RuntimeException("No effect parameter provided!");
526        }
527        if (mProfile == null) {
528            throw new RuntimeException("No recording profile provided!");
529        }
530        if (mPreviewSurfaceHolder == null) {
531            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
532            mState = STATE_WAITING_FOR_SURFACE;
533            return;
534        }
535        if (mCameraDevice == null) {
536            throw new RuntimeException("No camera to record from!");
537        }
538
539        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
540
541        initializeFilterFramework();
542
543        initializeEffect(true);
544
545        if (mLogVerbose) Log.v(TAG, "Starting filter graph");
546
547        mState = STATE_STARTING_PREVIEW;
548        mRunner.run();
549        // Rest of preview startup handled in mSourceReadyCallback
550    }
551
    // Invoked by the graph's SurfaceTextureSource when its texture is ready
    // (or torn down, with a null source). Completes preview startup by
    // pointing the camera at the graph's texture, or disconnects the camera
    // if the graph is shutting down. Runs on a graph thread; all work is
    // done under the recorder lock.
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        @Override
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch(IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                // Re-route camera frames into the effect graph's texture.
                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

                // Sending a message to listener that preview is complete
                sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
            }
        }
    };
616
    // Invoked by the BackDropperFilter once background learning finishes.
    // Locks AE/AWB so the learned background model stays valid; locking is
    // mandatory here (enable3ALocks throws if unsupported).
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        @Override
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            enable3ALocks(true);
        }
    };
628
    // A callback to finalize the media after the recording is done.
    // Invoked by the MediaEncoderFilter once the output file is complete.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        @Override
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
639
    /**
     * Starts recording through the current effect graph: configures the
     * graph's "recorder" filter with the output destination, profile,
     * orientation, time-lapse interval, listeners and limits, then flips it
     * to recording and plays the start sound. Starts preview first if still
     * in the configure state.
     *
     * NOTE(review): if startPreview() defers because no surface is set yet
     * (STATE_WAITING_FOR_SURFACE), mRunner is still null here and
     * mRunner.getGraph() below would NPE — confirm callers guarantee a
     * surface before recording.
     */
    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException("startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            startPreview();
        }

        Filter recorder = mRunner.getGraph().getFilter("recorder");
        // File descriptor takes precedence; setOutputFile() keeps exactly one
        // of the two non-null.
        if (mFd != null) {
            recorder.setInputValue("outputFileDescriptor", mFd);
        } else {
            recorder.setInputValue("outputFile", mOutputFile);
        }
        // It is ok to set the audiosource without checking for timelapse here
        // since that check will be done in the MediaEncoderFilter itself
        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);

        recorder.setInputValue("recordingProfile", mProfile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        // Important to set the timelapseinterval to 0 if the capture rate is not >0
        // since the recorder does not get created every time the recording starts.
        // The recorder infers whether the capture is timelapsed based on the value of
        // this interval
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            // Convert fps to microseconds between captured frames.
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            recorder.setInputValue("timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("maxFileSize", mMaxFileSize);
        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
        recorder.setInputValue("recording", true);
        mCameraSound.play(MediaActionSound.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }
697
698    public synchronized void stopRecording() {
699        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
700
701        switch (mState) {
702            case STATE_CONFIGURE:
703            case STATE_STARTING_PREVIEW:
704            case STATE_PREVIEW:
705                Log.w(TAG, "StopRecording called when recording not active!");
706                return;
707            case STATE_RELEASED:
708                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
709            default:
710                break;
711        }
712        Filter recorder = mRunner.getGraph().getFilter("recorder");
713        recorder.setInputValue("recording", false);
714        mCameraSound.play(MediaActionSound.STOP_VIDEO_RECORDING);
715        mState = STATE_PREVIEW;
716    }
717
718    // Stop and release effect resources
719    public synchronized void stopPreview() {
720        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
721
722        switch (mState) {
723            case STATE_CONFIGURE:
724                Log.w(TAG, "StopPreview called when preview not active!");
725                return;
726            case STATE_RELEASED:
727                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
728            default:
729                break;
730        }
731
732        if (mState == STATE_RECORD) {
733            stopRecording();
734        }
735
736        mCurrentEffect = EFFECT_NONE;
737
738        mCameraDevice.stopPreview();
739        try {
740            mCameraDevice.setPreviewTexture(null);
741        } catch(IOException e) {
742            throw new RuntimeException("Unable to connect camera to effect input", e);
743        }
744        mCameraSound.release();
745
746        mState = STATE_CONFIGURE;
747        mOldRunner = mRunner;
748        mRunner.stop();
749        mRunner = null;
750        // Rest of stop and release handled in mRunnerDoneCallback
751    }
752
753    // Try to enable/disable video stabilization if supported; otherwise return false
754    boolean tryEnableVideoStabilization(boolean toggle) {
755        Camera.Parameters params = mCameraDevice.getParameters();
756
757        String vstabSupported = params.get("video-stabilization-supported");
758        if ("true".equals(vstabSupported)) {
759            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
760            params.set("video-stabilization", toggle ? "true" : "false");
761            mCameraDevice.setParameters(params);
762            return true;
763        }
764        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
765        return false;
766    }
767
768    // Try to enable/disable 3A locks if supported; otherwise return false
769    boolean tryEnable3ALocks(boolean toggle) {
770        Camera.Parameters params = mCameraDevice.getParameters();
771        if (params.isAutoExposureLockSupported() &&
772            params.isAutoWhiteBalanceLockSupported() ) {
773            params.setAutoExposureLock(toggle);
774            params.setAutoWhiteBalanceLock(toggle);
775            mCameraDevice.setParameters(params);
776            return true;
777        }
778        return false;
779    }
780
781    // Try to enable/disable 3A locks if supported; otherwise, throw error
782    // Use this when locks are essential to success
783    void enable3ALocks(boolean toggle) {
784        Camera.Parameters params = mCameraDevice.getParameters();
785        if (!tryEnable3ALocks(toggle)) {
786            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
787        }
788    }
789
    // Invoked by the filter framework whenever a GraphRunner halts — either
    // because an effect was stopped/switched or because the graph hit an
    // error. Propagates errors to the listener, tears down the previous
    // graph, and then either starts the replacement runner (effect switch)
    // or reports that effects have fully stopped. All work happens under
    // the EffectsRecorder lock to serialize against the public API.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        @Override
        public void onRunnerDone(int result) {
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case: report whichever runner's error is
                    // available (the current runner if present, otherwise the
                    // one being torn down) to the listener via raiseError().
                    Log.e(TAG, "Error running filter graph!");
                    Exception e = null;
                    if (mRunner != null) {
                        e = mRunner.getError();
                    } else if (mOldRunner != null) {
                        e = mOldRunner.getError();
                    }
                    raiseError(e);
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available. tearDown() needs a
                    // current GL context, so activate the environment around
                    // the call when it is not already active.
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    // In case of an error, the graph restarts from beginning and in case
                    // of the BACKDROPPER effect, the learner re-learns the background.
                    // Hence, we need to show the learning dialogue to the user
                    // to avoid recording before the learning is done. Else, the user
                    // could start recording before the learning is done and the new
                    // background comes up later leading to an end result video
                    // with a heterogeneous background.
                    // For BACKDROPPER effect, this path is also executed sometimes at
                    // the end of a normal recording session. In such a case, the graph
                    // does not restart and hence the learner does not re-learn. So we
                    // do not want to show the learning dialogue then.
                    if (result == GraphRunner.RESULT_ERROR &&
                            mCurrentEffect == EFFECT_BACKDROPPER) {
                        sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    }
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects: unlock 3A and tell the listener
                    // that direct (non-effect) preview can be restored.
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
857
858    // Indicates that all camera/recording activity needs to halt
859    public synchronized void release() {
860        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
861
862        switch (mState) {
863            case STATE_RECORD:
864            case STATE_STARTING_PREVIEW:
865            case STATE_PREVIEW:
866                stopPreview();
867                // Fall-through
868            default:
869                mState = STATE_RELEASED;
870                break;
871        }
872    }
873
874    private void sendMessage(final int effect, final int msg) {
875        if (mEffectsListener != null) {
876            mHandler.post(new Runnable() {
877                @Override
878                public void run() {
879                    mEffectsListener.onEffectsUpdate(effect, msg);
880                }
881            });
882        }
883    }
884
885    private void raiseError(final Exception exception) {
886        if (mEffectsListener != null) {
887            mHandler.post(new Runnable() {
888                @Override
889                public void run() {
890                    if (mFd != null) {
891                        mEffectsListener.onEffectsError(exception, null);
892                    } else {
893                        mEffectsListener.onEffectsError(exception, mOutputFile);
894                    }
895                }
896            });
897        }
898    }
899
900}
901