EffectsRecorder.java revision 07b77037f2f5aa28d6d3188f78588dc7f2d49398
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.content.res.AssetFileDescriptor;
21import android.filterfw.GraphEnvironment;
22import android.filterfw.core.Filter;
23import android.filterfw.core.GLEnvironment;
24import android.filterfw.core.GraphRunner;
25import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
26import android.filterfw.geometry.Point;
27import android.filterfw.geometry.Quad;
28import android.filterpacks.videoproc.BackDropperFilter;
29import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
30import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
31import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
32
33import android.graphics.SurfaceTexture;
34import android.hardware.Camera;
35import android.media.MediaRecorder;
36import android.media.CamcorderProfile;
37import android.os.ConditionVariable;
38import android.os.Handler;
39import android.os.Looper;
40import android.os.ParcelFileDescriptor;
41import android.os.SystemProperties;
42import android.util.Log;
43import android.view.Surface;
44import android.view.SurfaceHolder;
45
46import java.io.IOException;
47import java.io.FileNotFoundException;
48import java.io.File;
49import java.lang.Runnable;
50import java.io.FileDescriptor;
51
52
53/**
54 * Encapsulates the mobile filter framework components needed to record video with
55 * effects applied. Modeled after MediaRecorder.
56 */
57public class EffectsRecorder {
58
    // Effect selection IDs, passed to setEffect().
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    // Sub-effect IDs for EFFECT_GOOFY_FACE, passed as its effect parameter.
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Asynchronous status messages delivered via EffectsListener.onEffectsUpdate().
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;

    private Context mContext;
    // Bound to the main looper in the constructor; used to post callbacks
    // from filter-framework threads back to the UI thread.
    private Handler mHandler;
    private boolean mReleased;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    // Time-lapse capture rate in fps; 0 (default) means normal-speed capture.
    private double mCaptureRate = 0;
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Output destination: exactly one of mOutputFile / mFd is non-null.
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    // 0 or negative disables the file-size limit (see setMaxFileSize).
    private long mMaxFileSize = 0;
    // 0 disables the duration limit (see setMaxDuration).
    private int mMaxDurationMs = 0;
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;

    // mEffect is the requested effect; mCurrentEffect is the one the running
    // graph was built for. They differ while a switch is pending.
    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    // mOldRunner holds the previous graph runner while it shuts down during
    // an effect switch; its done-callback starts the new mRunner.
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;

    private SurfaceTexture mTextureSource;

    private static final String mVideoRecordSound = "/system/media/audio/ui/VideoRecord.ogg";
    private SoundPlayer mRecordSound;

    // Recorder state machine; transitions are driven by the public API plus
    // the asynchronous runner/surface callbacks below.
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    // NOTE: TAG is declared below this use, but that is legal — TAG is a
    // compile-time constant, so the forward reference resolves at compile time.
    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";
123
124    /** Determine if a given effect is supported at runtime
125     * Some effects require libraries not available on all devices
126     */
127    public static boolean isEffectSupported(int effectId) {
128        switch (effectId) {
129            case EFFECT_GOOFY_FACE:
130                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
131            case EFFECT_BACKDROPPER:
132                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
133            default:
134                return false;
135        }
136    }
137
    /**
     * Creates a recorder bound to the given context. Loads the system video
     * record sound; if the sound file is missing, recording proceeds silently.
     */
    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
        mContext = context;
        // Post asynchronous callbacks back to the UI (main looper) thread.
        mHandler = new Handler(Looper.getMainLooper());

        // Construct sound player; use enforced sound output if necessary
        File recordSoundFile = new File(mVideoRecordSound);
        try {
            ParcelFileDescriptor recordSoundParcel =
                    ParcelFileDescriptor.open(recordSoundFile,
                            ParcelFileDescriptor.MODE_READ_ONLY);
            AssetFileDescriptor recordSoundAsset =
                    new AssetFileDescriptor(recordSoundParcel, 0,
                                            AssetFileDescriptor.UNKNOWN_LENGTH);
            // A non-"0" value of ro.camera.sound.forced means the record sound
            // must play regardless of the user's volume settings.
            if (SystemProperties.get("ro.camera.sound.forced", "0").equals("0")) {
                if (mLogVerbose) Log.v(TAG, "Standard recording sound");
                mRecordSound = new SoundPlayer(recordSoundAsset, false);
            } else {
                if (mLogVerbose) Log.v(TAG, "Forced recording sound");
                mRecordSound = new SoundPlayer(recordSoundAsset, true);
            }
        } catch (java.io.FileNotFoundException e) {
            // Non-fatal: startRecording()/stopRecording() null-check mRecordSound.
            Log.e(TAG, "System video record sound not found");
            mRecordSound = null;
        }

    }
165
166    public void setCamera(Camera cameraDevice) {
167        switch (mState) {
168            case STATE_PREVIEW:
169                throw new RuntimeException("setCamera cannot be called while previewing!");
170            case STATE_RECORD:
171                throw new RuntimeException("setCamera cannot be called while recording!");
172            case STATE_RELEASED:
173                throw new RuntimeException("setCamera called on an already released recorder!");
174            default:
175                break;
176        }
177
178        mCameraDevice = cameraDevice;
179    }
180
181    public void setProfile(CamcorderProfile profile) {
182        switch (mState) {
183            case STATE_RECORD:
184                throw new RuntimeException("setProfile cannot be called while recording!");
185            case STATE_RELEASED:
186                throw new RuntimeException("setProfile called on an already released recorder!");
187            default:
188                break;
189        }
190        mProfile = profile;
191    }
192
193    public void setOutputFile(String outputFile) {
194        switch (mState) {
195            case STATE_RECORD:
196                throw new RuntimeException("setOutputFile cannot be called while recording!");
197            case STATE_RELEASED:
198                throw new RuntimeException("setOutputFile called on an already released recorder!");
199            default:
200                break;
201        }
202
203        mOutputFile = outputFile;
204        mFd = null;
205    }
206
207    public void setOutputFile(FileDescriptor fd) {
208        switch (mState) {
209            case STATE_RECORD:
210                throw new RuntimeException("setOutputFile cannot be called while recording!");
211            case STATE_RELEASED:
212                throw new RuntimeException("setOutputFile called on an already released recorder!");
213            default:
214                break;
215        }
216
217        mOutputFile = null;
218        mFd = fd;
219    }
220
221    /**
222     * Sets the maximum filesize (in bytes) of the recording session.
223     * This will be passed on to the MediaEncoderFilter and then to the
224     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
225     * disable the limit
226    */
227    public synchronized void setMaxFileSize(long maxFileSize) {
228        switch (mState) {
229            case STATE_RECORD:
230                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
231            case STATE_RELEASED:
232                throw new RuntimeException("setMaxFileSize called on an already released recorder!");
233            default:
234                break;
235        }
236        mMaxFileSize = maxFileSize;
237    }
238
239    /**
240    * Sets the maximum recording duration (in ms) for the next recording session
241    * Setting it to zero (the default) disables the limit.
242    */
243    public synchronized void setMaxDuration(int maxDurationMs) {
244        switch (mState) {
245            case STATE_RECORD:
246                throw new RuntimeException("setMaxDuration cannot be called while recording!");
247            case STATE_RELEASED:
248                throw new RuntimeException("setMaxDuration called on an already released recorder!");
249            default:
250                break;
251        }
252        mMaxDurationMs = maxDurationMs;
253    }
254
255
256    public void setCaptureRate(double fps) {
257        switch (mState) {
258            case STATE_RECORD:
259                throw new RuntimeException("setCaptureRate cannot be called while recording!");
260            case STATE_RELEASED:
261                throw new RuntimeException("setCaptureRate called on an already released recorder!");
262            default:
263                break;
264        }
265
266        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
267        mCaptureRate = fps;
268    }
269
270    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
271                                  int previewWidth,
272                                  int previewHeight) {
273        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
274        switch (mState) {
275            case STATE_RECORD:
276                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
277            case STATE_RELEASED:
278                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
279            default:
280                break;
281        }
282
283        mPreviewSurfaceHolder = previewSurfaceHolder;
284        mPreviewWidth = previewWidth;
285        mPreviewHeight = previewHeight;
286
287        switch (mState) {
288            case STATE_WAITING_FOR_SURFACE:
289                startPreview();
290                break;
291            case STATE_STARTING_PREVIEW:
292            case STATE_PREVIEW:
293                initializeEffect(true);
294                break;
295        }
296    }
297
298    public void setEffect(int effect, Object effectParameter) {
299        if (mLogVerbose) Log.v(TAG,
300                               "setEffect: effect ID " + effect +
301                               ", parameter " + effectParameter.toString() );
302        switch (mState) {
303            case STATE_RECORD:
304                throw new RuntimeException("setEffect cannot be called while recording!");
305            case STATE_RELEASED:
306                throw new RuntimeException("setEffect called on an already released recorder!");
307            default:
308                break;
309        }
310
311        mEffect = effect;
312        mEffectParameter = effectParameter;
313
314        if (mState == STATE_PREVIEW ||
315                mState == STATE_STARTING_PREVIEW) {
316            initializeEffect(false);
317        }
318    }
319
    /** Receives asynchronous effect status updates and error notifications. */
    public interface EffectsListener {
        // Delivered with one of the EFFECT_MSG_* codes for the given EFFECT_* ID.
        public void onEffectsUpdate(int effectId, int effectMsg);
        // Delivered when recording/processing fails; filePath is the output path
        // involved (see sendMessage call sites elsewhere in this class).
        public void onEffectsError(Exception exception, String filePath);
    }
324
    /** Registers the listener that receives EFFECT_MSG_* updates and errors. */
    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }
328
329    private void setFaceDetectOrientation() {
330        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
331            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
332            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
333            rotateFilter.setInputValue("rotation", mOrientationHint);
334            int reverseDegrees = (360 - mOrientationHint) % 360;
335            metaRotateFilter.setInputValue("rotation", reverseDegrees);
336        }
337    }
338
    /**
     * Configures the recorder filter's input region so recorded video is not
     * mirrored for front-facing cameras (which mirror their preview).
     */
    private void setRecordingOrientation() {
        // NOTE(review): this only updates the region in CONFIGURE or
        // WAITING_FOR_SURFACE states, and mRunner is typically null there
        // (stopPreview() nulls it after entering CONFIGURE) — confirm this
        // condition is intended; it looks like it may never fire mid-preview.
        if ( (mState == STATE_CONFIGURE || mState == STATE_WAITING_FOR_SURFACE)
                && mRunner != null ) {
            // Unit-square corners in graph coordinates.
            Point bl = new Point(0, 0);
            Point br = new Point(1, 0);
            Point tl = new Point(0, 1);
            Point tr = new Point(1, 1);
            Quad recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use a identity transform
                recordingRegion = new Quad(bl, br, tl, tr);
            } else {
                // Recording region needs to be tweaked for front cameras, since they
                // mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = new Quad(br, bl, tr, tl);
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = new Quad(tl, tr, bl, br);
                }
            }
            Filter recorder = mRunner.getGraph().getFilter("recorder");
            recorder.setInputValue("inputRegion", recordingRegion);
        }
    }
365    public void setOrientationHint(int degrees) {
366        switch (mState) {
367            case STATE_RELEASED:
368                throw new RuntimeException(
369                        "setOrientationHint called on an already released recorder!");
370            default:
371                break;
372        }
373        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
374        mOrientationHint = degrees;
375        setFaceDetectOrientation();
376        setRecordingOrientation();
377    }
378
379    public void setCameraFacing(int facing) {
380        switch (mState) {
381            case STATE_RELEASED:
382                throw new RuntimeException(
383                    "setCameraFacing called on alrady released recorder!");
384            default:
385                break;
386        }
387        mCameraFacing = facing;
388        setRecordingOrientation();
389    }
390
391    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
392        switch (mState) {
393            case STATE_RECORD:
394                throw new RuntimeException("setInfoListener cannot be called while recording!");
395            case STATE_RELEASED:
396                throw new RuntimeException("setInfoListener called on an already released recorder!");
397            default:
398                break;
399        }
400        mInfoListener = infoListener;
401    }
402
403    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
404        switch (mState) {
405            case STATE_RECORD:
406                throw new RuntimeException("setErrorListener cannot be called while recording!");
407            case STATE_RELEASED:
408                throw new RuntimeException("setErrorListener called on an already released recorder!");
409            default:
410                break;
411        }
412        mErrorListener = errorListener;
413    }
414
    /**
     * Builds a fresh GraphEnvironment (with its own GL context) and seeds it
     * with the references shared by all effect graphs. Resets runner/effect
     * state so the next initializeEffect() builds a new graph.
     */
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }

        // These names are bound into the graph files loaded in initializeEffect().
        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        // Force a full graph (re)build on the next initializeEffect() call.
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }
435
    /**
     * Loads (or reloads) the filter graph for the currently selected effect
     * and applies its parameters. If a graph is already running, the old
     * runner is stopped here and its done-callback starts the new one.
     *
     * @param forceReset reload the graph even if the effect is unchanged
     *                   (also always reloads for EFFECT_BACKDROPPER, which
     *                   needs a fresh learning pass)
     */
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {
            if (mLogVerbose) {
                Log.v(TAG, "Effect initializing. Preview size "
                       + mPreviewWidth + ", " + mPreviewHeight);
            }

            // Surface/orientation references must be current before loadGraph.
            mGraphEnv.addReferences(
                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    // Backdropper always starts with a background-learning phase.
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            // Keep the old runner alive until it has fully stopped; its
            // done-callback (mRunnerDoneCallback) tears it down and starts
            // the new runner.
            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    // Detach the camera from the old graph's SurfaceTexture
                    // before that texture is destroyed with the old runner.
                    mCameraDevice.setPreviewTexture(null);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }
                mOldRunner.stop();
            }
        }

        // Apply per-effect parameters to the (possibly pre-existing) graph.
        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                                          ((Integer)mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                // Stabilization would confuse the background-subtraction model.
                tryEnableVideoStabilization(false);
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl",
                                            (String)mEffectParameter);
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }
509
510    public synchronized void startPreview() {
511        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
512
513        switch (mState) {
514            case STATE_STARTING_PREVIEW:
515            case STATE_PREVIEW:
516                // Already running preview
517                Log.w(TAG, "startPreview called when already running preview");
518                return;
519            case STATE_RECORD:
520                throw new RuntimeException("Cannot start preview when already recording!");
521            case STATE_RELEASED:
522                throw new RuntimeException("setEffect called on an already released recorder!");
523            default:
524                break;
525        }
526
527        if (mEffect == EFFECT_NONE) {
528            throw new RuntimeException("No effect selected!");
529        }
530        if (mEffectParameter == null) {
531            throw new RuntimeException("No effect parameter provided!");
532        }
533        if (mProfile == null) {
534            throw new RuntimeException("No recording profile provided!");
535        }
536        if (mPreviewSurfaceHolder == null) {
537            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
538            mState = STATE_WAITING_FOR_SURFACE;
539            return;
540        }
541        if (mCameraDevice == null) {
542            throw new RuntimeException("No camera to record from!");
543        }
544
545        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
546
547        initializeFilterFramework();
548
549        initializeEffect(true);
550
551        if (mLogVerbose) Log.v(TAG, "Starting filter graph");
552
553        mState = STATE_STARTING_PREVIEW;
554        mRunner.run();
555        // Rest of preview startup handled in mSourceReadyCallback
556    }
557
    /**
     * Called by the graph's SurfaceTextureSource when its texture is ready
     * (or, with a null source, when the graph is shutting down). Connects the
     * camera to the effect input and completes the preview-start transition.
     * Runs on a filter-framework thread; synchronizes on this recorder.
     */
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch(IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    // Route camera frames into the effect graph's texture.
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
            }
        }
    };
618
    /**
     * Invoked by the backdropper filter when background learning completes.
     * Notifies the UI and locks AE/AWB so the learned background model stays
     * valid (locking is mandatory here, hence enable3ALocks, which throws if
     * unsupported).
     */
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            enable3ALocks(true);
        }
    };
629
    // A callback to finalize the media after the recording is done.
    // Fired by the MediaEncoderFilter on a processing thread; sendMessage posts
    // the notification to the UI thread.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
639
    /**
     * Starts recording through the current effect graph's "recorder" filter.
     * Requires an output destination (file or fd). Starts preview first if
     * still in STATE_CONFIGURE.
     */
    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException("startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            // NOTE(review): if startPreview() defers into
            // STATE_WAITING_FOR_SURFACE here, mRunner stays null and the
            // getFilter call below would NPE — confirm callers guarantee a
            // preview surface before recording.
            startPreview();
        }

        Filter recorder = mRunner.getGraph().getFilter("recorder");
        // File descriptor takes precedence when both were ever set (setOutputFile
        // clears the other field, so at most one is non-null).
        if (mFd != null) {
            recorder.setInputValue("outputFileDescriptor", mFd);
        } else {
            recorder.setInputValue("outputFile", mOutputFile);
        }
        // It is ok to set the audiosource without checking for timelapse here
        // since that check will be done in the MediaEncoderFilter itself
        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);

        recorder.setInputValue("recordingProfile", mProfile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        // Important to set the timelapseinterval to 0 if the capture rate is not >0
        // since the recorder does not get created every time the recording starts.
        // The recorder infers whether the capture is timelapsed based on the value of
        // this interval
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            // Convert fps to a per-frame interval in microseconds.
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            recorder.setInputValue("timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("maxFileSize", mMaxFileSize);
        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
        recorder.setInputValue("recording", true);
        // Play the record-start cue if the system sound was loaded.
        if (mRecordSound != null) mRecordSound.play();
        mState = STATE_RECORD;
    }
697
698    public synchronized void stopRecording() {
699        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
700
701        switch (mState) {
702            case STATE_CONFIGURE:
703            case STATE_STARTING_PREVIEW:
704            case STATE_PREVIEW:
705                Log.w(TAG, "StopRecording called when recording not active!");
706                return;
707            case STATE_RELEASED:
708                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
709            default:
710                break;
711        }
712        Filter recorder = mRunner.getGraph().getFilter("recorder");
713        recorder.setInputValue("recording", false);
714        if (mRecordSound != null) mRecordSound.play();
715        mState = STATE_PREVIEW;
716    }
717
718    // Stop and release effect resources
719    public synchronized void stopPreview() {
720        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
721
722        switch (mState) {
723            case STATE_CONFIGURE:
724                Log.w(TAG, "StopPreview called when preview not active!");
725                return;
726            case STATE_RELEASED:
727                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
728            default:
729                break;
730        }
731
732        if (mState == STATE_RECORD) {
733            stopRecording();
734        }
735
736        mCurrentEffect = EFFECT_NONE;
737
738        mCameraDevice.stopPreview();
739        try {
740            mCameraDevice.setPreviewTexture(null);
741        } catch(IOException e) {
742            throw new RuntimeException("Unable to connect camera to effect input", e);
743        }
744
745        mState = STATE_CONFIGURE;
746        mOldRunner = mRunner;
747        mRunner.stop();
748        mRunner = null;
749        // Rest of stop and release handled in mRunnerDoneCallback
750    }
751
752    // Try to enable/disable video stabilization if supported; otherwise return false
753    boolean tryEnableVideoStabilization(boolean toggle) {
754        Camera.Parameters params = mCameraDevice.getParameters();
755
756        String vstabSupported = params.get("video-stabilization-supported");
757        if ("true".equals(vstabSupported)) {
758            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
759            params.set("video-stabilization", toggle ? "true" : "false");
760            mCameraDevice.setParameters(params);
761            return true;
762        }
763        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
764        return false;
765    }
766
767    // Try to enable/disable 3A locks if supported; otherwise return false
768    boolean tryEnable3ALocks(boolean toggle) {
769        Camera.Parameters params = mCameraDevice.getParameters();
770        if (params.isAutoExposureLockSupported() &&
771            params.isAutoWhiteBalanceLockSupported() ) {
772            params.setAutoExposureLock(toggle);
773            params.setAutoWhiteBalanceLock(toggle);
774            mCameraDevice.setParameters(params);
775            return true;
776        }
777        return false;
778    }
779
780    // Try to enable/disable 3A locks if supported; otherwise, throw error
781    // Use this when locks are essential to success
782    void enable3ALocks(boolean toggle) {
783        Camera.Parameters params = mCameraDevice.getParameters();
784        if (!tryEnable3ALocks(toggle)) {
785            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
786        }
787    }
788
    // Invoked (on the graph runner's thread) whenever a filter graph runner
    // halts, either from an error, an effect switch, or a stopPreview() call.
    // Finishes the teardown started by stopPreview(): destroys the old graph,
    // and either starts the replacement runner or notifies the listener that
    // effects have fully stopped.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        public void onRunnerDone(int result) {
            // Lock against the recorder's public entry points, which are all
            // synchronized on this same object.
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    // mRunner may already be null if stopPreview() ran first.
                    raiseError(mRunner == null ? null : mRunner.getError());
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    // tearDown needs an active GL context; activate it only if
                    // it is not already active, and deactivate afterwards so
                    // we leave the GL state as we found it.
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    // Clear any 3A locks left over from the previous effect.
                    tryEnable3ALocks(false);
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
834
835    // Indicates that all camera/recording activity needs to halt
836    public synchronized void release() {
837        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
838
839        switch (mState) {
840            case STATE_RECORD:
841            case STATE_STARTING_PREVIEW:
842            case STATE_PREVIEW:
843                stopPreview();
844                // Fall-through
845            default:
846                mRecordSound.release();
847                mState = STATE_RELEASED;
848                break;
849        }
850    }
851
852    private void sendMessage(final int effect, final int msg) {
853        if (mEffectsListener != null) {
854            mHandler.post(new Runnable() {
855                public void run() {
856                    mEffectsListener.onEffectsUpdate(effect, msg);
857                }
858            });
859        }
860    }
861
862    private void raiseError(final Exception exception) {
863        if (mEffectsListener != null) {
864            mHandler.post(new Runnable() {
865                public void run() {
866                    if (mFd != null) {
867                        mEffectsListener.onEffectsError(exception, null);
868                    } else {
869                        mEffectsListener.onEffectsError(exception, mOutputFile);
870                    }
871                }
872            });
873        }
874    }
875
876}
877