EffectsRecorder.java revision 58cede9e3e8004f6be9731e9cb9a135238a2efd8
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.content.res.AssetFileDescriptor;
21import android.filterfw.GraphEnvironment;
22import android.filterfw.core.Filter;
23import android.filterfw.core.GLEnvironment;
24import android.filterfw.core.GraphRunner;
25import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
26import android.filterfw.geometry.Point;
27import android.filterfw.geometry.Quad;
28import android.filterpacks.videoproc.BackDropperFilter;
29import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
30import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
31import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
32
33import android.graphics.SurfaceTexture;
34import android.hardware.Camera;
35import android.media.MediaRecorder;
36import android.media.CamcorderProfile;
37import android.os.ConditionVariable;
38import android.os.Handler;
39import android.os.Looper;
40import android.os.ParcelFileDescriptor;
41import android.os.SystemProperties;
42import android.util.Log;
43import android.view.Surface;
44import android.view.SurfaceHolder;
45
46import java.io.IOException;
47import java.io.FileNotFoundException;
48import java.io.File;
49import java.lang.Runnable;
50import java.io.FileDescriptor;
51
52
53/**
54 * Encapsulates the mobile filter framework components needed to record video with
55 * effects applied. Modeled after MediaRecorder.
56 */
57public class EffectsRecorder {
58
    // Effect selectors accepted by setEffect()
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    // Sub-effects for EFFECT_GOOFY_FACE; one of these is passed as the
    // (Integer) effect parameter to setEffect().
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Status codes delivered to EffectsListener.onEffectsUpdate()
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;

    private Context mContext;
    // Bound to the main looper; all listener callbacks are posted through it.
    private Handler mHandler;
    // NOTE(review): never read or written in this file — possibly dead; verify before removing.
    private boolean mReleased;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    // Time-lapse capture rate in fps; <= 0 means normal (non-time-lapse) recording.
    private double mCaptureRate = 0;
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Exactly one of mOutputFile / mFd is non-null at a time (see setOutputFile overloads).
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;
    private int mMaxDurationMs = 0;
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;

    // mEffect is the requested effect; mCurrentEffect is the one the running graph implements.
    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    // mRunner drives the active graph; mOldRunner holds the previous graph while it
    // shuts down during an effect switch (torn down in mRunnerDoneCallback).
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;

    private SurfaceTexture mTextureSource;

    // NOTE(review): constant named like an instance field; UPPER_SNAKE_CASE would be conventional.
    private static final String mVideoRecordSound = "/system/media/audio/ui/VideoRecord.ogg";
    // Null when the system record sound file is missing — all users must null-check.
    private SoundPlayer mRecordSound;

    // Lifecycle states; ordering matters (setRecordingOrientation compares with <=).
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_PREVIEW                = 2;
    private static final int STATE_RECORD                 = 3;
    private static final int STATE_RELEASED               = 4;
    private int mState = STATE_CONFIGURE;

    // Forward reference to TAG is safe: TAG is a compile-time String constant.
    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";
122
123    /** Determine if a given effect is supported at runtime
124     * Some effects require libraries not available on all devices
125     */
126    public static boolean isEffectSupported(int effectId) {
127        switch (effectId) {
128            case EFFECT_GOOFY_FACE:
129                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
130            case EFFECT_BACKDROPPER:
131                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
132            default:
133                return false;
134        }
135    }
136
    /**
     * Creates the recorder and eagerly loads the system video-record sound.
     * If the sound file is missing, mRecordSound stays null and recording
     * proceeds silently (play() call sites null-check it).
     */
    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
        mContext = context;
        // All listener callbacks are dispatched on the main looper.
        mHandler = new Handler(Looper.getMainLooper());

        // Construct sound player; use enforced sound output if necessary
        File recordSoundFile = new File(mVideoRecordSound);
        try {
            ParcelFileDescriptor recordSoundParcel =
                    ParcelFileDescriptor.open(recordSoundFile,
                            ParcelFileDescriptor.MODE_READ_ONLY);
            AssetFileDescriptor recordSoundAsset =
                    new AssetFileDescriptor(recordSoundParcel, 0,
                                            AssetFileDescriptor.UNKNOWN_LENGTH);
            // "ro.camera.sound.forced" != "0" means the shutter/record sound
            // must play regardless of the user's sound settings.
            if (SystemProperties.get("ro.camera.sound.forced", "0").equals("0")) {
                if (mLogVerbose) Log.v(TAG, "Standard recording sound");
                mRecordSound = new SoundPlayer(recordSoundAsset, false);
            } else {
                if (mLogVerbose) Log.v(TAG, "Forced recording sound");
                mRecordSound = new SoundPlayer(recordSoundAsset, true);
            }
        } catch (java.io.FileNotFoundException e) {
            Log.e(TAG, "System video record sound not found");
            mRecordSound = null;
        }

    }
164
165    public void setCamera(Camera cameraDevice) {
166        switch (mState) {
167            case STATE_PREVIEW:
168                throw new RuntimeException("setCamera cannot be called while previewing!");
169            case STATE_RECORD:
170                throw new RuntimeException("setCamera cannot be called while recording!");
171            case STATE_RELEASED:
172                throw new RuntimeException("setCamera called on an already released recorder!");
173            default:
174                break;
175        }
176
177        mCameraDevice = cameraDevice;
178    }
179
180    public void setProfile(CamcorderProfile profile) {
181        switch (mState) {
182            case STATE_RECORD:
183                throw new RuntimeException("setProfile cannot be called while recording!");
184            case STATE_RELEASED:
185                throw new RuntimeException("setProfile called on an already released recorder!");
186            default:
187                break;
188        }
189        mProfile = profile;
190    }
191
192    public void setOutputFile(String outputFile) {
193        switch (mState) {
194            case STATE_RECORD:
195                throw new RuntimeException("setOutputFile cannot be called while recording!");
196            case STATE_RELEASED:
197                throw new RuntimeException("setOutputFile called on an already released recorder!");
198            default:
199                break;
200        }
201
202        mOutputFile = outputFile;
203        mFd = null;
204    }
205
206    public void setOutputFile(FileDescriptor fd) {
207        switch (mState) {
208            case STATE_RECORD:
209                throw new RuntimeException("setOutputFile cannot be called while recording!");
210            case STATE_RELEASED:
211                throw new RuntimeException("setOutputFile called on an already released recorder!");
212            default:
213                break;
214        }
215
216        mOutputFile = null;
217        mFd = fd;
218    }
219
220    /**
221     * Sets the maximum filesize (in bytes) of the recording session.
222     * This will be passed on to the MediaEncoderFilter and then to the
223     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
224     * disable the limit
225    */
226    public synchronized void setMaxFileSize(long maxFileSize) {
227        switch (mState) {
228            case STATE_RECORD:
229                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
230            case STATE_RELEASED:
231                throw new RuntimeException("setMaxFileSize called on an already released recorder!");
232            default:
233                break;
234        }
235        mMaxFileSize = maxFileSize;
236    }
237
238    /**
239    * Sets the maximum recording duration (in ms) for the next recording session
240    * Setting it to zero (the default) disables the limit.
241    */
242    public synchronized void setMaxDuration(int maxDurationMs) {
243        switch (mState) {
244            case STATE_RECORD:
245                throw new RuntimeException("setMaxDuration cannot be called while recording!");
246            case STATE_RELEASED:
247                throw new RuntimeException("setMaxDuration called on an already released recorder!");
248            default:
249                break;
250        }
251        mMaxDurationMs = maxDurationMs;
252    }
253
254
255    public void setCaptureRate(double fps) {
256        switch (mState) {
257            case STATE_RECORD:
258                throw new RuntimeException("setCaptureRate cannot be called while recording!");
259            case STATE_RELEASED:
260                throw new RuntimeException("setCaptureRate called on an already released recorder!");
261            default:
262                break;
263        }
264
265        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
266        mCaptureRate = fps;
267    }
268
269    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
270                                  int previewWidth,
271                                  int previewHeight) {
272        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
273        switch (mState) {
274            case STATE_RECORD:
275                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
276            case STATE_RELEASED:
277                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
278            default:
279                break;
280        }
281
282        mPreviewSurfaceHolder = previewSurfaceHolder;
283        mPreviewWidth = previewWidth;
284        mPreviewHeight = previewHeight;
285
286        switch (mState) {
287            case STATE_WAITING_FOR_SURFACE:
288                startPreview();
289                break;
290            case STATE_PREVIEW:
291                initializeEffect(true);
292                break;
293        }
294    }
295
296    public void setEffect(int effect, Object effectParameter) {
297        if (mLogVerbose) Log.v(TAG,
298                               "setEffect: effect ID " + effect +
299                               ", parameter " + effectParameter.toString() );
300        switch (mState) {
301            case STATE_RECORD:
302                throw new RuntimeException("setEffect cannot be called while recording!");
303            case STATE_RELEASED:
304                throw new RuntimeException("setEffect called on an already released recorder!");
305            default:
306                break;
307        }
308
309        mEffect = effect;
310        mEffectParameter = effectParameter;
311
312        if (mState == STATE_PREVIEW) {
313            initializeEffect(false);
314        }
315    }
316
    /**
     * Callback interface for effect status updates and errors. All callbacks
     * are posted to the main looper (see sendMessage()/raiseError()).
     */
    public interface EffectsListener {
        // effectMsg is one of the EFFECT_MSG_* constants.
        public void onEffectsUpdate(int effectId, int effectMsg);
        // filePath is the output path, or null when recording to a descriptor.
        public void onEffectsError(Exception exception, String filePath);
    }

    /** Registers the listener that receives effect updates and errors. */
    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }
325
326    private void setFaceDetectOrientation() {
327        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
328            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
329            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
330            rotateFilter.setInputValue("rotation", mOrientationHint);
331            int reverseDegrees = (360 - mOrientationHint) % 360;
332            metaRotateFilter.setInputValue("rotation", reverseDegrees);
333        }
334    }
335
    /**
     * Configures the recorder filter's input region so that front-camera
     * recordings compensate for the mirrored preview. Only applies while a
     * graph exists and recording has not started (mState <= STATE_PREVIEW).
     */
    private void setRecordingOrientation() {
        if ( mState <= STATE_PREVIEW && mRunner != null ) {
            // Unit-square corners in texture coordinates.
            Point bl = new Point(0, 0);
            Point br = new Point(1, 0);
            Point tl = new Point(0, 1);
            Point tr = new Point(1, 1);
            Quad recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use an identity transform
                recordingRegion = new Quad(bl, br, tl, tr);
            } else {
                // Recording region needs to be tweaked for front cameras, since they
                // mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = new Quad(br, bl, tr, tl);
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = new Quad(tl, tr, bl, br);
                }
            }
            Filter recorder = mRunner.getGraph().getFilter("recorder");
            recorder.setInputValue("inputRegion", recordingRegion);
        }
    }
361    public void setOrientationHint(int degrees) {
362        switch (mState) {
363            case STATE_RELEASED:
364                throw new RuntimeException(
365                        "setOrientationHint called on an already released recorder!");
366            default:
367                break;
368        }
369        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
370        mOrientationHint = degrees;
371        setFaceDetectOrientation();
372        setRecordingOrientation();
373    }
374
375    public void setCameraFacing(int facing) {
376        switch (mState) {
377            case STATE_RELEASED:
378                throw new RuntimeException(
379                    "setCameraFacing called on alrady released recorder!");
380            default:
381                break;
382        }
383        mCameraFacing = facing;
384        setRecordingOrientation();
385    }
386
387    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
388        switch (mState) {
389            case STATE_RECORD:
390                throw new RuntimeException("setInfoListener cannot be called while recording!");
391            case STATE_RELEASED:
392                throw new RuntimeException("setInfoListener called on an already released recorder!");
393            default:
394                break;
395        }
396        mInfoListener = infoListener;
397    }
398
399    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
400        switch (mState) {
401            case STATE_RECORD:
402                throw new RuntimeException("setErrorListener cannot be called while recording!");
403            case STATE_RELEASED:
404                throw new RuntimeException("setErrorListener called on an already released recorder!");
405            default:
406                break;
407        }
408        mErrorListener = errorListener;
409    }
410
    /**
     * Creates the filter GraphEnvironment (with its GL environment) and
     * registers the cross-graph references — callbacks and recording
     * parameters — that every effect graph binds to. Resets runner/graph
     * state. Requires mProfile to have been set.
     */
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }

        // These names are referenced by the graph definitions loaded in
        // initializeEffect() (R.raw.goofy_face / R.raw.backdropper).
        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        // No graph loaded yet; initializeEffect() populates these.
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }
431
    /**
     * Loads (or reloads) the graph for the requested effect and configures
     * its per-effect parameters.
     *
     * A reload happens when forced, when the requested effect differs from
     * the running one, or always for the backdropper (it must relearn its
     * background). When switching while preview runs, the old runner is
     * stopped here and mRunnerDoneCallback starts the new one.
     */
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {
            if (mLogVerbose) {
                Log.v(TAG, "Effect initializing. Preview size "
                       + mPreviewWidth + ", " + mPreviewHeight);
            }

            // Surface-dependent references; the recording-side references were
            // registered in initializeFilterFramework().
            mGraphEnv.addReferences(
                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    // Backdropper always starts by learning the background.
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    // NOTE(review): message is missing a space after "ID".
                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            // Keep the old runner alive until its done-callback tears it down.
            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    // Detach the camera from the old graph's surface texture
                    // before that texture is destroyed.
                    mCameraDevice.setPreviewTexture(null);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }
                mOldRunner.stop();
            }
        }

        // Per-effect parameter setup; runs on every call, even without a reload.
        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                                          ((Integer)mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                // Stabilization would perturb the learned background model.
                tryEnableVideoStabilization(false);
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl",
                                            (String)mEffectParameter);
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }
503
504    public synchronized void startPreview() {
505        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
506
507        switch (mState) {
508            case STATE_PREVIEW:
509                // Already running preview
510                Log.w(TAG, "startPreview called when already running preview");
511                return;
512            case STATE_RECORD:
513                throw new RuntimeException("Cannot start preview when already recording!");
514            case STATE_RELEASED:
515                throw new RuntimeException("setEffect called on an already released recorder!");
516            default:
517                break;
518        }
519
520        if (mEffect == EFFECT_NONE) {
521            throw new RuntimeException("No effect selected!");
522        }
523        if (mEffectParameter == null) {
524            throw new RuntimeException("No effect parameter provided!");
525        }
526        if (mProfile == null) {
527            throw new RuntimeException("No recording profile provided!");
528        }
529        if (mPreviewSurfaceHolder == null) {
530            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
531            mState = STATE_WAITING_FOR_SURFACE;
532            return;
533        }
534        if (mCameraDevice == null) {
535            throw new RuntimeException("No camera to record from!");
536        }
537
538        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
539
540        initializeFilterFramework();
541
542        initializeEffect(true);
543
544        if (mLogVerbose) Log.v(TAG, "Starting filter graph");
545
546        mRunner.run();
547        // Rest of preview startup handled in mSourceReadyCallback
548    }
549
    /**
     * Invoked by the graph's SurfaceTextureSource when its texture becomes
     * available (preview start / effect switch) or null (graph shutdown).
     * Completes the preview startup that startPreview() began: connects the
     * camera to the effect input and transitions to STATE_PREVIEW.
     */
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                // Recorder was released while this callback was in flight.
                if (mState == STATE_RELEASED) return;

                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch(IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    // Route camera frames into the effect graph's texture.
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
            }
        }
    };
598
    /**
     * Invoked by the BackDropperFilter once it has finished learning the
     * background model. Notifies the UI and locks 3A so the learned model
     * stays valid (enable3ALocks throws if the camera cannot lock).
     */
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            enable3ALocks(true);
        }
    };
609
    // A callback to finalize the media after the recording is done.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            // EFFECT_NONE: the message concerns recording, not a specific effect.
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
619
620    public synchronized void startRecording() {
621        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
622
623        switch (mState) {
624            case STATE_RECORD:
625                throw new RuntimeException("Already recording, cannot begin anew!");
626            case STATE_RELEASED:
627                throw new RuntimeException("startRecording called on an already released recorder!");
628            default:
629                break;
630        }
631
632        if ((mOutputFile == null) && (mFd == null)) {
633            throw new RuntimeException("No output file name or descriptor provided!");
634        }
635
636        if (mState == STATE_CONFIGURE) {
637            startPreview();
638        }
639
640        Filter recorder = mRunner.getGraph().getFilter("recorder");
641        if (mFd != null) {
642            recorder.setInputValue("outputFileDescriptor", mFd);
643        } else {
644            recorder.setInputValue("outputFile", mOutputFile);
645        }
646        // It is ok to set the audiosource without checking for timelapse here
647        // since that check will be done in the MediaEncoderFilter itself
648        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);
649
650        recorder.setInputValue("recordingProfile", mProfile);
651        recorder.setInputValue("orientationHint", mOrientationHint);
652        // Important to set the timelapseinterval to 0 if the capture rate is not >0
653        // since the recorder does not get created every time the recording starts.
654        // The recorder infers whether the capture is timelapsed based on the value of
655        // this interval
656        boolean captureTimeLapse = mCaptureRate > 0;
657        if (captureTimeLapse) {
658            double timeBetweenFrameCapture = 1 / mCaptureRate;
659            recorder.setInputValue("timelapseRecordingIntervalUs",
660                    (long) (1000000 * timeBetweenFrameCapture));
661        } else {
662            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
663        }
664
665        if (mInfoListener != null) {
666            recorder.setInputValue("infoListener", mInfoListener);
667        }
668        if (mErrorListener != null) {
669            recorder.setInputValue("errorListener", mErrorListener);
670        }
671        recorder.setInputValue("maxFileSize", mMaxFileSize);
672        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
673        recorder.setInputValue("recording", true);
674        if (mRecordSound != null) mRecordSound.play();
675        mState = STATE_RECORD;
676    }
677
678    public synchronized void stopRecording() {
679        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
680
681        switch (mState) {
682            case STATE_CONFIGURE:
683            case STATE_PREVIEW:
684                Log.w(TAG, "StopRecording called when recording not active!");
685                return;
686            case STATE_RELEASED:
687                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
688            default:
689                break;
690        }
691        Filter recorder = mRunner.getGraph().getFilter("recorder");
692        recorder.setInputValue("recording", false);
693        if (mRecordSound != null) mRecordSound.play();
694        mState = STATE_PREVIEW;
695    }
696
    /**
     * Stops preview (and recording, if active) and begins releasing effect
     * resources. Disconnects the camera from the graph, stops the runner,
     * and returns to STATE_CONFIGURE; final graph teardown happens in
     * mRunnerDoneCallback.
     */
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "StopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        mCameraDevice.stopPreview();
        try {
            mCameraDevice.setPreviewTexture(null);
        } catch(IOException e) {
            // NOTE(review): message says "connect" although this path disconnects.
            throw new RuntimeException("Unable to connect camera to effect input", e);
        }

        mState = STATE_CONFIGURE;
        // Hand the runner to mOldRunner so the done-callback tears it down.
        mOldRunner = mRunner;
        mRunner.stop();
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }
730
731    // Try to enable/disable video stabilization if supported; otherwise return false
732    boolean tryEnableVideoStabilization(boolean toggle) {
733        Camera.Parameters params = mCameraDevice.getParameters();
734
735        String vstabSupported = params.get("video-stabilization-supported");
736        if ("true".equals(vstabSupported)) {
737            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
738            params.set("video-stabilization", toggle ? "true" : "false");
739            mCameraDevice.setParameters(params);
740            return true;
741        }
742        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
743        return false;
744    }
745
746    // Try to enable/disable 3A locks if supported; otherwise return false
747    boolean tryEnable3ALocks(boolean toggle) {
748        Camera.Parameters params = mCameraDevice.getParameters();
749        if (params.isAutoExposureLockSupported() &&
750            params.isAutoWhiteBalanceLockSupported() ) {
751            params.setAutoExposureLock(toggle);
752            params.setAutoWhiteBalanceLock(toggle);
753            mCameraDevice.setParameters(params);
754            return true;
755        }
756        return false;
757    }
758
759    // Try to enable/disable 3A locks if supported; otherwise, throw error
760    // Use this when locks are essential to success
761    void enable3ALocks(boolean toggle) {
762        Camera.Parameters params = mCameraDevice.getParameters();
763        if (!tryEnable3ALocks(toggle)) {
764            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
765        }
766    }
767
    /**
     * Invoked when a graph runner halts — after an effect switch, a preview
     * stop, or an error. Tears down the old graph, and either starts the new
     * runner (effect switch) or notifies the UI that effects stopped.
     */
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        public void onRunnerDone(int result) {
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    raiseError(mRunner == null ? null : mRunner.getError());
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    // Teardown needs an active GL context; activate it only if
                    // it is not already active, and restore the state after.
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
812
813    // Indicates that all camera/recording activity needs to halt
814    public synchronized void release() {
815        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
816
817        switch (mState) {
818            case STATE_RECORD:
819            case STATE_PREVIEW:
820                stopPreview();
821                // Fall-through
822            default:
823                mRecordSound.release();
824                mState = STATE_RELEASED;
825                break;
826        }
827    }
828
829    private void sendMessage(final int effect, final int msg) {
830        if (mEffectsListener != null) {
831            mHandler.post(new Runnable() {
832                public void run() {
833                    mEffectsListener.onEffectsUpdate(effect, msg);
834                }
835            });
836        }
837    }
838
839    private void raiseError(final Exception exception) {
840        if (mEffectsListener != null) {
841            mHandler.post(new Runnable() {
842                public void run() {
843                    if (mFd != null) {
844                        mEffectsListener.onEffectsError(exception, null);
845                    } else {
846                        mEffectsListener.onEffectsError(exception, mOutputFile);
847                    }
848                }
849            });
850        }
851    }
852
853}
854