1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.content.res.AssetFileDescriptor;
21import android.filterfw.GraphEnvironment;
22import android.filterfw.core.Filter;
23import android.filterfw.core.GLEnvironment;
24import android.filterfw.core.GraphRunner;
25import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
26import android.filterfw.geometry.Point;
27import android.filterfw.geometry.Quad;
28import android.filterpacks.videoproc.BackDropperFilter;
29import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
30import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
31import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
32
33import android.graphics.SurfaceTexture;
34import android.hardware.Camera;
35import android.media.MediaActionSound;
36import android.media.MediaRecorder;
37import android.media.CamcorderProfile;
38import android.os.Handler;
39import android.os.Looper;
40import android.os.ParcelFileDescriptor;
41import android.util.Log;
42import android.view.Surface;
43import android.view.SurfaceHolder;
44
45import java.io.IOException;
46import java.io.FileNotFoundException;
47import java.io.File;
48import java.lang.Runnable;
49import java.io.FileDescriptor;
50
51
52/**
53 * Encapsulates the mobile filter framework components needed to record video with
54 * effects applied. Modeled after MediaRecorder.
55 */
public class EffectsRecorder {

    // Top-level effect selectors, passed to setEffect().
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    // Sub-effects of EFFECT_GOOFY_FACE, passed as the effect parameter.
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Status codes delivered to EffectsListener.onEffectsUpdate().
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
    public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;

    private Context mContext;
    // Handler bound to the main looper (see constructor); used to post
    // callbacks from filter threads back to the UI thread.
    private Handler mHandler;
    private boolean mReleased;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    // Time-lapse capture rate in fps; <= 0 means normal (non-time-lapse) recording.
    private double mCaptureRate = 0;
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Recording target: exactly one of mOutputFile / mFd is non-null at a time
    // (see the two setOutputFile() overloads).
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;    // zero or negative disables the size limit
    private int mMaxDurationMs = 0;   // zero disables the duration limit
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private boolean mAppIsLandscape;

    // mEffect is the most recently requested effect; mCurrentEffect is the one
    // whose graph is actually loaded. They differ briefly while switching.
    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    // Effect-specific parameter (e.g. an Integer sub-effect for goofy face,
    // or a String background source URL for the backdropper).
    private Object mEffectParameter;

    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    private GraphRunner mRunner = null;
    // Previous runner, kept until its done-callback fires during effect switches.
    private GraphRunner mOldRunner = null;

    // SurfaceTexture supplied by the filter graph that the camera preview feeds.
    private SurfaceTexture mTextureSource;

    // Recorder lifecycle states; mState transitions are driven by the
    // startPreview/startRecording/stopRecording/stopPreview methods.
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    // TAG is static, so this forward reference from an instance initializer is legal.
    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";
    private MediaActionSound mCameraSound;
122
123    /** Determine if a given effect is supported at runtime
124     * Some effects require libraries not available on all devices
125     */
126    public static boolean isEffectSupported(int effectId) {
127        switch (effectId) {
128            case EFFECT_GOOFY_FACE:
129                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
130            case EFFECT_BACKDROPPER:
131                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
132            default:
133                return false;
134        }
135    }
136
137    public EffectsRecorder(Context context) {
138        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
139        mContext = context;
140        mHandler = new Handler(Looper.getMainLooper());
141        mCameraSound = new MediaActionSound();
142    }
143
144    public void setCamera(Camera cameraDevice) {
145        switch (mState) {
146            case STATE_PREVIEW:
147                throw new RuntimeException("setCamera cannot be called while previewing!");
148            case STATE_RECORD:
149                throw new RuntimeException("setCamera cannot be called while recording!");
150            case STATE_RELEASED:
151                throw new RuntimeException("setCamera called on an already released recorder!");
152            default:
153                break;
154        }
155
156        mCameraDevice = cameraDevice;
157    }
158
159    public void setProfile(CamcorderProfile profile) {
160        switch (mState) {
161            case STATE_RECORD:
162                throw new RuntimeException("setProfile cannot be called while recording!");
163            case STATE_RELEASED:
164                throw new RuntimeException("setProfile called on an already released recorder!");
165            default:
166                break;
167        }
168        mProfile = profile;
169    }
170
171    public void setOutputFile(String outputFile) {
172        switch (mState) {
173            case STATE_RECORD:
174                throw new RuntimeException("setOutputFile cannot be called while recording!");
175            case STATE_RELEASED:
176                throw new RuntimeException("setOutputFile called on an already released recorder!");
177            default:
178                break;
179        }
180
181        mOutputFile = outputFile;
182        mFd = null;
183    }
184
185    public void setOutputFile(FileDescriptor fd) {
186        switch (mState) {
187            case STATE_RECORD:
188                throw new RuntimeException("setOutputFile cannot be called while recording!");
189            case STATE_RELEASED:
190                throw new RuntimeException("setOutputFile called on an already released recorder!");
191            default:
192                break;
193        }
194
195        mOutputFile = null;
196        mFd = fd;
197    }
198
199    /**
200     * Sets the maximum filesize (in bytes) of the recording session.
201     * This will be passed on to the MediaEncoderFilter and then to the
202     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
203     * disable the limit
204    */
205    public synchronized void setMaxFileSize(long maxFileSize) {
206        switch (mState) {
207            case STATE_RECORD:
208                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
209            case STATE_RELEASED:
210                throw new RuntimeException("setMaxFileSize called on an already released recorder!");
211            default:
212                break;
213        }
214        mMaxFileSize = maxFileSize;
215    }
216
217    /**
218    * Sets the maximum recording duration (in ms) for the next recording session
219    * Setting it to zero (the default) disables the limit.
220    */
221    public synchronized void setMaxDuration(int maxDurationMs) {
222        switch (mState) {
223            case STATE_RECORD:
224                throw new RuntimeException("setMaxDuration cannot be called while recording!");
225            case STATE_RELEASED:
226                throw new RuntimeException("setMaxDuration called on an already released recorder!");
227            default:
228                break;
229        }
230        mMaxDurationMs = maxDurationMs;
231    }
232
233
234    public void setCaptureRate(double fps) {
235        switch (mState) {
236            case STATE_RECORD:
237                throw new RuntimeException("setCaptureRate cannot be called while recording!");
238            case STATE_RELEASED:
239                throw new RuntimeException("setCaptureRate called on an already released recorder!");
240            default:
241                break;
242        }
243
244        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
245        mCaptureRate = fps;
246    }
247
248    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
249                                  int previewWidth,
250                                  int previewHeight) {
251        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
252        switch (mState) {
253            case STATE_RECORD:
254                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
255            case STATE_RELEASED:
256                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
257            default:
258                break;
259        }
260
261        mPreviewSurfaceHolder = previewSurfaceHolder;
262        mPreviewWidth = previewWidth;
263        mPreviewHeight = previewHeight;
264
265        switch (mState) {
266            case STATE_WAITING_FOR_SURFACE:
267                startPreview();
268                break;
269            case STATE_STARTING_PREVIEW:
270            case STATE_PREVIEW:
271                initializeEffect(true);
272                break;
273        }
274    }
275
276    public void setEffect(int effect, Object effectParameter) {
277        if (mLogVerbose) Log.v(TAG,
278                               "setEffect: effect ID " + effect +
279                               ", parameter " + effectParameter.toString() );
280        switch (mState) {
281            case STATE_RECORD:
282                throw new RuntimeException("setEffect cannot be called while recording!");
283            case STATE_RELEASED:
284                throw new RuntimeException("setEffect called on an already released recorder!");
285            default:
286                break;
287        }
288
289        mEffect = effect;
290        mEffectParameter = effectParameter;
291
292        if (mState == STATE_PREVIEW ||
293                mState == STATE_STARTING_PREVIEW) {
294            initializeEffect(false);
295        }
296    }
297
    /**
     * Callback interface for effect lifecycle events. Callbacks may originate
     * on filter processing threads (see the sendMessage() call sites).
     */
    public interface EffectsListener {
        // effectId is one of the EFFECT_* constants; effectMsg one of EFFECT_MSG_*.
        public void onEffectsUpdate(int effectId, int effectMsg);
        // Reports an error from the effects pipeline along with the output
        // file path being written at the time (if any).
        public void onEffectsError(Exception exception, String filePath);
    }
302
303    public void setEffectsListener(EffectsListener listener) {
304        mEffectsListener = listener;
305    }
306
307    private void setFaceDetectOrientation() {
308        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
309            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
310            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
311            rotateFilter.setInputValue("rotation", mOrientationHint);
312            int reverseDegrees = (360 - mOrientationHint) % 360;
313            metaRotateFilter.setInputValue("rotation", reverseDegrees);
314        }
315    }
316
317    private void setRecordingOrientation() {
318        if ( mState != STATE_RECORD && mRunner != null) {
319            Point bl = new Point(0, 0);
320            Point br = new Point(1, 0);
321            Point tl = new Point(0, 1);
322            Point tr = new Point(1, 1);
323            Quad recordingRegion;
324            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
325                // The back camera is not mirrored, so use a identity transform
326                recordingRegion = new Quad(bl, br, tl, tr);
327            } else {
328                // Recording region needs to be tweaked for front cameras, since they
329                // mirror their preview
330                if (mOrientationHint == 0 || mOrientationHint == 180) {
331                    // Horizontal flip in landscape
332                    recordingRegion = new Quad(br, bl, tr, tl);
333                } else {
334                    // Horizontal flip in portrait
335                    recordingRegion = new Quad(tl, tr, bl, br);
336                }
337            }
338            Filter recorder = mRunner.getGraph().getFilter("recorder");
339            recorder.setInputValue("inputRegion", recordingRegion);
340        }
341    }
342    public void setOrientationHint(int degrees) {
343        switch (mState) {
344            case STATE_RELEASED:
345                throw new RuntimeException(
346                        "setOrientationHint called on an already released recorder!");
347            default:
348                break;
349        }
350        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
351        mOrientationHint = degrees;
352        setFaceDetectOrientation();
353        setRecordingOrientation();
354    }
355
356    /** Passes the native orientation of the Camera app (device dependent)
357     * to allow for correct output aspect ratio. Defaults to portrait */
358    public void setAppToLandscape(boolean landscape) {
359        if (mState != STATE_CONFIGURE) {
360            throw new RuntimeException(
361                "setAppToLandscape called after configuration!");
362        }
363        mAppIsLandscape = landscape;
364    }
365
366    public void setCameraFacing(int facing) {
367        switch (mState) {
368            case STATE_RELEASED:
369                throw new RuntimeException(
370                    "setCameraFacing called on alrady released recorder!");
371            default:
372                break;
373        }
374        mCameraFacing = facing;
375        setRecordingOrientation();
376    }
377
378    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
379        switch (mState) {
380            case STATE_RECORD:
381                throw new RuntimeException("setInfoListener cannot be called while recording!");
382            case STATE_RELEASED:
383                throw new RuntimeException("setInfoListener called on an already released recorder!");
384            default:
385                break;
386        }
387        mInfoListener = infoListener;
388    }
389
390    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
391        switch (mState) {
392            case STATE_RECORD:
393                throw new RuntimeException("setErrorListener cannot be called while recording!");
394            case STATE_RELEASED:
395                throw new RuntimeException("setErrorListener called on an already released recorder!");
396            default:
397                break;
398        }
399        mErrorListener = errorListener;
400    }
401
    /**
     * Creates the filter-framework graph environment (with its GL context) and
     * registers the references shared by all effect graphs. Resets runner and
     * effect state; initializeEffect() loads the actual graph afterwards.
     */
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }
        // NOTE(review): this swaps the frame dimensions *in place* on the
        // shared CamcorderProfile, so a second call in portrait would swap
        // them back -- confirm callers supply a fresh profile per preview.
        if (!mAppIsLandscape) {
            int tmp;
            tmp = mProfile.videoFrameWidth;
            mProfile.videoFrameWidth = mProfile.videoFrameHeight;
            mProfile.videoFrameHeight = tmp;
        }
        // These named references are resolved by the graph definitions loaded
        // in initializeEffect().
        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        // No graph loaded yet; initializeEffect() populates these.
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }
427
    /**
     * Loads (or reloads) the filter graph for the currently selected effect
     * and applies its effect-specific parameters.
     *
     * A new graph is built when forced, when the selected effect differs from
     * the loaded one, or whenever the current effect is the backdropper (which
     * must re-learn its background). If a preview is already running, the old
     * runner is stopped here and its done-callback starts the new one.
     *
     * @param forceReset rebuild the graph even if the effect is unchanged
     *                   (e.g. after the preview surface changed)
     */
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {
            if (mLogVerbose) {
                Log.v(TAG, "Effect initializing. Preview size "
                       + mPreviewWidth + ", " + mPreviewHeight);
            }

            // Refresh the preview-related references before loading the graph.
            mGraphEnv.addReferences(
                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    // Backdropper starts by learning the background model.
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            // Keep the old runner alive until its done-callback fires; the
            // callback is responsible for starting the new runner.
            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    mCameraDevice.setPreviewTexture(null);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }
                mOldRunner.stop();
            }
        }

        // Apply effect-specific parameters to the (possibly new) graph.
        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                                          ((Integer)mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                tryEnableVideoStabilization(false);
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl",
                                            (String)mEffectParameter);
                // For front camera, the background video needs to be mirrored in the
                // backdropper filter
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    Filter replacer = mRunner.getGraph().getFilter("replacer");
                    replacer.setInputValue("mirrorBg", true);
                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
                }
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }
508
509    public synchronized void startPreview() {
510        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
511
512        switch (mState) {
513            case STATE_STARTING_PREVIEW:
514            case STATE_PREVIEW:
515                // Already running preview
516                Log.w(TAG, "startPreview called when already running preview");
517                return;
518            case STATE_RECORD:
519                throw new RuntimeException("Cannot start preview when already recording!");
520            case STATE_RELEASED:
521                throw new RuntimeException("setEffect called on an already released recorder!");
522            default:
523                break;
524        }
525
526        if (mEffect == EFFECT_NONE) {
527            throw new RuntimeException("No effect selected!");
528        }
529        if (mEffectParameter == null) {
530            throw new RuntimeException("No effect parameter provided!");
531        }
532        if (mProfile == null) {
533            throw new RuntimeException("No recording profile provided!");
534        }
535        if (mPreviewSurfaceHolder == null) {
536            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
537            mState = STATE_WAITING_FOR_SURFACE;
538            return;
539        }
540        if (mCameraDevice == null) {
541            throw new RuntimeException("No camera to record from!");
542        }
543
544        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
545
546        initializeFilterFramework();
547
548        initializeEffect(true);
549
550        if (mLogVerbose) Log.v(TAG, "Starting filter graph");
551
552        mState = STATE_STARTING_PREVIEW;
553        mRunner.run();
554        // Rest of preview startup handled in mSourceReadyCallback
555    }
556
    // Invoked by the filter graph (from a graph thread) when its
    // SurfaceTexture source becomes ready or is torn down. On ready, connects
    // the camera preview to the effect input and finishes preview startup.
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch(IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                // Re-route the camera's preview frames into the graph's texture.
                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

                // Sending a message to listener that preview is complete
                sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
            }
        }
    };
620
    // Invoked by the BackDropperFilter (on a filter processing thread) once
    // background learning has finished.
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            // Lock AE/AWB so the learned background model stays valid.
            enable3ALocks(true);
        }
    };
631
    // A callback to finalize the media after the recording is done.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
641
    /**
     * Begins recording the filtered preview to the configured output file or
     * file descriptor, configuring the encoder filter with the recording
     * profile, orientation, limits, and listeners. Starts preview first if it
     * is not already running.
     *
     * @throws RuntimeException if already recording, released, or no output
     *         target has been set
     */
    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException("startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            // NOTE(review): if startPreview() defers to
            // STATE_WAITING_FOR_SURFACE (no preview surface yet), mRunner is
            // still null here and getGraph() below will NPE -- confirm callers
            // always attach a surface before recording.
            startPreview();
        }

        Filter recorder = mRunner.getGraph().getFilter("recorder");
        // Exactly one of mFd / mOutputFile is set (see setOutputFile overloads).
        if (mFd != null) {
            recorder.setInputValue("outputFileDescriptor", mFd);
        } else {
            recorder.setInputValue("outputFile", mOutputFile);
        }
        // It is ok to set the audiosource without checking for timelapse here
        // since that check will be done in the MediaEncoderFilter itself
        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);

        recorder.setInputValue("recordingProfile", mProfile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        // Important to set the timelapseinterval to 0 if the capture rate is not >0
        // since the recorder does not get created every time the recording starts.
        // The recorder infers whether the capture is timelapsed based on the value of
        // this interval
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            // Convert fps to the per-frame interval in microseconds.
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            recorder.setInputValue("timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("maxFileSize", mMaxFileSize);
        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
        // Flipping "recording" to true is what actually starts the encoder.
        recorder.setInputValue("recording", true);
        mCameraSound.play(MediaActionSound.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }
699
700    public synchronized void stopRecording() {
701        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
702
703        switch (mState) {
704            case STATE_CONFIGURE:
705            case STATE_STARTING_PREVIEW:
706            case STATE_PREVIEW:
707                Log.w(TAG, "StopRecording called when recording not active!");
708                return;
709            case STATE_RELEASED:
710                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
711            default:
712                break;
713        }
714        Filter recorder = mRunner.getGraph().getFilter("recorder");
715        recorder.setInputValue("recording", false);
716        mCameraSound.play(MediaActionSound.STOP_VIDEO_RECORDING);
717        mState = STATE_PREVIEW;
718    }
719
720    // Stop and release effect resources
    // Stop and release effect resources
    /**
     * Stops the effects preview (and any active recording), disconnects the
     * camera from the effect input, and stops the graph runner. The remainder
     * of teardown completes asynchronously in mRunnerDoneCallback.
     */
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "StopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        // Disconnect the camera from the (soon to be destroyed) graph texture
        // before stopping the runner.
        mCameraDevice.stopPreview();
        try {
            mCameraDevice.setPreviewTexture(null);
        } catch(IOException e) {
            throw new RuntimeException("Unable to connect camera to effect input", e);
        }
        mCameraSound.release();

        mState = STATE_CONFIGURE;
        mOldRunner = mRunner;
        mRunner.stop();
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }
754
755    // Try to enable/disable video stabilization if supported; otherwise return false
756    boolean tryEnableVideoStabilization(boolean toggle) {
757        Camera.Parameters params = mCameraDevice.getParameters();
758
759        String vstabSupported = params.get("video-stabilization-supported");
760        if ("true".equals(vstabSupported)) {
761            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
762            params.set("video-stabilization", toggle ? "true" : "false");
763            mCameraDevice.setParameters(params);
764            return true;
765        }
766        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
767        return false;
768    }
769
770    // Try to enable/disable 3A locks if supported; otherwise return false
771    boolean tryEnable3ALocks(boolean toggle) {
772        Camera.Parameters params = mCameraDevice.getParameters();
773        if (params.isAutoExposureLockSupported() &&
774            params.isAutoWhiteBalanceLockSupported() ) {
775            params.setAutoExposureLock(toggle);
776            params.setAutoWhiteBalanceLock(toggle);
777            mCameraDevice.setParameters(params);
778            return true;
779        }
780        return false;
781    }
782
783    // Try to enable/disable 3A locks if supported; otherwise, throw error
784    // Use this when locks are essential to success
785    void enable3ALocks(boolean toggle) {
786        Camera.Parameters params = mCameraDevice.getParameters();
787        if (!tryEnable3ALocks(toggle)) {
788            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
789        }
790    }
791
    // Invoked on the graph runner's thread when the filter graph halts —
    // either deliberately (effect switch / shutdown) or due to an error.
    // Responsible for tearing down the previous graph (mOldRunner) and,
    // depending on mState, restarting the new graph or signaling that
    // effects have fully stopped.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        public void onRunnerDone(int result) {
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    // Prefer the active runner's error; fall back to the old
                    // runner if only it is still around.
                    Exception e = null;
                    if (mRunner != null) {
                        e = mRunner.getError();
                    } else if (mOldRunner != null) {
                        e = mOldRunner.getError();
                    }
                    raiseError(e);
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    // Graph teardown needs the GL context current on this
                    // thread; activate it only if it isn't already, and
                    // deactivate afterwards only if we find it active.
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
843
844    // Indicates that all camera/recording activity needs to halt
845    public synchronized void release() {
846        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
847
848        switch (mState) {
849            case STATE_RECORD:
850            case STATE_STARTING_PREVIEW:
851            case STATE_PREVIEW:
852                stopPreview();
853                // Fall-through
854            default:
855                mState = STATE_RELEASED;
856                break;
857        }
858    }
859
860    private void sendMessage(final int effect, final int msg) {
861        if (mEffectsListener != null) {
862            mHandler.post(new Runnable() {
863                public void run() {
864                    mEffectsListener.onEffectsUpdate(effect, msg);
865                }
866            });
867        }
868    }
869
870    private void raiseError(final Exception exception) {
871        if (mEffectsListener != null) {
872            mHandler.post(new Runnable() {
873                public void run() {
874                    if (mFd != null) {
875                        mEffectsListener.onEffectsError(exception, null);
876                    } else {
877                        mEffectsListener.onEffectsError(exception, mOutputFile);
878                    }
879                }
880            });
881        }
882    }
883
884}
885