1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.filterfw.GraphEnvironment;
21import android.filterfw.core.Filter;
22import android.filterfw.core.GLEnvironment;
23import android.filterfw.core.GraphRunner;
24import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
25import android.filterfw.geometry.Point;
26import android.filterfw.geometry.Quad;
27import android.filterpacks.videoproc.BackDropperFilter;
28import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
29import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
30import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
31import android.filterpacks.videosrc.SurfaceTextureTarget;
32import android.graphics.SurfaceTexture;
33import android.hardware.Camera;
34import android.media.CamcorderProfile;
35import android.media.MediaActionSound;
36import android.media.MediaRecorder;
37import android.os.Handler;
38import android.os.Looper;
39import android.util.Log;
40import android.view.Surface;
41
42import java.io.FileDescriptor;
43import java.io.IOException;
44
45
46/**
47 * Encapsulates the mobile filter framework components needed to record video with
48 * effects applied. Modeled after MediaRecorder.
49 */
50public class EffectsRecorder {
51
52    public static final int  EFFECT_NONE        = 0;
53    public static final int  EFFECT_GOOFY_FACE  = 1;
54    public static final int  EFFECT_BACKDROPPER = 2;
55
56    public static final int  EFFECT_GF_SQUEEZE     = 0;
57    public static final int  EFFECT_GF_BIG_EYES    = 1;
58    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
59    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
60    public static final int  EFFECT_GF_BIG_NOSE    = 4;
61    public static final int  EFFECT_GF_SMALL_EYES  = 5;
62    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;
63
64    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
65    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
66    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
67    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
68    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
69    public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;
70
71    private Context mContext;
72    private Handler mHandler;
73
74    private Camera mCameraDevice;
75    private CamcorderProfile mProfile;
76    private double mCaptureRate = 0;
77    private SurfaceTexture mPreviewSurfaceTexture;
78    private int mPreviewWidth;
79    private int mPreviewHeight;
80    private MediaRecorder.OnInfoListener mInfoListener;
81    private MediaRecorder.OnErrorListener mErrorListener;
82
83    private String mOutputFile;
84    private FileDescriptor mFd;
85    private int mOrientationHint = 0;
86    private long mMaxFileSize = 0;
87    private int mMaxDurationMs = 0;
88    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
89    private int mCameraDisplayOrientation;
90
91    private int mEffect = EFFECT_NONE;
92    private int mCurrentEffect = EFFECT_NONE;
93    private EffectsListener mEffectsListener;
94
95    private Object mEffectParameter;
96
97    private GraphEnvironment mGraphEnv;
98    private int mGraphId;
99    private GraphRunner mRunner = null;
100    private GraphRunner mOldRunner = null;
101
102    private SurfaceTexture mTextureSource;
103
104    private static final int STATE_CONFIGURE              = 0;
105    private static final int STATE_WAITING_FOR_SURFACE    = 1;
106    private static final int STATE_STARTING_PREVIEW       = 2;
107    private static final int STATE_PREVIEW                = 3;
108    private static final int STATE_RECORD                 = 4;
109    private static final int STATE_RELEASED               = 5;
110    private int mState = STATE_CONFIGURE;
111
112    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
113    private static final String TAG = "EffectsRecorder";
114    private MediaActionSound mCameraSound;
115
116    /** Determine if a given effect is supported at runtime
117     * Some effects require libraries not available on all devices
118     */
119    public static boolean isEffectSupported(int effectId) {
120        switch (effectId) {
121            case EFFECT_GOOFY_FACE:
122                return Filter.isAvailable(
123                    "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
124            case EFFECT_BACKDROPPER:
125                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
126            default:
127                return false;
128        }
129    }
130
    /**
     * Creates a new EffectsRecorder.
     *
     * @param context context used later to load the effect filter graphs
     */
    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
        mContext = context;
        // Handler bound to the main looper (consumers of it are outside this chunk).
        mHandler = new Handler(Looper.getMainLooper());
        // Pre-load the camcorder sounds so playback at record start/stop is immediate.
        mCameraSound = new MediaActionSound();
        mCameraSound.load(MediaActionSound.START_VIDEO_RECORDING);
        mCameraSound.load(MediaActionSound.STOP_VIDEO_RECORDING);
    }
139
140    public synchronized void setCamera(Camera cameraDevice) {
141        switch (mState) {
142            case STATE_PREVIEW:
143                throw new RuntimeException("setCamera cannot be called while previewing!");
144            case STATE_RECORD:
145                throw new RuntimeException("setCamera cannot be called while recording!");
146            case STATE_RELEASED:
147                throw new RuntimeException("setCamera called on an already released recorder!");
148            default:
149                break;
150        }
151
152        mCameraDevice = cameraDevice;
153    }
154
155    public void setProfile(CamcorderProfile profile) {
156        switch (mState) {
157            case STATE_RECORD:
158                throw new RuntimeException("setProfile cannot be called while recording!");
159            case STATE_RELEASED:
160                throw new RuntimeException("setProfile called on an already released recorder!");
161            default:
162                break;
163        }
164        mProfile = profile;
165    }
166
167    public void setOutputFile(String outputFile) {
168        switch (mState) {
169            case STATE_RECORD:
170                throw new RuntimeException("setOutputFile cannot be called while recording!");
171            case STATE_RELEASED:
172                throw new RuntimeException("setOutputFile called on an already released recorder!");
173            default:
174                break;
175        }
176
177        mOutputFile = outputFile;
178        mFd = null;
179    }
180
181    public void setOutputFile(FileDescriptor fd) {
182        switch (mState) {
183            case STATE_RECORD:
184                throw new RuntimeException("setOutputFile cannot be called while recording!");
185            case STATE_RELEASED:
186                throw new RuntimeException("setOutputFile called on an already released recorder!");
187            default:
188                break;
189        }
190
191        mOutputFile = null;
192        mFd = fd;
193    }
194
195    /**
196     * Sets the maximum filesize (in bytes) of the recording session.
197     * This will be passed on to the MediaEncoderFilter and then to the
198     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
199     * disable the limit
200    */
201    public synchronized void setMaxFileSize(long maxFileSize) {
202        switch (mState) {
203            case STATE_RECORD:
204                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
205            case STATE_RELEASED:
206                throw new RuntimeException(
207                    "setMaxFileSize called on an already released recorder!");
208            default:
209                break;
210        }
211        mMaxFileSize = maxFileSize;
212    }
213
214    /**
215    * Sets the maximum recording duration (in ms) for the next recording session
216    * Setting it to zero (the default) disables the limit.
217    */
218    public synchronized void setMaxDuration(int maxDurationMs) {
219        switch (mState) {
220            case STATE_RECORD:
221                throw new RuntimeException("setMaxDuration cannot be called while recording!");
222            case STATE_RELEASED:
223                throw new RuntimeException(
224                    "setMaxDuration called on an already released recorder!");
225            default:
226                break;
227        }
228        mMaxDurationMs = maxDurationMs;
229    }
230
231
232    public void setCaptureRate(double fps) {
233        switch (mState) {
234            case STATE_RECORD:
235                throw new RuntimeException("setCaptureRate cannot be called while recording!");
236            case STATE_RELEASED:
237                throw new RuntimeException(
238                    "setCaptureRate called on an already released recorder!");
239            default:
240                break;
241        }
242
243        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
244        mCaptureRate = fps;
245    }
246
247    public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
248                                  int previewWidth,
249                                  int previewHeight) {
250        if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
251        switch (mState) {
252            case STATE_RECORD:
253                throw new RuntimeException(
254                    "setPreviewSurfaceTexture cannot be called while recording!");
255            case STATE_RELEASED:
256                throw new RuntimeException(
257                    "setPreviewSurfaceTexture called on an already released recorder!");
258            default:
259                break;
260        }
261
262        mPreviewSurfaceTexture= previewSurfaceTexture;
263        mPreviewWidth = previewWidth;
264        mPreviewHeight = previewHeight;
265
266        switch (mState) {
267            case STATE_WAITING_FOR_SURFACE:
268                startPreview();
269                break;
270            case STATE_STARTING_PREVIEW:
271            case STATE_PREVIEW:
272                initializeEffect(true);
273                break;
274        }
275    }
276
277    public void setEffect(int effect, Object effectParameter) {
278        if (mLogVerbose) Log.v(TAG,
279                               "setEffect: effect ID " + effect +
280                               ", parameter " + effectParameter.toString() );
281        switch (mState) {
282            case STATE_RECORD:
283                throw new RuntimeException("setEffect cannot be called while recording!");
284            case STATE_RELEASED:
285                throw new RuntimeException("setEffect called on an already released recorder!");
286            default:
287                break;
288        }
289
290        mEffect = effect;
291        mEffectParameter = effectParameter;
292
293        if (mState == STATE_PREVIEW ||
294                mState == STATE_STARTING_PREVIEW) {
295            initializeEffect(false);
296        }
297    }
298
    /** Callback interface for effect lifecycle notifications and errors. */
    public interface EffectsListener {
        /** Delivers one of the EFFECT_MSG_* codes for the given effect ID. */
        public void onEffectsUpdate(int effectId, int effectMsg);
        /** Reports an error; filePath is presumably the affected output file — confirm with callers. */
        public void onEffectsError(Exception exception, String filePath);
    }
303
    /** Registers the listener that receives effect updates and errors. */
    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }
307
    // Pushes the current orientation hint into the goofy-face graph's
    // "rotate" and "metarotate" filters. No-op unless the goofy face effect
    // is the one currently loaded.
    private void setFaceDetectOrientation() {
        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
            rotateFilter.setInputValue("rotation", mOrientationHint);
            // The meta rotation is the inverse of the frame rotation.
            int reverseDegrees = (360 - mOrientationHint) % 360;
            metaRotateFilter.setInputValue("rotation", reverseDegrees);
        }
    }
317
    // Updates the "recorder" filter's input region so front-camera recordings
    // are un-mirrored. Skipped while recording (the region must not change
    // mid-recording) and when no graph runner exists yet.
    private void setRecordingOrientation() {
        if ( mState != STATE_RECORD && mRunner != null) {
            // Unit-square corners: bottom-left, bottom-right, top-left, top-right.
            Point bl = new Point(0, 0);
            Point br = new Point(1, 0);
            Point tl = new Point(0, 1);
            Point tr = new Point(1, 1);
            Quad recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use an identity transform
                recordingRegion = new Quad(bl, br, tl, tr);
            } else {
                // Recording region needs to be tweaked for front cameras, since they
                // mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = new Quad(br, bl, tr, tl);
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = new Quad(tl, tr, bl, br);
                }
            }
            Filter recorder = mRunner.getGraph().getFilter("recorder");
            recorder.setInputValue("inputRegion", recordingRegion);
        }
    }
343    public void setOrientationHint(int degrees) {
344        switch (mState) {
345            case STATE_RELEASED:
346                throw new RuntimeException(
347                        "setOrientationHint called on an already released recorder!");
348            default:
349                break;
350        }
351        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
352        mOrientationHint = degrees;
353        setFaceDetectOrientation();
354        setRecordingOrientation();
355    }
356
357    public void setCameraDisplayOrientation(int orientation) {
358        if (mState != STATE_CONFIGURE) {
359            throw new RuntimeException(
360                "setCameraDisplayOrientation called after configuration!");
361        }
362        mCameraDisplayOrientation = orientation;
363    }
364
365    public void setCameraFacing(int facing) {
366        switch (mState) {
367            case STATE_RELEASED:
368                throw new RuntimeException(
369                    "setCameraFacing called on alrady released recorder!");
370            default:
371                break;
372        }
373        mCameraFacing = facing;
374        setRecordingOrientation();
375    }
376
377    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
378        switch (mState) {
379            case STATE_RECORD:
380                throw new RuntimeException("setInfoListener cannot be called while recording!");
381            case STATE_RELEASED:
382                throw new RuntimeException(
383                    "setInfoListener called on an already released recorder!");
384            default:
385                break;
386        }
387        mInfoListener = infoListener;
388    }
389
390    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
391        switch (mState) {
392            case STATE_RECORD:
393                throw new RuntimeException("setErrorListener cannot be called while recording!");
394            case STATE_RELEASED:
395                throw new RuntimeException(
396                    "setErrorListener called on an already released recorder!");
397            default:
398                break;
399        }
400        mErrorListener = errorListener;
401    }
402
    // Creates the filter-framework graph environment with a fresh GL
    // environment and registers the references shared by every effect graph.
    // Resets the runner/graph/effect bookkeeping so initializeEffect() starts
    // from a clean slate.
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        // Swap recording width/height when the display is rotated 90/270 so
        // the encoded frame dimensions match the rotated orientation.
        int videoFrameWidth = mProfile.videoFrameWidth;
        int videoFrameHeight = mProfile.videoFrameHeight;
        if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) {
            int tmp = videoFrameWidth;
            videoFrameWidth = videoFrameHeight;
            videoFrameHeight = tmp;
        }

        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", videoFrameWidth,
                "recordingHeight", videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }
426
    // (Re)loads and configures the filter graph for the currently requested
    // effect (mEffect). The graph is reloaded when forced, when the effect
    // changed, or always for the backdropper (its learning state is not
    // reusable). Requires initializeFilterFramework() to have run.
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {

            // Refresh preview-surface references; these may have changed
            // since the previous graph load.
            mGraphEnv.addReferences(
                    "previewSurfaceTexture", mPreviewSurfaceTexture,
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            // Keep the old runner alive until its stop callback fires.
            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    // Detach the camera from the old graph's texture first.
                    mCameraDevice.setPreviewTexture(null);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }
                mOldRunner.stop();
            }
        }

        // Per-effect configuration of the (possibly freshly loaded) graph.
        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                // Effect parameter is the Integer sub-effect selector (EFFECT_GF_*).
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                                          ((Integer)mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                tryEnableVideoStabilization(false);
                // Effect parameter is the background source URL.
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl", mEffectParameter);
                // For front camera, the background video needs to be mirrored in the
                // backdropper filter
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    Filter replacer = mRunner.getGraph().getFilter("replacer");
                    replacer.setInputValue("mirrorBg", true);
                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
                }
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }
502
503    public synchronized void startPreview() {
504        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
505
506        switch (mState) {
507            case STATE_STARTING_PREVIEW:
508            case STATE_PREVIEW:
509                // Already running preview
510                Log.w(TAG, "startPreview called when already running preview");
511                return;
512            case STATE_RECORD:
513                throw new RuntimeException("Cannot start preview when already recording!");
514            case STATE_RELEASED:
515                throw new RuntimeException("setEffect called on an already released recorder!");
516            default:
517                break;
518        }
519
520        if (mEffect == EFFECT_NONE) {
521            throw new RuntimeException("No effect selected!");
522        }
523        if (mEffectParameter == null) {
524            throw new RuntimeException("No effect parameter provided!");
525        }
526        if (mProfile == null) {
527            throw new RuntimeException("No recording profile provided!");
528        }
529        if (mPreviewSurfaceTexture == null) {
530            if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one");
531            mState = STATE_WAITING_FOR_SURFACE;
532            return;
533        }
534        if (mCameraDevice == null) {
535            throw new RuntimeException("No camera to record from!");
536        }
537
538        if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph.");
539        initializeFilterFramework();
540
541        initializeEffect(true);
542
543        mState = STATE_STARTING_PREVIEW;
544        mRunner.run();
545        // Rest of preview startup handled in mSourceReadyCallback
546    }
547
    // Invoked by the graph's SurfaceTextureSource once its input texture is
    // ready (or with null when the graph is shutting down). Completes preview
    // startup by pointing the camera at the graph's texture and moving to
    // STATE_PREVIEW.
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        @Override
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mLogVerbose) {
                        Log.v(TAG, "Ready callback: source null! Looks like graph was closed!");
                    }
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        if (mLogVerbose) {
                            Log.v(TAG, "Ready callback: State: " + mState + ". stopCameraPreview");
                        }

                        stopCameraPreview();
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                // Re-point the camera's preview output at the graph's texture.
                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

                // Sending a message to listener that preview is complete
                sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
            }
        }
    };
613
    // Invoked by the backdropper filter when background learning completes.
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        @Override
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            // Lock AE/AWB (enable3ALocks is defined elsewhere in this file).
            enable3ALocks(true);
        }
    };
625
    // A callback to finalize the media after the recording is done.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        @Override
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
636
    /**
     * Begins recording through the effect graph's "recorder" filter: sets the
     * output target, profile, orientation, time-lapse interval, listeners and
     * limits, plays the start sound, and moves to STATE_RECORD.
     */
    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            // NOTE(review): if no preview surface was set, startPreview()
            // leaves mRunner null (STATE_WAITING_FOR_SURFACE) and the
            // getGraph() call below would NPE; callers appear expected to
            // provide the surface before recording — confirm.
            startPreview();
        }

        Filter recorder = mRunner.getGraph().getFilter("recorder");
        // A file descriptor takes precedence over a file path.
        if (mFd != null) {
            recorder.setInputValue("outputFileDescriptor", mFd);
        } else {
            recorder.setInputValue("outputFile", mOutputFile);
        }
        // It is ok to set the audiosource without checking for timelapse here
        // since that check will be done in the MediaEncoderFilter itself
        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);

        recorder.setInputValue("recordingProfile", mProfile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        // Important to set the timelapseinterval to 0 if the capture rate is not >0
        // since the recorder does not get created every time the recording starts.
        // The recorder infers whether the capture is timelapsed based on the value of
        // this interval
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            // Convert frames/sec into microseconds between captured frames.
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            recorder.setInputValue("timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("maxFileSize", mMaxFileSize);
        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
        // Flipping "recording" to true is what actually starts the encoder.
        recorder.setInputValue("recording", true);
        mCameraSound.play(MediaActionSound.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }
695
696    public synchronized void stopRecording() {
697        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
698
699        switch (mState) {
700            case STATE_CONFIGURE:
701            case STATE_STARTING_PREVIEW:
702            case STATE_PREVIEW:
703                Log.w(TAG, "StopRecording called when recording not active!");
704                return;
705            case STATE_RELEASED:
706                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
707            default:
708                break;
709        }
710        Filter recorder = mRunner.getGraph().getFilter("recorder");
711        recorder.setInputValue("recording", false);
712        mCameraSound.play(MediaActionSound.STOP_VIDEO_RECORDING);
713        mState = STATE_PREVIEW;
714    }
715
    // Called to tell the filter graph that the display surfacetexture is not valid anymore.
    // So the filter graph should not hold any reference to the surface created with that.
    public synchronized void disconnectDisplay() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " +
            "SurfaceTexture");
        SurfaceTextureTarget display = (SurfaceTextureTarget)
            mRunner.getGraph().getFilter("display");
        // Release the graph-side surface inside the graph's own context.
        display.disconnect(mGraphEnv.getContext());
    }
725
    // The VideoCamera will call this to notify that the camera is being
    // released to the outside world. This call should happen after the
    // stopRecording call. Else, the effects may throw an exception.
    // With the recording stopped, the stopPreview call will not try to
    // release the camera again.
    // This must be called in onPause() if the effects are ON.
    public synchronized void disconnectCamera() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
        stopCameraPreview();
        // Clearing the camera reference here is what distinguishes this from
        // stopCameraPreview() alone; later callbacks will see a null camera.
        mCameraDevice = null;
    }
737
738    // In a normal case, when the disconnect is not called, we should not
739    // set the camera device to null, since on return callback, we try to
740    // enable 3A locks, which need the cameradevice.
741    public synchronized void stopCameraPreview() {
742        if (mLogVerbose) Log.v(TAG, "Stopping camera preview.");
743        if (mCameraDevice == null) {
744            Log.d(TAG, "Camera already null. Nothing to disconnect");
745            return;
746        }
747        mCameraDevice.stopPreview();
748        try {
749            mCameraDevice.setPreviewTexture(null);
750        } catch(IOException e) {
751            throw new RuntimeException("Unable to disconnect camera");
752        }
753    }
754
755    // Stop and release effect resources
756    public synchronized void stopPreview() {
757        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
758        switch (mState) {
759            case STATE_CONFIGURE:
760                Log.w(TAG, "StopPreview called when preview not active!");
761                return;
762            case STATE_RELEASED:
763                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
764            default:
765                break;
766        }
767
768        if (mState == STATE_RECORD) {
769            stopRecording();
770        }
771
772        mCurrentEffect = EFFECT_NONE;
773
774        // This will not do anything if the camera has already been disconnected.
775        stopCameraPreview();
776
777        mState = STATE_CONFIGURE;
778        mOldRunner = mRunner;
779        mRunner.stop();
780        mRunner = null;
781        // Rest of stop and release handled in mRunnerDoneCallback
782    }
783
784    // Try to enable/disable video stabilization if supported; otherwise return false
785    // It is called from a synchronized block.
786    boolean tryEnableVideoStabilization(boolean toggle) {
787        if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization.");
788        if (mCameraDevice == null) {
789            Log.d(TAG, "Camera already null. Not enabling video stabilization.");
790            return false;
791        }
792        Camera.Parameters params = mCameraDevice.getParameters();
793
794        String vstabSupported = params.get("video-stabilization-supported");
795        if ("true".equals(vstabSupported)) {
796            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
797            params.set("video-stabilization", toggle ? "true" : "false");
798            mCameraDevice.setParameters(params);
799            return true;
800        }
801        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
802        return false;
803    }
804
805    // Try to enable/disable 3A locks if supported; otherwise return false
806    synchronized boolean tryEnable3ALocks(boolean toggle) {
807        if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks");
808        if (mCameraDevice == null) {
809            Log.d(TAG, "Camera already null. Not tryenabling 3A locks.");
810            return false;
811        }
812        Camera.Parameters params = mCameraDevice.getParameters();
813        if (params.isAutoExposureLockSupported() &&
814            params.isAutoWhiteBalanceLockSupported() ) {
815            params.setAutoExposureLock(toggle);
816            params.setAutoWhiteBalanceLock(toggle);
817            mCameraDevice.setParameters(params);
818            return true;
819        }
820        return false;
821    }
822
823    // Try to enable/disable 3A locks if supported; otherwise, throw error
824    // Use this when locks are essential to success
825    synchronized void enable3ALocks(boolean toggle) {
826        if (mLogVerbose) Log.v(TAG, "Enable3ALocks");
827        if (mCameraDevice == null) {
828            Log.d(TAG, "Camera already null. Not enabling 3A locks.");
829            return;
830        }
831        Camera.Parameters params = mCameraDevice.getParameters();
832        if (!tryEnable3ALocks(toggle)) {
833            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
834        }
835    }
836
    // Invoked by the filter framework when a graph runner halts (either because
    // we stopped it in stopPreview() or because it hit an error). Finishes the
    // teardown started in stopPreview() and, when we are switching effects,
    // starts the replacement runner.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        @Override
        public void onRunnerDone(int result) {
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case: report whichever runner (current or
                    // outgoing) carries the exception to the listener.
                    Log.e(TAG, "Error running filter graph!");
                    Exception e = null;
                    if (mRunner != null) {
                        e = mRunner.getError();
                    } else if (mOldRunner != null) {
                        e = mOldRunner.getError();
                    }
                    raiseError(e);
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available. tearDown() needs an
                    // active GL context, so activate it around the call and
                    // restore the inactive state afterwards.
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) {
                        Log.v(TAG, "Previous effect halted. Running graph again. state: " + mState);
                    }
                    tryEnable3ALocks(false);
                    // In case of an error, the graph restarts from beginning and in case
                    // of the BACKDROPPER effect, the learner re-learns the background.
                    // Hence, we need to show the learning dialogue to the user
                    // to avoid recording before the learning is done. Else, the user
                    // could start recording before the learning is done and the new
                    // background comes up later leading to an end result video
                    // with a heterogeneous background.
                    // For BACKDROPPER effect, this path is also executed sometimes at
                    // the end of a normal recording session. In such a case, the graph
                    // does not restart and hence the learner does not re-learn. So we
                    // do not want to show the learning dialogue then.
                    if (result == GraphRunner.RESULT_ERROR &&
                            mCurrentEffect == EFFECT_BACKDROPPER) {
                        sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    }
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
906
907    // Indicates that all camera/recording activity needs to halt
908    public synchronized void release() {
909        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
910
911        switch (mState) {
912            case STATE_RECORD:
913            case STATE_STARTING_PREVIEW:
914            case STATE_PREVIEW:
915                stopPreview();
916                // Fall-through
917            default:
918                if (mCameraSound != null) {
919                    mCameraSound.release();
920                    mCameraSound = null;
921                }
922                mState = STATE_RELEASED;
923                break;
924        }
925    }
926
927    private void sendMessage(final int effect, final int msg) {
928        if (mEffectsListener != null) {
929            mHandler.post(new Runnable() {
930                @Override
931                public void run() {
932                    mEffectsListener.onEffectsUpdate(effect, msg);
933                }
934            });
935        }
936    }
937
938    private void raiseError(final Exception exception) {
939        if (mEffectsListener != null) {
940            mHandler.post(new Runnable() {
941                @Override
942                public void run() {
943                    if (mFd != null) {
944                        mEffectsListener.onEffectsError(exception, null);
945                    } else {
946                        mEffectsListener.onEffectsError(exception, mOutputFile);
947                    }
948                }
949            });
950        }
951    }
952
953}
954