EffectsRecorder.java revision a83a9a6b43682406f2332e6329ad0d90d175e7af
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
6 * the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
14 * the License.
15 */
16
17package com.android.camera;
18
19import android.content.Context;
20import android.content.res.AssetFileDescriptor;
21import android.filterfw.GraphEnvironment;
22import android.filterfw.core.Filter;
23import android.filterfw.core.GLEnvironment;
24import android.filterfw.core.GraphRunner;
25import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
26import android.filterfw.geometry.Point;
27import android.filterfw.geometry.Quad;
28import android.filterpacks.videoproc.BackDropperFilter;
29import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
30import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
31import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
32
33import android.graphics.SurfaceTexture;
34import android.hardware.Camera;
35import android.media.MediaRecorder;
36import android.media.CamcorderProfile;
37import android.os.ConditionVariable;
38import android.os.Handler;
39import android.os.Looper;
40import android.os.ParcelFileDescriptor;
41import android.os.SystemProperties;
42import android.util.Log;
43import android.view.Surface;
44import android.view.SurfaceHolder;
45
46import java.io.IOException;
47import java.io.FileNotFoundException;
48import java.io.File;
49import java.lang.Runnable;
50import java.io.FileDescriptor;
51
52
/**
 * Encapsulates the mobile filter framework components needed to record video with
 * effects applied. Modeled after MediaRecorder.
 */
public class EffectsRecorder {

    // Effect families selectable via setEffect().
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

    // Sub-effects for EFFECT_GOOFY_FACE, passed as the Integer effect parameter.
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Messages delivered to EffectsListener.onEffectsUpdate().
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;

    private Context mContext;
    private Handler mHandler;     // bound to the main looper in the constructor
    private boolean mReleased;

    // Camera device and recording configuration supplied by the caller.
    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    private double mCaptureRate = 0;  // >0 enables time-lapse capture
    private SurfaceHolder mPreviewSurfaceHolder;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Output destination: exactly one of mOutputFile / mFd is non-null.
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;    // <=0 disables the file-size limit
    private int mMaxDurationMs = 0;   // 0 disables the duration limit
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private boolean mAppIsLandscape;

    // mEffect is the requested effect; mCurrentEffect is what the graph is
    // actually running (they differ while a switch is pending).
    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;  // Integer for goofy face, String URL for backdropper

    // Mobile filter framework graph state.
    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;  // runner being torn down during an effect switch

    private SurfaceTexture mTextureSource;  // camera frames feed the graph through this

    private static final String mVideoRecordSound = "/system/media/audio/ui/VideoRecord.ogg";
    private SoundPlayer mRecordSound;

    // Lifecycle state machine; see the individual methods for allowed transitions.
    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    // Forward reference to TAG is legal: static fields are initialized before
    // any instance initializer runs.
    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String TAG = "effectsrecorder";
124
125    /** Determine if a given effect is supported at runtime
126     * Some effects require libraries not available on all devices
127     */
128    public static boolean isEffectSupported(int effectId) {
129        switch (effectId) {
130            case EFFECT_GOOFY_FACE:
131                return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
132            case EFFECT_BACKDROPPER:
133                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
134            default:
135                return false;
136        }
137    }
138
139    public EffectsRecorder(Context context) {
140        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
141        mContext = context;
142        mHandler = new Handler(Looper.getMainLooper());
143
144        // Construct sound player; use enforced sound output if necessary
145        File recordSoundFile = new File(mVideoRecordSound);
146        try {
147            ParcelFileDescriptor recordSoundParcel =
148                    ParcelFileDescriptor.open(recordSoundFile,
149                            ParcelFileDescriptor.MODE_READ_ONLY);
150            AssetFileDescriptor recordSoundAsset =
151                    new AssetFileDescriptor(recordSoundParcel, 0,
152                                            AssetFileDescriptor.UNKNOWN_LENGTH);
153            if (SystemProperties.get("ro.camera.sound.forced", "0").equals("0")) {
154                if (mLogVerbose) Log.v(TAG, "Standard recording sound");
155                mRecordSound = new SoundPlayer(recordSoundAsset, false);
156            } else {
157                if (mLogVerbose) Log.v(TAG, "Forced recording sound");
158                mRecordSound = new SoundPlayer(recordSoundAsset, true);
159            }
160        } catch (java.io.FileNotFoundException e) {
161            Log.e(TAG, "System video record sound not found");
162            mRecordSound = null;
163        }
164
165    }
166
167    public void setCamera(Camera cameraDevice) {
168        switch (mState) {
169            case STATE_PREVIEW:
170                throw new RuntimeException("setCamera cannot be called while previewing!");
171            case STATE_RECORD:
172                throw new RuntimeException("setCamera cannot be called while recording!");
173            case STATE_RELEASED:
174                throw new RuntimeException("setCamera called on an already released recorder!");
175            default:
176                break;
177        }
178
179        mCameraDevice = cameraDevice;
180    }
181
182    public void setProfile(CamcorderProfile profile) {
183        switch (mState) {
184            case STATE_RECORD:
185                throw new RuntimeException("setProfile cannot be called while recording!");
186            case STATE_RELEASED:
187                throw new RuntimeException("setProfile called on an already released recorder!");
188            default:
189                break;
190        }
191        mProfile = profile;
192    }
193
194    public void setOutputFile(String outputFile) {
195        switch (mState) {
196            case STATE_RECORD:
197                throw new RuntimeException("setOutputFile cannot be called while recording!");
198            case STATE_RELEASED:
199                throw new RuntimeException("setOutputFile called on an already released recorder!");
200            default:
201                break;
202        }
203
204        mOutputFile = outputFile;
205        mFd = null;
206    }
207
208    public void setOutputFile(FileDescriptor fd) {
209        switch (mState) {
210            case STATE_RECORD:
211                throw new RuntimeException("setOutputFile cannot be called while recording!");
212            case STATE_RELEASED:
213                throw new RuntimeException("setOutputFile called on an already released recorder!");
214            default:
215                break;
216        }
217
218        mOutputFile = null;
219        mFd = fd;
220    }
221
222    /**
223     * Sets the maximum filesize (in bytes) of the recording session.
224     * This will be passed on to the MediaEncoderFilter and then to the
225     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
226     * disable the limit
227    */
228    public synchronized void setMaxFileSize(long maxFileSize) {
229        switch (mState) {
230            case STATE_RECORD:
231                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
232            case STATE_RELEASED:
233                throw new RuntimeException("setMaxFileSize called on an already released recorder!");
234            default:
235                break;
236        }
237        mMaxFileSize = maxFileSize;
238    }
239
240    /**
241    * Sets the maximum recording duration (in ms) for the next recording session
242    * Setting it to zero (the default) disables the limit.
243    */
244    public synchronized void setMaxDuration(int maxDurationMs) {
245        switch (mState) {
246            case STATE_RECORD:
247                throw new RuntimeException("setMaxDuration cannot be called while recording!");
248            case STATE_RELEASED:
249                throw new RuntimeException("setMaxDuration called on an already released recorder!");
250            default:
251                break;
252        }
253        mMaxDurationMs = maxDurationMs;
254    }
255
256
257    public void setCaptureRate(double fps) {
258        switch (mState) {
259            case STATE_RECORD:
260                throw new RuntimeException("setCaptureRate cannot be called while recording!");
261            case STATE_RELEASED:
262                throw new RuntimeException("setCaptureRate called on an already released recorder!");
263            default:
264                break;
265        }
266
267        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
268        mCaptureRate = fps;
269    }
270
271    public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
272                                  int previewWidth,
273                                  int previewHeight) {
274        if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
275        switch (mState) {
276            case STATE_RECORD:
277                throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
278            case STATE_RELEASED:
279                throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
280            default:
281                break;
282        }
283
284        mPreviewSurfaceHolder = previewSurfaceHolder;
285        mPreviewWidth = previewWidth;
286        mPreviewHeight = previewHeight;
287
288        switch (mState) {
289            case STATE_WAITING_FOR_SURFACE:
290                startPreview();
291                break;
292            case STATE_STARTING_PREVIEW:
293            case STATE_PREVIEW:
294                initializeEffect(true);
295                break;
296        }
297    }
298
299    public void setEffect(int effect, Object effectParameter) {
300        if (mLogVerbose) Log.v(TAG,
301                               "setEffect: effect ID " + effect +
302                               ", parameter " + effectParameter.toString() );
303        switch (mState) {
304            case STATE_RECORD:
305                throw new RuntimeException("setEffect cannot be called while recording!");
306            case STATE_RELEASED:
307                throw new RuntimeException("setEffect called on an already released recorder!");
308            default:
309                break;
310        }
311
312        mEffect = effect;
313        mEffectParameter = effectParameter;
314
315        if (mState == STATE_PREVIEW ||
316                mState == STATE_STARTING_PREVIEW) {
317            initializeEffect(false);
318        }
319    }
320
    /** Callbacks for effect state changes and asynchronous failures. */
    public interface EffectsListener {
        // Delivered with one of the EFFECT_MSG_* constants for the given effect ID.
        public void onEffectsUpdate(int effectId, int effectMsg);
        // Delivered when an asynchronous error occurs; filePath identifies the output.
        public void onEffectsError(Exception exception, String filePath);
    }
325
326    public void setEffectsListener(EffectsListener listener) {
327        mEffectsListener = listener;
328    }
329
330    private void setFaceDetectOrientation() {
331        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
332            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
333            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
334            rotateFilter.setInputValue("rotation", mOrientationHint);
335            int reverseDegrees = (360 - mOrientationHint) % 360;
336            metaRotateFilter.setInputValue("rotation", reverseDegrees);
337        }
338    }
339
340    private void setRecordingOrientation() {
341        if ( mState != STATE_RECORD && mRunner != null) {
342            Point bl = new Point(0, 0);
343            Point br = new Point(1, 0);
344            Point tl = new Point(0, 1);
345            Point tr = new Point(1, 1);
346            Quad recordingRegion;
347            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
348                // The back camera is not mirrored, so use a identity transform
349                recordingRegion = new Quad(bl, br, tl, tr);
350            } else {
351                // Recording region needs to be tweaked for front cameras, since they
352                // mirror their preview
353                if (mOrientationHint == 0 || mOrientationHint == 180) {
354                    // Horizontal flip in landscape
355                    recordingRegion = new Quad(br, bl, tr, tl);
356                } else {
357                    // Horizontal flip in portrait
358                    recordingRegion = new Quad(tl, tr, bl, br);
359                }
360            }
361            Filter recorder = mRunner.getGraph().getFilter("recorder");
362            recorder.setInputValue("inputRegion", recordingRegion);
363        }
364    }
365    public void setOrientationHint(int degrees) {
366        switch (mState) {
367            case STATE_RELEASED:
368                throw new RuntimeException(
369                        "setOrientationHint called on an already released recorder!");
370            default:
371                break;
372        }
373        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
374        mOrientationHint = degrees;
375        setFaceDetectOrientation();
376        setRecordingOrientation();
377    }
378
379    /** Passes the native orientation of the Camera app (device dependent)
380     * to allow for correct output aspect ratio. Defaults to portrait */
381    public void setAppToLandscape(boolean landscape) {
382        if (mState != STATE_CONFIGURE) {
383            throw new RuntimeException(
384                "setAppToLandscape called after configuration!");
385        }
386        mAppIsLandscape = landscape;
387    }
388
389    public void setCameraFacing(int facing) {
390        switch (mState) {
391            case STATE_RELEASED:
392                throw new RuntimeException(
393                    "setCameraFacing called on alrady released recorder!");
394            default:
395                break;
396        }
397        mCameraFacing = facing;
398        setRecordingOrientation();
399    }
400
401    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
402        switch (mState) {
403            case STATE_RECORD:
404                throw new RuntimeException("setInfoListener cannot be called while recording!");
405            case STATE_RELEASED:
406                throw new RuntimeException("setInfoListener called on an already released recorder!");
407            default:
408                break;
409        }
410        mInfoListener = infoListener;
411    }
412
413    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
414        switch (mState) {
415            case STATE_RECORD:
416                throw new RuntimeException("setErrorListener cannot be called while recording!");
417            case STATE_RELEASED:
418                throw new RuntimeException("setErrorListener called on an already released recorder!");
419            default:
420                break;
421        }
422        mErrorListener = errorListener;
423    }
424
    /**
     * Creates the filter-framework graph environment and its GL context, and
     * publishes the shared references (callbacks, recording dimensions,
     * profile) every effect graph binds against. Resets runner/graph state.
     */
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                  + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }
        // Profiles are defined in landscape; swap to portrait dimensions when the
        // app is portrait-native.
        // NOTE(review): this mutates the caller-supplied CamcorderProfile in place,
        // and a second call would swap the dimensions back — presumably this method
        // is only invoked once per profile (from startPreview); verify against callers.
        if (!mAppIsLandscape) {
            int tmp;
            tmp = mProfile.videoFrameWidth;
            mProfile.videoFrameWidth = mProfile.videoFrameHeight;
            mProfile.videoFrameHeight = tmp;
        }
        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        // No graph loaded yet; initializeEffect() will populate these.
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }
450
451    private synchronized void initializeEffect(boolean forceReset) {
452        if (forceReset ||
453            mCurrentEffect != mEffect ||
454            mCurrentEffect == EFFECT_BACKDROPPER) {
455            if (mLogVerbose) {
456                Log.v(TAG, "Effect initializing. Preview size "
457                       + mPreviewWidth + ", " + mPreviewHeight);
458            }
459
460            mGraphEnv.addReferences(
461                    "previewSurface", mPreviewSurfaceHolder.getSurface(),
462                    "previewWidth", mPreviewWidth,
463                    "previewHeight", mPreviewHeight,
464                    "orientation", mOrientationHint);
465            if (mState == STATE_PREVIEW ||
466                    mState == STATE_STARTING_PREVIEW) {
467                // Switching effects while running. Inform video camera.
468                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
469            }
470
471            switch (mEffect) {
472                case EFFECT_GOOFY_FACE:
473                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
474                    break;
475                case EFFECT_BACKDROPPER:
476                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
477                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
478                    break;
479                default:
480                    throw new RuntimeException("Unknown effect ID" + mEffect + "!");
481            }
482            mCurrentEffect = mEffect;
483
484            mOldRunner = mRunner;
485            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
486            mRunner.setDoneCallback(mRunnerDoneCallback);
487            if (mLogVerbose) {
488                Log.v(TAG, "New runner: " + mRunner
489                      + ". Old runner: " + mOldRunner);
490            }
491            if (mState == STATE_PREVIEW ||
492                    mState == STATE_STARTING_PREVIEW) {
493                // Switching effects while running. Stop existing runner.
494                // The stop callback will take care of starting new runner.
495                mCameraDevice.stopPreview();
496                try {
497                    mCameraDevice.setPreviewTexture(null);
498                } catch(IOException e) {
499                    throw new RuntimeException("Unable to connect camera to effect input", e);
500                }
501                mOldRunner.stop();
502            }
503        }
504
505        switch (mCurrentEffect) {
506            case EFFECT_GOOFY_FACE:
507                tryEnableVideoStabilization(true);
508                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
509                goofyFilter.setInputValue("currentEffect",
510                                          ((Integer)mEffectParameter).intValue());
511                break;
512            case EFFECT_BACKDROPPER:
513                tryEnableVideoStabilization(false);
514                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
515                backgroundSrc.setInputValue("sourceUrl",
516                                            (String)mEffectParameter);
517                break;
518            default:
519                break;
520        }
521        setFaceDetectOrientation();
522        setRecordingOrientation();
523    }
524
525    public synchronized void startPreview() {
526        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
527
528        switch (mState) {
529            case STATE_STARTING_PREVIEW:
530            case STATE_PREVIEW:
531                // Already running preview
532                Log.w(TAG, "startPreview called when already running preview");
533                return;
534            case STATE_RECORD:
535                throw new RuntimeException("Cannot start preview when already recording!");
536            case STATE_RELEASED:
537                throw new RuntimeException("setEffect called on an already released recorder!");
538            default:
539                break;
540        }
541
542        if (mEffect == EFFECT_NONE) {
543            throw new RuntimeException("No effect selected!");
544        }
545        if (mEffectParameter == null) {
546            throw new RuntimeException("No effect parameter provided!");
547        }
548        if (mProfile == null) {
549            throw new RuntimeException("No recording profile provided!");
550        }
551        if (mPreviewSurfaceHolder == null) {
552            if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
553            mState = STATE_WAITING_FOR_SURFACE;
554            return;
555        }
556        if (mCameraDevice == null) {
557            throw new RuntimeException("No camera to record from!");
558        }
559
560        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
561
562        initializeFilterFramework();
563
564        initializeEffect(true);
565
566        if (mLogVerbose) Log.v(TAG, "Starting filter graph");
567
568        mState = STATE_STARTING_PREVIEW;
569        mRunner.run();
570        // Rest of preview startup handled in mSourceReadyCallback
571    }
572
    /**
     * Invoked by the graph's SurfaceTextureSource once its texture exists (or
     * with null when the graph is shutting down). Completes preview startup:
     * connects the camera to the effect input texture and starts the camera
     * preview. Runs on a filter-framework thread, hence the synchronization.
     */
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized(EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch(IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                // Stop the camera before re-pointing it at the effect input texture.
                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch(IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
            }
        }
    };
633
    /**
     * Invoked by the backdropper filter when background learning finishes.
     * Locks AE/AWB so the learned background model stays valid.
     */
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            // Locks are required here, not best-effort: throws if unsupported.
            enable3ALocks(true);
        }
    };
644
    // A callback to finalize the media after the recording is done.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            // EFFECT_NONE: this message is not tied to a specific effect.
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };
654
655    public synchronized void startRecording() {
656        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
657
658        switch (mState) {
659            case STATE_RECORD:
660                throw new RuntimeException("Already recording, cannot begin anew!");
661            case STATE_RELEASED:
662                throw new RuntimeException("startRecording called on an already released recorder!");
663            default:
664                break;
665        }
666
667        if ((mOutputFile == null) && (mFd == null)) {
668            throw new RuntimeException("No output file name or descriptor provided!");
669        }
670
671        if (mState == STATE_CONFIGURE) {
672            startPreview();
673        }
674
675        Filter recorder = mRunner.getGraph().getFilter("recorder");
676        if (mFd != null) {
677            recorder.setInputValue("outputFileDescriptor", mFd);
678        } else {
679            recorder.setInputValue("outputFile", mOutputFile);
680        }
681        // It is ok to set the audiosource without checking for timelapse here
682        // since that check will be done in the MediaEncoderFilter itself
683        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);
684
685        recorder.setInputValue("recordingProfile", mProfile);
686        recorder.setInputValue("orientationHint", mOrientationHint);
687        // Important to set the timelapseinterval to 0 if the capture rate is not >0
688        // since the recorder does not get created every time the recording starts.
689        // The recorder infers whether the capture is timelapsed based on the value of
690        // this interval
691        boolean captureTimeLapse = mCaptureRate > 0;
692        if (captureTimeLapse) {
693            double timeBetweenFrameCapture = 1 / mCaptureRate;
694            recorder.setInputValue("timelapseRecordingIntervalUs",
695                    (long) (1000000 * timeBetweenFrameCapture));
696        } else {
697            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
698        }
699
700        if (mInfoListener != null) {
701            recorder.setInputValue("infoListener", mInfoListener);
702        }
703        if (mErrorListener != null) {
704            recorder.setInputValue("errorListener", mErrorListener);
705        }
706        recorder.setInputValue("maxFileSize", mMaxFileSize);
707        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
708        recorder.setInputValue("recording", true);
709        if (mRecordSound != null) mRecordSound.play();
710        mState = STATE_RECORD;
711    }
712
713    public synchronized void stopRecording() {
714        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
715
716        switch (mState) {
717            case STATE_CONFIGURE:
718            case STATE_STARTING_PREVIEW:
719            case STATE_PREVIEW:
720                Log.w(TAG, "StopRecording called when recording not active!");
721                return;
722            case STATE_RELEASED:
723                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
724            default:
725                break;
726        }
727        Filter recorder = mRunner.getGraph().getFilter("recorder");
728        recorder.setInputValue("recording", false);
729        if (mRecordSound != null) mRecordSound.play();
730        mState = STATE_PREVIEW;
731    }
732
733    // Stop and release effect resources
734    public synchronized void stopPreview() {
735        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
736
737        switch (mState) {
738            case STATE_CONFIGURE:
739                Log.w(TAG, "StopPreview called when preview not active!");
740                return;
741            case STATE_RELEASED:
742                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
743            default:
744                break;
745        }
746
747        if (mState == STATE_RECORD) {
748            stopRecording();
749        }
750
751        mCurrentEffect = EFFECT_NONE;
752
753        mCameraDevice.stopPreview();
754        try {
755            mCameraDevice.setPreviewTexture(null);
756        } catch(IOException e) {
757            throw new RuntimeException("Unable to connect camera to effect input", e);
758        }
759
760        mState = STATE_CONFIGURE;
761        mOldRunner = mRunner;
762        mRunner.stop();
763        mRunner = null;
764        // Rest of stop and release handled in mRunnerDoneCallback
765    }
766
767    // Try to enable/disable video stabilization if supported; otherwise return false
768    boolean tryEnableVideoStabilization(boolean toggle) {
769        Camera.Parameters params = mCameraDevice.getParameters();
770
771        String vstabSupported = params.get("video-stabilization-supported");
772        if ("true".equals(vstabSupported)) {
773            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
774            params.set("video-stabilization", toggle ? "true" : "false");
775            mCameraDevice.setParameters(params);
776            return true;
777        }
778        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
779        return false;
780    }
781
782    // Try to enable/disable 3A locks if supported; otherwise return false
783    boolean tryEnable3ALocks(boolean toggle) {
784        Camera.Parameters params = mCameraDevice.getParameters();
785        if (params.isAutoExposureLockSupported() &&
786            params.isAutoWhiteBalanceLockSupported() ) {
787            params.setAutoExposureLock(toggle);
788            params.setAutoWhiteBalanceLock(toggle);
789            mCameraDevice.setParameters(params);
790            return true;
791        }
792        return false;
793    }
794
795    // Try to enable/disable 3A locks if supported; otherwise, throw error
796    // Use this when locks are essential to success
797    void enable3ALocks(boolean toggle) {
798        Camera.Parameters params = mCameraDevice.getParameters();
799        if (!tryEnable3ALocks(toggle)) {
800            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
801        }
802    }
803
    // Invoked when a filter graph finishes running — after an effect switch,
    // an error, or a full shutdown. Tears down the retired graph (mOldRunner)
    // under an active GL context, then uses mState to decide whether to start
    // the replacement runner or announce that effects have stopped.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        public void onRunnerDone(int result) {
            // Lock on the outer recorder so state transitions here cannot
            // interleave with startPreview/stopPreview/release.
            synchronized(EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                          "Graph runner done (" + EffectsRecorder.this
                          + ", mRunner " + mRunner
                          + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    // mRunner may already have been cleared by stopPreview().
                    raiseError(mRunner == null ? null : mRunner.getError());
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    // tearDown releases GL resources, so the GL environment
                    // must be active on this thread; restore its previous
                    // (in)active state afterwards.
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };
849
850    // Indicates that all camera/recording activity needs to halt
851    public synchronized void release() {
852        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
853
854        switch (mState) {
855            case STATE_RECORD:
856            case STATE_STARTING_PREVIEW:
857            case STATE_PREVIEW:
858                stopPreview();
859                // Fall-through
860            default:
861                mRecordSound.release();
862                mState = STATE_RELEASED;
863                break;
864        }
865    }
866
867    private void sendMessage(final int effect, final int msg) {
868        if (mEffectsListener != null) {
869            mHandler.post(new Runnable() {
870                public void run() {
871                    mEffectsListener.onEffectsUpdate(effect, msg);
872                }
873            });
874        }
875    }
876
877    private void raiseError(final Exception exception) {
878        if (mEffectsListener != null) {
879            mHandler.post(new Runnable() {
880                public void run() {
881                    if (mFd != null) {
882                        mEffectsListener.onEffectsError(exception, null);
883                    } else {
884                        mEffectsListener.onEffectsError(exception, mOutputFile);
885                    }
886                }
887            });
888        }
889    }
890
891}
892