EffectsRecorder.java revision 40cc1c311bc14b20c858414b04ee1c67766aa890
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera;

import android.content.Context;
import android.filterfw.GraphEnvironment;
import android.filterfw.core.Filter;
import android.filterfw.core.GLEnvironment;
import android.filterfw.core.GraphRunner;
import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
import android.filterfw.geometry.Point;
import android.filterfw.geometry.Quad;
import android.filterpacks.videoproc.BackDropperFilter;
import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
import android.filterpacks.videosink.MediaEncoderFilter.OnRecordingDoneListener;
import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
import android.filterpacks.videosrc.SurfaceTextureTarget;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaActionSound;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;

import java.io.FileDescriptor;
import java.io.IOException;


/**
 * Encapsulates the mobile filter framework components needed to record video with
 * effects applied. Modeled after MediaRecorder: callers configure the recorder
 * (camera, profile, output, effect), then drive it through
 * startPreview/startRecording/stopRecording/stopPreview/release.
 *
 * <p>Threading: public entry points that touch graph state are synchronized on
 * this instance; listener callbacks are posted to the main looper via mHandler.
 */
public class EffectsRecorder {

    // Effect families selectable via setEffect().
    public static final int EFFECT_NONE = 0;
    public static final int EFFECT_GOOFY_FACE = 1;
    public static final int EFFECT_BACKDROPPER = 2;

    // Sub-effects for EFFECT_GOOFY_FACE (passed as the effect parameter).
    public static final int EFFECT_GF_SQUEEZE = 0;
    public static final int EFFECT_GF_BIG_EYES = 1;
    public static final int EFFECT_GF_BIG_MOUTH = 2;
    public static final int EFFECT_GF_SMALL_MOUTH = 3;
    public static final int EFFECT_GF_BIG_NOSE = 4;
    public static final int EFFECT_GF_SMALL_EYES = 5;
    public static final int NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

    // Messages delivered to EffectsListener.onEffectsUpdate().
    public static final int EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int EFFECT_MSG_DONE_LEARNING = 1;
    public static final int EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int EFFECT_MSG_EFFECTS_STOPPED = 3;
    public static final int EFFECT_MSG_RECORDING_DONE = 4;
    public static final int EFFECT_MSG_PREVIEW_RUNNING = 5;

    private Context mContext;
    private Handler mHandler;

    private Camera mCameraDevice;
    private CamcorderProfile mProfile;
    private double mCaptureRate = 0;          // >0 enables time-lapse capture
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    // Exactly one of mOutputFile / mFd is non-null once an output is set.
    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;            // 0 or negative disables the limit
    private int mMaxDurationMs = 0;           // 0 disables the limit
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private boolean mAppIsLandscape;

    private int mEffect = EFFECT_NONE;        // requested effect
    private int mCurrentEffect = EFFECT_NONE; // effect the running graph was built for
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private GraphEnvironment mGraphEnv;
    private int mGraphId;
    private GraphRunner mRunner = null;
    private GraphRunner mOldRunner = null;    // kept until torn down in onRunnerDone

    private SurfaceTexture mTextureSource;

    // Lifecycle states; see mState transitions in startPreview()/stopPreview() etc.
    private static final int STATE_CONFIGURE = 0;
    private static final int STATE_WAITING_FOR_SURFACE = 1;
    private static final int STATE_STARTING_PREVIEW = 2;
    private static final int STATE_PREVIEW = 3;
    private static final int STATE_RECORD = 4;
    private static final int STATE_RELEASED = 5;
    private int mState = STATE_CONFIGURE;

    private static final String TAG = "EffectsRecorder";
    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private MediaActionSound mCameraSound;

    /** Determine if a given effect is supported at runtime.
     * Some effects require libraries not available on all devices.
     */
    public static boolean isEffectSupported(int effectId) {
        switch (effectId) {
            case EFFECT_GOOFY_FACE:
                return Filter.isAvailable(
                        "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
            case EFFECT_BACKDROPPER:
                return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
            default:
                return false;
        }
    }

    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
        mContext = context;
        mHandler = new Handler(Looper.getMainLooper());
        // Preload shutter sounds so playback in start/stopRecording has no latency.
        mCameraSound = new MediaActionSound();
        mCameraSound.load(MediaActionSound.START_VIDEO_RECORDING);
        mCameraSound.load(MediaActionSound.STOP_VIDEO_RECORDING);
    }

    public void setCamera(Camera cameraDevice) {
        switch (mState) {
            case STATE_PREVIEW:
                throw new RuntimeException("setCamera cannot be called while previewing!");
            case STATE_RECORD:
                throw new RuntimeException("setCamera cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setCamera called on an already released recorder!");
            default:
                break;
        }

        mCameraDevice = cameraDevice;
    }

    public void setProfile(CamcorderProfile profile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setProfile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setProfile called on an already released recorder!");
            default:
                break;
        }
        mProfile = profile;
    }

    /** Record to a file path; clears any previously set file descriptor. */
    public void setOutputFile(String outputFile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = outputFile;
        mFd = null;
    }

    /** Record to an open file descriptor; clears any previously set path. */
    public void setOutputFile(FileDescriptor fd) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = null;
        mFd = fd;
    }

    /**
     * Sets the maximum filesize (in bytes) of the recording session.
     * This will be passed on to the MediaEncoderFilter and then to the
     * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
     * disable the limit.
     */
    public synchronized void setMaxFileSize(long maxFileSize) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setMaxFileSize called on an already released recorder!");
            default:
                break;
        }
        mMaxFileSize = maxFileSize;
    }

    /**
     * Sets the maximum recording duration (in ms) for the next recording session.
     * Setting it to zero (the default) disables the limit.
     */
    public synchronized void setMaxDuration(int maxDurationMs) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxDuration cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setMaxDuration called on an already released recorder!");
            default:
                break;
        }
        mMaxDurationMs = maxDurationMs;
    }


    /** Sets the time-lapse capture rate; fps &gt; 0 enables time-lapse recording. */
    public void setCaptureRate(double fps) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setCaptureRate cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setCaptureRate called on an already released recorder!");
            default:
                break;
        }

        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
        mCaptureRate = fps;
    }

    /**
     * Provides the display surface for effect preview. If startPreview() was
     * called before a surface existed, the preview is started now; if a preview
     * is already running, the effect graph is re-initialized against the new
     * surface.
     */
    public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
                                        int previewWidth,
                                        int previewHeight) {
        if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException(
                        "setPreviewSurfaceTexture cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setPreviewSurfaceTexture called on an already released recorder!");
            default:
                break;
        }

        mPreviewSurfaceTexture = previewSurfaceTexture;
        mPreviewWidth = previewWidth;
        mPreviewHeight = previewHeight;

        switch (mState) {
            case STATE_WAITING_FOR_SURFACE:
                startPreview();
                break;
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                initializeEffect(true);
                break;
        }
    }

    /**
     * Selects the effect to apply. If preview is running the new effect takes
     * over immediately (the running graph is swapped out).
     */
    public void setEffect(int effect, Object effectParameter) {
        // String.valueOf avoids an NPE when logging a null parameter.
        if (mLogVerbose) Log.v(TAG,
                "setEffect: effect ID " + effect +
                ", parameter " + String.valueOf(effectParameter));
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setEffect cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setEffect called on an already released recorder!");
            default:
                break;
        }

        mEffect = effect;
        mEffectParameter = effectParameter;

        if (mState == STATE_PREVIEW ||
                mState == STATE_STARTING_PREVIEW) {
            initializeEffect(false);
        }
    }

    /** Receives effect lifecycle updates and errors, posted on the main looper. */
    public interface EffectsListener {
        public void onEffectsUpdate(int effectId, int effectMsg);
        public void onEffectsError(Exception exception, String filePath);
    }

    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }

    /** Pushes the current orientation hint into the goofy-face rotation filters. */
    private void setFaceDetectOrientation() {
        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
            Filter rotateFilter = mRunner.getGraph().getFilter("rotate");
            Filter metaRotateFilter = mRunner.getGraph().getFilter("metarotate");
            rotateFilter.setInputValue("rotation", mOrientationHint);
            // The metadata rotation runs in the opposite direction.
            int reverseDegrees = (360 - mOrientationHint) % 360;
            metaRotateFilter.setInputValue("rotation", reverseDegrees);
        }
    }

    /** Configures the recorder filter's input region to undo front-camera mirroring. */
    private void setRecordingOrientation() {
        if (mState != STATE_RECORD && mRunner != null) {
            Point bl = new Point(0, 0);
            Point br = new Point(1, 0);
            Point tl = new Point(0, 1);
            Point tr = new Point(1, 1);
            Quad recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use a identity transform
                recordingRegion = new Quad(bl, br, tl, tr);
            } else {
                // Recording region needs to be tweaked for front cameras, since they
                // mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = new Quad(br, bl, tr, tl);
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = new Quad(tl, tr, bl, br);
                }
            }
            Filter recorder = mRunner.getGraph().getFilter("recorder");
            recorder.setInputValue("inputRegion", recordingRegion);
        }
    }

    public void setOrientationHint(int degrees) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setOrientationHint called on an already released recorder!");
            default:
                break;
        }
        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
        mOrientationHint = degrees;
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    /** Passes the native orientation of the Camera app (device dependent)
     * to allow for correct output aspect ratio. Defaults to portrait. */
    public void setAppToLandscape(boolean landscape) {
        if (mState != STATE_CONFIGURE) {
            throw new RuntimeException(
                    "setAppToLandscape called after configuration!");
        }
        mAppIsLandscape = landscape;
    }

    public void setCameraFacing(int facing) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setCameraFacing called on already released recorder!");
            default:
                break;
        }
        mCameraFacing = facing;
        setRecordingOrientation();
    }

    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setInfoListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setInfoListener called on an already released recorder!");
            default:
                break;
        }
        mInfoListener = infoListener;
    }

    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setErrorListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setErrorListener called on an already released recorder!");
            default:
                break;
        }
        mErrorListener = errorListener;
    }

    /** Creates the GraphEnvironment and registers references shared by all graphs. */
    private void initializeFilterFramework() {
        mGraphEnv = new GraphEnvironment();
        mGraphEnv.createGLEnvironment();

        if (mLogVerbose) {
            Log.v(TAG, "Effects framework initializing. Recording size "
                    + mProfile.videoFrameWidth + ", " + mProfile.videoFrameHeight);
        }
        if (!mAppIsLandscape) {
            // Swap to portrait dimensions.
            // NOTE(review): this mutates the caller-supplied CamcorderProfile in
            // place; if this method ever runs twice on the same profile the
            // dimensions would swap back — confirm callers pass a fresh profile
            // per preview session.
            int tmp;
            tmp = mProfile.videoFrameWidth;
            mProfile.videoFrameWidth = mProfile.videoFrameHeight;
            mProfile.videoFrameHeight = tmp;
        }
        mGraphEnv.addReferences(
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", mProfile.videoFrameWidth,
                "recordingHeight", mProfile.videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener);
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }

    /**
     * (Re)builds the filter graph for the requested effect. A full rebuild
     * happens when forced, when the effect changed, or always for the
     * backdropper (its learned background must be re-learned). If a graph is
     * already running, it is stopped and the new one is started from the
     * runner-done callback.
     */
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
                mCurrentEffect != mEffect ||
                mCurrentEffect == EFFECT_BACKDROPPER) {
            if (mLogVerbose) {
                Log.v(TAG, "Effect initializing. Preview size "
                        + mPreviewWidth + ", " + mPreviewHeight);
            }

            mGraphEnv.addReferences(
                    "previewSurfaceTexture", mPreviewSurfaceTexture,
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint);
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
                    break;
                case EFFECT_BACKDROPPER:
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID " + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            mOldRunner = mRunner;
            mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
            mRunner.setDoneCallback(mRunnerDoneCallback);
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                        + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                try {
                    mCameraDevice.setPreviewTexture(null);
                } catch (IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }
                mOldRunner.stop();
            }
        }

        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
                goofyFilter.setInputValue("currentEffect",
                        ((Integer) mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                tryEnableVideoStabilization(false);
                Filter backgroundSrc = mRunner.getGraph().getFilter("background");
                backgroundSrc.setInputValue("sourceUrl", mEffectParameter);
                // For front camera, the background video needs to be mirrored in the
                // backdropper filter
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    Filter replacer = mRunner.getGraph().getFilter("replacer");
                    replacer.setInputValue("mirrorBg", true);
                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
                }
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    /**
     * Starts effect preview. Requires an effect, its parameter, a profile and a
     * camera to be set; if no preview surface exists yet, waits in
     * STATE_WAITING_FOR_SURFACE and resumes from setPreviewSurfaceTexture().
     */
    public synchronized void startPreview() {
        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");

        switch (mState) {
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                // Already running preview
                Log.w(TAG, "startPreview called when already running preview");
                return;
            case STATE_RECORD:
                throw new RuntimeException("Cannot start preview when already recording!");
            case STATE_RELEASED:
                throw new RuntimeException("startPreview called on an already released recorder!");
            default:
                break;
        }

        if (mEffect == EFFECT_NONE) {
            throw new RuntimeException("No effect selected!");
        }
        if (mEffectParameter == null) {
            throw new RuntimeException("No effect parameter provided!");
        }
        if (mProfile == null) {
            throw new RuntimeException("No recording profile provided!");
        }
        if (mPreviewSurfaceTexture == null) {
            if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one");
            mState = STATE_WAITING_FOR_SURFACE;
            return;
        }
        if (mCameraDevice == null) {
            throw new RuntimeException("No camera to record from!");
        }

        if (mLogVerbose) Log.v(TAG, "Initializing filter graph");

        initializeFilterFramework();

        initializeEffect(true);

        if (mLogVerbose) Log.v(TAG, "Starting filter graph");

        mState = STATE_STARTING_PREVIEW;
        mRunner.run();
        // Rest of preview startup handled in mSourceReadyCallback
    }

    // Called by the graph when its SurfaceTexture source is ready (or torn
    // down, signalled by a null source); connects the camera to the graph and
    // completes the preview-start handshake.
    private SurfaceTextureSourceListener mSourceReadyCallback =
            new SurfaceTextureSourceListener() {
        @Override
        public void onSurfaceTextureSourceReady(SurfaceTexture source) {
            if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
            synchronized (EffectsRecorder.this) {
                mTextureSource = source;

                if (mState == STATE_CONFIGURE) {
                    // Stop preview happened while the runner was doing startup tasks
                    // Since we haven't started anything up, don't do anything
                    // Rest of cleanup will happen in onRunnerDone
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                    return;
                }
                if (mState == STATE_RELEASED) {
                    // EffectsRecorder has been released, so don't touch the camera device
                    // or anything else
                    if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                    return;
                }
                if (source == null) {
                    if (mState == STATE_PREVIEW ||
                            mState == STATE_STARTING_PREVIEW ||
                            mState == STATE_RECORD) {
                        // A null source here means the graph is shutting down
                        // unexpectedly, so we need to turn off preview before
                        // the surface texture goes away.
                        mCameraDevice.stopPreview();
                        try {
                            mCameraDevice.setPreviewTexture(null);
                        } catch (IOException e) {
                            throw new RuntimeException("Unable to disconnect " +
                                    "camera from effect input", e);
                        }
                    }
                    return;
                }

                // Lock AE/AWB to reduce transition flicker
                tryEnable3ALocks(true);

                mCameraDevice.stopPreview();
                if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
                try {
                    mCameraDevice.setPreviewTexture(mTextureSource);
                } catch (IOException e) {
                    throw new RuntimeException("Unable to connect camera to effect input", e);
                }

                mCameraDevice.startPreview();

                // Unlock AE/AWB after preview started
                tryEnable3ALocks(false);

                mState = STATE_PREVIEW;

                if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

                // Sending a message to listener that preview is complete
                sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
            }
        }
    };

    // Notified when the backdropper has finished learning the background.
    private LearningDoneListener mLearningDoneListener =
            new LearningDoneListener() {
        @Override
        public void onLearningDone(BackDropperFilter filter) {
            if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
            // Called in a processing thread, so have to post message back to UI
            // thread
            sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
            enable3ALocks(true);
        }
    };

    // A callback to finalize the media after the recording is done.
    private OnRecordingDoneListener mRecordingDoneListener =
            new OnRecordingDoneListener() {
        // Forward the callback to the VideoCamera object (as an asynchronous event).
        @Override
        public void onRecordingDone() {
            if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
            sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
        }
    };

    /**
     * Begins recording through the effect graph. Starts preview first if still
     * in the configure state. Requires an output file or descriptor.
     */
    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException(
                        "startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            startPreview();
        }

        Filter recorder = mRunner.getGraph().getFilter("recorder");
        if (mFd != null) {
            recorder.setInputValue("outputFileDescriptor", mFd);
        } else {
            recorder.setInputValue("outputFile", mOutputFile);
        }
        // It is ok to set the audiosource without checking for timelapse here
        // since that check will be done in the MediaEncoderFilter itself
        recorder.setInputValue("audioSource", MediaRecorder.AudioSource.CAMCORDER);

        recorder.setInputValue("recordingProfile", mProfile);
        recorder.setInputValue("orientationHint", mOrientationHint);
        // Important to set the timelapseinterval to 0 if the capture rate is not >0
        // since the recorder does not get created every time the recording starts.
        // The recorder infers whether the capture is timelapsed based on the value of
        // this interval
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            recorder.setInputValue("timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            recorder.setInputValue("timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            recorder.setInputValue("infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            recorder.setInputValue("errorListener", mErrorListener);
        }
        recorder.setInputValue("maxFileSize", mMaxFileSize);
        recorder.setInputValue("maxDurationMs", mMaxDurationMs);
        recorder.setInputValue("recording", true);
        mCameraSound.play(MediaActionSound.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }

    /** Stops an active recording; preview keeps running. No-op if not recording. */
    public synchronized void stopRecording() {
        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                Log.w(TAG, "StopRecording called when recording not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
            default:
                break;
        }
        Filter recorder = mRunner.getGraph().getFilter("recorder");
        recorder.setInputValue("recording", false);
        mCameraSound.play(MediaActionSound.STOP_VIDEO_RECORDING);
        mState = STATE_PREVIEW;
    }

    // Called to tell the filter graph that the display surfacetexture is not valid anymore.
    // So the filter graph should not hold any reference to the surface created with that.
    public synchronized void disconnectDisplay() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " +
                "SurfaceTexture");
        SurfaceTextureTarget display = (SurfaceTextureTarget)
                mRunner.getGraph().getFilter("display");
        display.disconnect(mGraphEnv.getContext());
    }

    // The VideoCamera will call this to notify that the camera is being
    // released to the outside world. This call should happen after the
    // stopRecording call. Else, the effects may throw an exception.
    // With the recording stopped, the stopPreview call will not try to
    // release the camera again.
    // This must be called in onPause() if the effects are ON.
    public synchronized void disconnectCamera() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Nothing to disconnect");
            return;
        }
        mCameraDevice.stopPreview();
        try {
            mCameraDevice.setPreviewTexture(null);
        } catch (IOException e) {
            // Preserve the cause so the failure is diagnosable upstream.
            throw new RuntimeException("Unable to disconnect camera", e);
        }
        mCameraDevice = null;
    }

    // Stop and release effect resources
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "StopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        // This will not do anything if the camera has already been disconnected.
        disconnectCamera();

        mState = STATE_CONFIGURE;
        mOldRunner = mRunner;
        mRunner.stop();
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }

    // Try to enable/disable video stabilization if supported; otherwise return false
    boolean tryEnableVideoStabilization(boolean toggle) {
        Camera.Parameters params = mCameraDevice.getParameters();

        String vstabSupported = params.get("video-stabilization-supported");
        if ("true".equals(vstabSupported)) {
            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
            params.set("video-stabilization", toggle ? "true" : "false");
            mCameraDevice.setParameters(params);
            return true;
        }
        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
        return false;
    }

    // Try to enable/disable 3A locks if supported; otherwise return false
    boolean tryEnable3ALocks(boolean toggle) {
        Camera.Parameters params = mCameraDevice.getParameters();
        if (params.isAutoExposureLockSupported() &&
                params.isAutoWhiteBalanceLockSupported()) {
            params.setAutoExposureLock(toggle);
            params.setAutoWhiteBalanceLock(toggle);
            mCameraDevice.setParameters(params);
            return true;
        }
        return false;
    }

    // Try to enable/disable 3A locks if supported; otherwise, throw error
    // Use this when locks are essential to success
    void enable3ALocks(boolean toggle) {
        if (!tryEnable3ALocks(toggle)) {
            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
        }
    }

    // Invoked when a graph runner halts (normal stop, effect switch, or error).
    // Tears down the previous graph and either restarts the active one (effect
    // switch) or reports that effects have stopped.
    private OnRunnerDoneListener mRunnerDoneCallback =
            new OnRunnerDoneListener() {
        @Override
        public void onRunnerDone(int result) {
            synchronized (EffectsRecorder.this) {
                if (mLogVerbose) {
                    Log.v(TAG,
                            "Graph runner done (" + EffectsRecorder.this
                            + ", mRunner " + mRunner
                            + ", mOldRunner " + mOldRunner + ")");
                }
                if (result == GraphRunner.RESULT_ERROR) {
                    // Handle error case
                    Log.e(TAG, "Error running filter graph!");
                    Exception e = null;
                    if (mRunner != null) {
                        e = mRunner.getError();
                    } else if (mOldRunner != null) {
                        e = mOldRunner.getError();
                    }
                    raiseError(e);
                }
                if (mOldRunner != null) {
                    // Tear down old graph if available
                    if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                    GLEnvironment glEnv = mGraphEnv.getContext().getGLEnvironment();
                    if (glEnv != null && !glEnv.isActive()) {
                        glEnv.activate();
                    }
                    mOldRunner.getGraph().tearDown(mGraphEnv.getContext());
                    if (glEnv != null && glEnv.isActive()) {
                        glEnv.deactivate();
                    }
                    mOldRunner = null;
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW) {
                    // Switching effects, start up the new runner
                    if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
                    tryEnable3ALocks(false);
                    // In case of an error, the graph restarts from beginning and in case
                    // of the BACKDROPPER effect, the learner re-learns the background.
                    // Hence, we need to show the learning dialogue to the user
                    // to avoid recording before the learning is done. Else, the user
                    // could start recording before the learning is done and the new
                    // background comes up later leading to an end result video
                    // with a heterogeneous background.
                    // For BACKDROPPER effect, this path is also executed sometimes at
                    // the end of a normal recording session. In such a case, the graph
                    // does not restart and hence the learner does not re-learn. So we
                    // do not want to show the learning dialogue then.
                    if (result == GraphRunner.RESULT_ERROR &&
                            mCurrentEffect == EFFECT_BACKDROPPER) {
                        sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    }
                    mRunner.run();
                } else if (mState != STATE_RELEASED) {
                    // Shutting down effects
                    if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                    tryEnable3ALocks(false);
                    sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
                } else {
                    // STATE_RELEASED - camera will be/has been released as well, do nothing.
                }
            }
        }
    };

    // Indicates that all camera/recording activity needs to halt
    public synchronized void release() {
        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                stopPreview();
                // Fall-through
            default:
                if (mCameraSound != null) {
                    mCameraSound.release();
                    mCameraSound = null;
                }
                mState = STATE_RELEASED;
                break;
        }
    }

    /** Posts an effect status update to the listener on the main looper. */
    private void sendMessage(final int effect, final int msg) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mEffectsListener.onEffectsUpdate(effect, msg);
                }
            });
        }
    }

    /** Posts an error to the listener; file path is null when recording to an fd. */
    private void raiseError(final Exception exception) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (mFd != null) {
                        mEffectsListener.onEffectsError(exception, null);
                    } else {
                        mEffectsListener.onEffectsError(exception, mOutputFile);
                    }
                }
            });
        }
    }

}