/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import com.android.gallery3d.common.ApiHelper;

import java.io.FileDescriptor;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

/**
 * Encapsulates the mobile filter framework components needed to record video
 * with effects applied. Modeled after MediaRecorder.
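 *
 * <p>A typical call sequence, sketched from this class's own state checks
 * (illustrative only; variable names and the file path are hypothetical):
 * <pre>
 *   EffectsRecorder recorder = new EffectsRecorder(context);
 *   recorder.setCamera(cameraDevice);
 *   recorder.setProfile(profile);
 *   recorder.setEffect(EffectsRecorder.EFFECT_GOOFY_FACE,
 *           EffectsRecorder.EFFECT_GF_SQUEEZE);
 *   recorder.setPreviewSurfaceTexture(surfaceTexture, width, height);
 *   recorder.startPreview();
 *   recorder.setOutputFile("/sdcard/video.mp4");  // hypothetical path
 *   recorder.startRecording();
 *   // ... record ...
 *   recorder.stopRecording();
 *   recorder.stopPreview();
 *   recorder.release();
 * </pre>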
 */
@TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture
public class EffectsRecorder {
    private static final String TAG = "EffectsRecorder";

    private static Class<?> sClassFilter;
    private static Method sFilterIsAvailable;
    private static EffectsRecorder sEffectsRecorder;
    // The index of the current effects recorder.
    private static int sEffectsRecorderIndex;

    private static boolean sReflectionInited = false;

    private static Class<?> sClsLearningDoneListener;
    private static Class<?> sClsOnRunnerDoneListener;
    private static Class<?> sClsOnRecordingDoneListener;
    private static Class<?> sClsSurfaceTextureSourceListener;

    private static Method sFilterSetInputValue;

    private static Constructor<?> sCtPoint;
    private static Constructor<?> sCtQuad;

    private static Method sLearningDoneListenerOnLearningDone;

    private static Method sObjectEquals;
    private static Method sObjectToString;

    private static Class<?> sClsGraphRunner;
    private static Method sGraphRunnerGetGraph;
    private static Method sGraphRunnerSetDoneCallback;
    private static Method sGraphRunnerRun;
    private static Method sGraphRunnerGetError;
    private static Method sGraphRunnerStop;

    private static Method sFilterGraphGetFilter;
    private static Method sFilterGraphTearDown;

    private static Method sOnRunnerDoneListenerOnRunnerDone;

    private static Class<?> sClsGraphEnvironment;
    private static Constructor<?> sCtGraphEnvironment;
    private static Method sGraphEnvironmentCreateGLEnvironment;
    private static Method sGraphEnvironmentGetRunner;
    private static Method sGraphEnvironmentAddReferences;
    private static Method sGraphEnvironmentLoadGraph;
    private static Method sGraphEnvironmentGetContext;

    private static Method sFilterContextGetGLEnvironment;
    private static Method sGLEnvironmentIsActive;
    private static Method sGLEnvironmentActivate;
    private static Method sGLEnvironmentDeactivate;
    private static Method sSurfaceTextureTargetDisconnect;
    private static Method sOnRecordingDoneListenerOnRecordingDone;
    private static Method sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady;

    private Object mLearningDoneListener;
    private Object mRunnerDoneCallback;
    private Object mSourceReadyCallback;
    // A callback to finalize the media after the recording is done.
    private Object mRecordingDoneListener;

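    // The filter framework (android.filterfw) is not part of the public SDK,
    // so all access to it below goes through reflection. If the framework is
    // missing on a device, isEffectSupported() reports every effect as
    // unavailable.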
    static {
        try {
            sClassFilter = Class.forName("android.filterfw.core.Filter");
            sFilterIsAvailable = sClassFilter.getMethod("isAvailable",
                    String.class);
        } catch (ClassNotFoundException ex) {
            Log.v(TAG, "Can't find the class android.filterfw.core.Filter");
        } catch (NoSuchMethodException e) {
            Log.v(TAG, "Can't find the method Filter.isAvailable");
        }
    }

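    // Effect selection constants for setEffect().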
    public static final int  EFFECT_NONE        = 0;
    public static final int  EFFECT_GOOFY_FACE  = 1;
    public static final int  EFFECT_BACKDROPPER = 2;

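    // Goofy face sub-effects, passed as the effectParameter for
    // EFFECT_GOOFY_FACE.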
    public static final int  EFFECT_GF_SQUEEZE     = 0;
    public static final int  EFFECT_GF_BIG_EYES    = 1;
    public static final int  EFFECT_GF_BIG_MOUTH   = 2;
    public static final int  EFFECT_GF_SMALL_MOUTH = 3;
    public static final int  EFFECT_GF_BIG_NOSE    = 4;
    public static final int  EFFECT_GF_SMALL_EYES  = 5;
    public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;

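    // Messages delivered to EffectsListener.onEffectsUpdate().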
    public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
    public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
    public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
    public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
    public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
    public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;

    private Context mContext;
    private Handler mHandler;

    private CameraManager.CameraProxy mCameraDevice;
    private CamcorderProfile mProfile;
    private double mCaptureRate = 0;
    private SurfaceTexture mPreviewSurfaceTexture;
    private int mPreviewWidth;
    private int mPreviewHeight;
    private MediaRecorder.OnInfoListener mInfoListener;
    private MediaRecorder.OnErrorListener mErrorListener;

    private String mOutputFile;
    private FileDescriptor mFd;
    private int mOrientationHint = 0;
    private long mMaxFileSize = 0;
    private int mMaxDurationMs = 0;
    private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
    private int mCameraDisplayOrientation;

    private int mEffect = EFFECT_NONE;
    private int mCurrentEffect = EFFECT_NONE;
    private EffectsListener mEffectsListener;

    private Object mEffectParameter;

    private Object mGraphEnv;
    private int mGraphId;
    private Object mRunner = null;
    private Object mOldRunner = null;

    private SurfaceTexture mTextureSource;

    private static final int STATE_CONFIGURE              = 0;
    private static final int STATE_WAITING_FOR_SURFACE    = 1;
    private static final int STATE_STARTING_PREVIEW       = 2;
    private static final int STATE_PREVIEW                = 3;
    private static final int STATE_RECORD                 = 4;
    private static final int STATE_RELEASED               = 5;
    private int mState = STATE_CONFIGURE;

    private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    private SoundClips.Player mSoundPlayer;

    /**
     * Determines whether a given effect is supported at runtime.
     * Some effects require libraries that are not available on all devices.
     */
    public static boolean isEffectSupported(int effectId) {
        if (sFilterIsAvailable == null) return false;

        try {
            switch (effectId) {
                case EFFECT_GOOFY_FACE:
                    return (Boolean) sFilterIsAvailable.invoke(null,
                            "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
                case EFFECT_BACKDROPPER:
                    return (Boolean) sFilterIsAvailable.invoke(null,
                            "android.filterpacks.videoproc.BackDropperFilter");
                default:
                    return false;
            }
        } catch (Exception ex) {
            Log.e(TAG, "Failed to check filter", ex);
        }
        return false;
    }

    public EffectsRecorder(Context context) {
        if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");

        if (!sReflectionInited) {
            try {
                sFilterSetInputValue = sClassFilter.getMethod("setInputValue",
                        new Class[] {String.class, Object.class});

                Class<?> clsPoint = Class.forName("android.filterfw.geometry.Point");
                sCtPoint = clsPoint.getConstructor(new Class[] {float.class,
                        float.class});

                Class<?> clsQuad = Class.forName("android.filterfw.geometry.Quad");
                sCtQuad = clsQuad.getConstructor(new Class[] {clsPoint, clsPoint,
                        clsPoint, clsPoint});

                Class<?> clsBackDropperFilter = Class.forName(
                        "android.filterpacks.videoproc.BackDropperFilter");
                sClsLearningDoneListener = Class.forName(
                        "android.filterpacks.videoproc.BackDropperFilter$LearningDoneListener");
                sLearningDoneListenerOnLearningDone = sClsLearningDoneListener
                        .getMethod("onLearningDone", new Class[] {clsBackDropperFilter});

                sObjectEquals = Object.class.getMethod("equals", new Class[] {Object.class});
                sObjectToString = Object.class.getMethod("toString");

                sClsOnRunnerDoneListener = Class.forName(
                        "android.filterfw.core.GraphRunner$OnRunnerDoneListener");
                sOnRunnerDoneListenerOnRunnerDone = sClsOnRunnerDoneListener.getMethod(
                        "onRunnerDone", new Class[] {int.class});

                sClsGraphRunner = Class.forName("android.filterfw.core.GraphRunner");
                sGraphRunnerGetGraph = sClsGraphRunner.getMethod("getGraph");
                sGraphRunnerSetDoneCallback = sClsGraphRunner.getMethod(
                        "setDoneCallback", new Class[] {sClsOnRunnerDoneListener});
                sGraphRunnerRun = sClsGraphRunner.getMethod("run");
                sGraphRunnerGetError = sClsGraphRunner.getMethod("getError");
                sGraphRunnerStop = sClsGraphRunner.getMethod("stop");

                Class<?> clsFilterContext = Class.forName("android.filterfw.core.FilterContext");
                sFilterContextGetGLEnvironment = clsFilterContext.getMethod(
                        "getGLEnvironment");

                Class<?> clsFilterGraph = Class.forName("android.filterfw.core.FilterGraph");
                sFilterGraphGetFilter = clsFilterGraph.getMethod("getFilter",
                        new Class[] {String.class});
                sFilterGraphTearDown = clsFilterGraph.getMethod("tearDown",
                        new Class[] {clsFilterContext});

                sClsGraphEnvironment = Class.forName("android.filterfw.GraphEnvironment");
                sCtGraphEnvironment = sClsGraphEnvironment.getConstructor();
                sGraphEnvironmentCreateGLEnvironment = sClsGraphEnvironment.getMethod(
                        "createGLEnvironment");
                sGraphEnvironmentGetRunner = sClsGraphEnvironment.getMethod(
                        "getRunner", new Class[] {int.class, int.class});
                sGraphEnvironmentAddReferences = sClsGraphEnvironment.getMethod(
                        "addReferences", new Class[] {Object[].class});
                sGraphEnvironmentLoadGraph = sClsGraphEnvironment.getMethod(
                        "loadGraph", new Class[] {Context.class, int.class});
                sGraphEnvironmentGetContext = sClsGraphEnvironment.getMethod(
                        "getContext");

                Class<?> clsGLEnvironment = Class.forName("android.filterfw.core.GLEnvironment");
                sGLEnvironmentIsActive = clsGLEnvironment.getMethod("isActive");
                sGLEnvironmentActivate = clsGLEnvironment.getMethod("activate");
                sGLEnvironmentDeactivate = clsGLEnvironment.getMethod("deactivate");

                Class<?> clsSurfaceTextureTarget = Class.forName(
                        "android.filterpacks.videosrc.SurfaceTextureTarget");
                sSurfaceTextureTargetDisconnect = clsSurfaceTextureTarget.getMethod(
                        "disconnect", new Class[] {clsFilterContext});

                sClsOnRecordingDoneListener = Class.forName(
                        "android.filterpacks.videosink.MediaEncoderFilter$OnRecordingDoneListener");
                sOnRecordingDoneListenerOnRecordingDone =
                        sClsOnRecordingDoneListener.getMethod("onRecordingDone");

                sClsSurfaceTextureSourceListener = Class.forName(
                        "android.filterpacks.videosrc.SurfaceTextureSource$SurfaceTextureSourceListener");
                sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady =
                        sClsSurfaceTextureSourceListener.getMethod(
                                "onSurfaceTextureSourceReady",
                                new Class[] {SurfaceTexture.class});
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }

            sReflectionInited = true;
        }

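        // Bump the recorder index so that callbacks bound to any previous
        // instance are ignored; see SerializableInvocationHandler.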
        sEffectsRecorderIndex++;
        Log.v(TAG, "Current effects recorder index is " + sEffectsRecorderIndex);
        sEffectsRecorder = this;
        SerializableInvocationHandler sih = new SerializableInvocationHandler(
                sEffectsRecorderIndex);
        mLearningDoneListener = Proxy.newProxyInstance(
                sClsLearningDoneListener.getClassLoader(),
                new Class[] {sClsLearningDoneListener}, sih);
        mRunnerDoneCallback = Proxy.newProxyInstance(
                sClsOnRunnerDoneListener.getClassLoader(),
                new Class[] {sClsOnRunnerDoneListener}, sih);
        mSourceReadyCallback = Proxy.newProxyInstance(
                sClsSurfaceTextureSourceListener.getClassLoader(),
                new Class[] {sClsSurfaceTextureSourceListener}, sih);
        mRecordingDoneListener = Proxy.newProxyInstance(
                sClsOnRecordingDoneListener.getClassLoader(),
                new Class[] {sClsOnRecordingDoneListener}, sih);

        mContext = context;
        mHandler = new Handler(Looper.getMainLooper());
        mSoundPlayer = SoundClips.getPlayer(context);
    }

    public synchronized void setCamera(CameraManager.CameraProxy cameraDevice) {
        switch (mState) {
            case STATE_PREVIEW:
                throw new RuntimeException("setCamera cannot be called while previewing!");
            case STATE_RECORD:
                throw new RuntimeException("setCamera cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setCamera called on an already released recorder!");
            default:
                break;
        }

        mCameraDevice = cameraDevice;
    }

    public void setProfile(CamcorderProfile profile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setProfile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setProfile called on an already released recorder!");
            default:
                break;
        }
        mProfile = profile;
    }

    public void setOutputFile(String outputFile) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = outputFile;
        mFd = null;
    }

    public void setOutputFile(FileDescriptor fd) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setOutputFile cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setOutputFile called on an already released recorder!");
            default:
                break;
        }

        mOutputFile = null;
        mFd = fd;
    }

    /**
     * Sets the maximum file size (in bytes) of the recording session. This is
     * passed on to the MediaEncoderFilter and ultimately to the MediaRecorder.
     * A zero or negative value disables the limit.
     */
    public synchronized void setMaxFileSize(long maxFileSize) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxFileSize cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setMaxFileSize called on an already released recorder!");
            default:
                break;
        }
        mMaxFileSize = maxFileSize;
    }

    /**
     * Sets the maximum recording duration (in ms) for the next recording
     * session. Setting it to zero (the default) disables the limit.
     */
    public synchronized void setMaxDuration(int maxDurationMs) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setMaxDuration cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setMaxDuration called on an already released recorder!");
            default:
                break;
        }
        mMaxDurationMs = maxDurationMs;
    }

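    /**
     * Sets the time-lapse capture rate in frames per second. A rate of zero
     * (the default) records in real time rather than time lapse.
     */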
    public void setCaptureRate(double fps) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setCaptureRate cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setCaptureRate called on an already released recorder!");
            default:
                break;
        }

        if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
        mCaptureRate = fps;
    }

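    /**
     * Sets the SurfaceTexture to render the preview into, along with its
     * dimensions. If preview startup was waiting for a surface, it starts now;
     * if preview is already running, the effect is re-initialized against the
     * new surface.
     */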
    public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
                                  int previewWidth,
                                  int previewHeight) {
        if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException(
                    "setPreviewSurfaceTexture cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setPreviewSurfaceTexture called on an already released recorder!");
            default:
                break;
        }

        mPreviewSurfaceTexture = previewSurfaceTexture;
        mPreviewWidth = previewWidth;
        mPreviewHeight = previewHeight;

        switch (mState) {
            case STATE_WAITING_FOR_SURFACE:
                startPreview();
                break;
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                initializeEffect(true);
                break;
        }
    }

    public void setEffect(int effect, Object effectParameter) {
        if (mLogVerbose) Log.v(TAG,
                               "setEffect: effect ID " + effect +
                               ", parameter " + effectParameter.toString());
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setEffect cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException("setEffect called on an already released recorder!");
            default:
                break;
        }

        mEffect = effect;
        mEffectParameter = effectParameter;

        if (mState == STATE_PREVIEW ||
                mState == STATE_STARTING_PREVIEW) {
            initializeEffect(false);
        }
    }

    public interface EffectsListener {
        public void onEffectsUpdate(int effectId, int effectMsg);
        public void onEffectsError(Exception exception, String filePath);
    }

    public void setEffectsListener(EffectsListener listener) {
        mEffectsListener = listener;
    }

    private void setFaceDetectOrientation() {
        if (mCurrentEffect == EFFECT_GOOFY_FACE) {
            Object rotateFilter = getGraphFilter(mRunner, "rotate");
            Object metaRotateFilter = getGraphFilter(mRunner, "metarotate");
            setInputValue(rotateFilter, "rotation", mOrientationHint);
            int reverseDegrees = (360 - mOrientationHint) % 360;
            setInputValue(metaRotateFilter, "rotation", reverseDegrees);
        }
    }

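    // Sets the "recorder" filter's input region as a quad of (x, y) points.
    // The front camera mirrors its preview, so the quad is flipped to
    // compensate; the flip axis depends on the orientation hint.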
    private void setRecordingOrientation() {
        if (mState != STATE_RECORD && mRunner != null) {
            Object bl = newInstance(sCtPoint, new Object[] {0, 0});
            Object br = newInstance(sCtPoint, new Object[] {1, 0});
            Object tl = newInstance(sCtPoint, new Object[] {0, 1});
            Object tr = newInstance(sCtPoint, new Object[] {1, 1});
            Object recordingRegion;
            if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                // The back camera is not mirrored, so use an identity transform
                recordingRegion = newInstance(sCtQuad, new Object[] {bl, br, tl, tr});
            } else {
                // The recording region needs to be tweaked for front cameras,
                // since they mirror their preview
                if (mOrientationHint == 0 || mOrientationHint == 180) {
                    // Horizontal flip in landscape
                    recordingRegion = newInstance(sCtQuad, new Object[] {br, bl, tr, tl});
                } else {
                    // Horizontal flip in portrait
                    recordingRegion = newInstance(sCtQuad, new Object[] {tl, tr, bl, br});
                }
            }
            Object recorder = getGraphFilter(mRunner, "recorder");
            setInputValue(recorder, "inputRegion", recordingRegion);
        }
    }

    public void setOrientationHint(int degrees) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                        "setOrientationHint called on an already released recorder!");
            default:
                break;
        }
        if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
        mOrientationHint = degrees;
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    public void setCameraDisplayOrientation(int orientation) {
        if (mState != STATE_CONFIGURE) {
            throw new RuntimeException(
                "setCameraDisplayOrientation called after configuration!");
        }
        mCameraDisplayOrientation = orientation;
    }

    public void setCameraFacing(int facing) {
        switch (mState) {
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setCameraFacing called on an already released recorder!");
            default:
                break;
        }
        mCameraFacing = facing;
        setRecordingOrientation();
    }

    public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setInfoListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setInfoListener called on an already released recorder!");
            default:
                break;
        }
        mInfoListener = infoListener;
    }

    public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("setErrorListener cannot be called while recording!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "setErrorListener called on an already released recorder!");
            default:
                break;
        }
        mErrorListener = errorListener;
    }

    private void initializeFilterFramework() {
        mGraphEnv = newInstance(sCtGraphEnvironment);
        invoke(mGraphEnv, sGraphEnvironmentCreateGLEnvironment);

        int videoFrameWidth = mProfile.videoFrameWidth;
        int videoFrameHeight = mProfile.videoFrameHeight;
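        // When the camera is mounted at 90 or 270 degrees relative to the
        // display, the recorded frames are rotated, so swap the encoding
        // width and height to match.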
        if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) {
            int tmp = videoFrameWidth;
            videoFrameWidth = videoFrameHeight;
            videoFrameHeight = tmp;
        }

        invoke(mGraphEnv, sGraphEnvironmentAddReferences,
                new Object[] {new Object[] {
                "textureSourceCallback", mSourceReadyCallback,
                "recordingWidth", videoFrameWidth,
                "recordingHeight", videoFrameHeight,
                "recordingProfile", mProfile,
                "learningDoneListener", mLearningDoneListener,
                "recordingDoneListener", mRecordingDoneListener}});
        mRunner = null;
        mGraphId = -1;
        mCurrentEffect = EFFECT_NONE;
    }

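    // (Re)loads the filter graph for the currently selected effect and swaps
    // in a new graph runner. The backdropper graph is always reloaded, since
    // it must re-learn the background each time it starts.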
    private synchronized void initializeEffect(boolean forceReset) {
        if (forceReset ||
            mCurrentEffect != mEffect ||
            mCurrentEffect == EFFECT_BACKDROPPER) {

            invoke(mGraphEnv, sGraphEnvironmentAddReferences,
                    new Object[] {new Object[] {
                    "previewSurfaceTexture", mPreviewSurfaceTexture,
                    "previewWidth", mPreviewWidth,
                    "previewHeight", mPreviewHeight,
                    "orientation", mOrientationHint}});
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Inform video camera.
                sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
            }

            switch (mEffect) {
                case EFFECT_GOOFY_FACE:
                    mGraphId = (Integer) invoke(mGraphEnv,
                            sGraphEnvironmentLoadGraph,
                            new Object[] {mContext, R.raw.goofy_face});
                    break;
                case EFFECT_BACKDROPPER:
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                    mGraphId = (Integer) invoke(mGraphEnv,
                            sGraphEnvironmentLoadGraph,
                            new Object[] {mContext, R.raw.backdropper});
                    break;
                default:
                    throw new RuntimeException("Unknown effect ID " + mEffect + "!");
            }
            mCurrentEffect = mEffect;

            mOldRunner = mRunner;
            mRunner = invoke(mGraphEnv, sGraphEnvironmentGetRunner,
                    new Object[] {mGraphId,
                    getConstant(sClsGraphEnvironment, "MODE_ASYNCHRONOUS")});
            invoke(mRunner, sGraphRunnerSetDoneCallback, new Object[] {mRunnerDoneCallback});
            if (mLogVerbose) {
                Log.v(TAG, "New runner: " + mRunner
                      + ". Old runner: " + mOldRunner);
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects while running. Stop existing runner.
                // The stop callback will take care of starting new runner.
                mCameraDevice.stopPreview();
                mCameraDevice.setPreviewTextureAsync(null);
                invoke(mOldRunner, sGraphRunnerStop);
            }
        }

        switch (mCurrentEffect) {
            case EFFECT_GOOFY_FACE:
                tryEnableVideoStabilization(true);
                Object goofyFilter = getGraphFilter(mRunner, "goofyrenderer");
                setInputValue(goofyFilter, "currentEffect",
                        ((Integer) mEffectParameter).intValue());
                break;
            case EFFECT_BACKDROPPER:
                tryEnableVideoStabilization(false);
                Object backgroundSrc = getGraphFilter(mRunner, "background");
                if (ApiHelper.HAS_EFFECTS_RECORDING_CONTEXT_INPUT) {
                    // Set the context before setting sourceUrl to guarantee
                    // that the content URI gets resolved properly.
                    setInputValue(backgroundSrc, "context", mContext);
                }
                setInputValue(backgroundSrc, "sourceUrl", mEffectParameter);
                // For the front camera, the background video needs to be
                // mirrored in the backdropper filter
                if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    Object replacer = getGraphFilter(mRunner, "replacer");
                    setInputValue(replacer, "mirrorBg", true);
                    if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
                }
                break;
            default:
                break;
        }
        setFaceDetectOrientation();
        setRecordingOrientation();
    }

    public synchronized void startPreview() {
        if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");

        switch (mState) {
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                // Already running preview
                Log.w(TAG, "startPreview called when already running preview");
                return;
            case STATE_RECORD:
                throw new RuntimeException("Cannot start preview when already recording!");
            case STATE_RELEASED:
                throw new RuntimeException("startPreview called on an already released recorder!");
            default:
                break;
        }

        if (mEffect == EFFECT_NONE) {
            throw new RuntimeException("No effect selected!");
        }
        if (mEffectParameter == null) {
            throw new RuntimeException("No effect parameter provided!");
        }
        if (mProfile == null) {
            throw new RuntimeException("No recording profile provided!");
        }
        if (mPreviewSurfaceTexture == null) {
            if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one");
            mState = STATE_WAITING_FOR_SURFACE;
            return;
        }
        if (mCameraDevice == null) {
            throw new RuntimeException("No camera to record from!");
        }

        if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph.");
        initializeFilterFramework();

        initializeEffect(true);

        mState = STATE_STARTING_PREVIEW;
        invoke(mRunner, sGraphRunnerRun);
        // Rest of preview startup handled in mSourceReadyCallback
    }

    private Object invokeObjectEquals(Object proxy, Object[] args) {
        return Boolean.valueOf(proxy == args[0]);
    }

    private Object invokeObjectToString() {
        return "Proxy-" + toString();
    }

    private void invokeOnLearningDone() {
        if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
        // Called in a processing thread, so have to post message back to UI
        // thread
        sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
        enable3ALocks(true);
    }

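    // Called on the graph thread when a runner halts, whether from an error,
    // an effect switch, or a normal stop. Tears down the old graph and, when
    // switching effects, starts the new runner.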
    private void invokeOnRunnerDone(Object[] args) {
        int runnerDoneResult = (Integer) args[0];
        synchronized (EffectsRecorder.this) {
            if (mLogVerbose) {
                Log.v(TAG,
                      "Graph runner done (" + EffectsRecorder.this
                      + ", mRunner " + mRunner
                      + ", mOldRunner " + mOldRunner + ")");
            }
            if (runnerDoneResult ==
                    (Integer) getConstant(sClsGraphRunner, "RESULT_ERROR")) {
                // Handle error case
                Log.e(TAG, "Error running filter graph!");
                Exception e = null;
                if (mRunner != null) {
                    e = (Exception) invoke(mRunner, sGraphRunnerGetError);
                } else if (mOldRunner != null) {
                    e = (Exception) invoke(mOldRunner, sGraphRunnerGetError);
                }
                raiseError(e);
            }
            if (mOldRunner != null) {
                // Tear down old graph if available
                if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
                Object glEnv = getContextGLEnvironment(mGraphEnv);
                if (glEnv != null && !(Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
                    invoke(glEnv, sGLEnvironmentActivate);
                }
                getGraphTearDown(mOldRunner,
                        invoke(mGraphEnv, sGraphEnvironmentGetContext));
                if (glEnv != null && (Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
                    invoke(glEnv, sGLEnvironmentDeactivate);
                }
                mOldRunner = null;
            }
            if (mState == STATE_PREVIEW ||
                    mState == STATE_STARTING_PREVIEW) {
                // Switching effects, start up the new runner
                if (mLogVerbose) {
                    Log.v(TAG, "Previous effect halted. Running graph again. state: "
                            + mState);
                }
                tryEnable3ALocks(false);
                // On an error, the graph restarts from the beginning, and for
                // the BACKDROPPER effect the learner re-learns the background.
                // So we need to show the learning dialog again; otherwise the
                // user could start recording before learning is done, and the
                // new background would come up mid-recording, leaving the
                // final video with a heterogeneous background.
                // For the BACKDROPPER effect, this path is also taken at the
                // end of a normal recording session, in which case the graph
                // does not restart and the learner does not re-learn, so the
                // learning dialog should not be shown.
                if (runnerDoneResult == (Integer) getConstant(
                        sClsGraphRunner, "RESULT_ERROR")
                        && mCurrentEffect == EFFECT_BACKDROPPER) {
                    sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
                }
                invoke(mRunner, sGraphRunnerRun);
            } else if (mState != STATE_RELEASED) {
                // Shutting down effects
                if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
                tryEnable3ALocks(false);
                sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
            } else {
                // STATE_RELEASED - camera will be/has been released as well, do nothing.
            }
        }
    }

    private void invokeOnSurfaceTextureSourceReady(Object[] args) {
        SurfaceTexture source = (SurfaceTexture) args[0];
        if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
        synchronized (EffectsRecorder.this) {
            mTextureSource = source;

            if (mState == STATE_CONFIGURE) {
                // Stop preview happened while the runner was doing startup tasks
                // Since we haven't started anything up, don't do anything
                // Rest of cleanup will happen in onRunnerDone
                if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
                return;
            }
            if (mState == STATE_RELEASED) {
                // EffectsRecorder has been released, so don't touch the camera device
                // or anything else
                if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
                return;
            }
            if (source == null) {
                if (mLogVerbose) {
                    Log.v(TAG, "Ready callback: source null! Looks like graph was closed!");
                }
                if (mState == STATE_PREVIEW ||
                        mState == STATE_STARTING_PREVIEW ||
                        mState == STATE_RECORD) {
                    // A null source here means the graph is shutting down
                    // unexpectedly, so we need to turn off preview before
                    // the surface texture goes away.
                    if (mLogVerbose) {
                        Log.v(TAG, "Ready callback: State: " + mState
                                + ". stopCameraPreview");
                    }

                    stopCameraPreview();
                }
                return;
            }

            // Lock AE/AWB to reduce transition flicker
            tryEnable3ALocks(true);

            mCameraDevice.stopPreview();
            if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
            mCameraDevice.setPreviewTextureAsync(mTextureSource);

            mCameraDevice.startPreviewAsync();

            // Unlock AE/AWB after preview started
            tryEnable3ALocks(false);

            mState = STATE_PREVIEW;

            if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");

            // Sending a message to listener that preview is complete
            sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
        }
    }

    private void invokeOnRecordingDone() {
        // Forward the callback to the VideoModule object (as an asynchronous event).
        if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
        sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
    }

    public synchronized void startRecording() {
        if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
                throw new RuntimeException("Already recording, cannot begin anew!");
            case STATE_RELEASED:
                throw new RuntimeException(
                    "startRecording called on an already released recorder!");
            default:
                break;
        }

        if ((mOutputFile == null) && (mFd == null)) {
            throw new RuntimeException("No output file name or descriptor provided!");
        }

        if (mState == STATE_CONFIGURE) {
            startPreview();
        }

        Object recorder = getGraphFilter(mRunner, "recorder");
        if (mFd != null) {
            setInputValue(recorder, "outputFileDescriptor", mFd);
        } else {
            setInputValue(recorder, "outputFile", mOutputFile);
        }
        // It is OK to set the audio source without checking for time lapse here,
        // since that check is done in the MediaEncoderFilter itself
        setInputValue(recorder, "audioSource", MediaRecorder.AudioSource.CAMCORDER);
        setInputValue(recorder, "recordingProfile", mProfile);
        setInputValue(recorder, "orientationHint", mOrientationHint);
        // It is important to set the time-lapse interval to 0 if the capture rate
        // is not > 0, since the recorder is not recreated each time recording
        // starts. The recorder infers whether the capture is time lapse from the
        // value of this interval
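        // For example, a capture rate of 0.5 fps yields an interval of
        // 1 / 0.5 s = 2,000,000 us between captured frames.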
        boolean captureTimeLapse = mCaptureRate > 0;
        if (captureTimeLapse) {
            double timeBetweenFrameCapture = 1 / mCaptureRate;
            setInputValue(recorder, "timelapseRecordingIntervalUs",
                    (long) (1000000 * timeBetweenFrameCapture));
        } else {
            setInputValue(recorder, "timelapseRecordingIntervalUs", 0L);
        }

        if (mInfoListener != null) {
            setInputValue(recorder, "infoListener", mInfoListener);
        }
        if (mErrorListener != null) {
            setInputValue(recorder, "errorListener", mErrorListener);
        }
        setInputValue(recorder, "maxFileSize", mMaxFileSize);
        setInputValue(recorder, "maxDurationMs", mMaxDurationMs);
        setInputValue(recorder, "recording", true);
        mSoundPlayer.play(SoundClips.START_VIDEO_RECORDING);
        mState = STATE_RECORD;
    }

    public synchronized void stopRecording() {
        if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");

        switch (mState) {
            case STATE_CONFIGURE:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                Log.w(TAG, "stopRecording called when recording not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopRecording called on released EffectsRecorder!");
            default:
                break;
        }
        Object recorder = getGraphFilter(mRunner, "recorder");
        setInputValue(recorder, "recording", false);
        mSoundPlayer.play(SoundClips.STOP_VIDEO_RECORDING);
        mState = STATE_PREVIEW;
    }

    // Called to tell the filter graph that the display SurfaceTexture is no longer
    // valid, so the graph should not hold any reference to surfaces created from it.
    public synchronized void disconnectDisplay() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " +
            "SurfaceTexture");
        Object display = getGraphFilter(mRunner, "display");
        invoke(display, sSurfaceTextureTargetDisconnect, new Object[] {
                invoke(mGraphEnv, sGraphEnvironmentGetContext)});
    }

    // The VideoModule will call this to notify that the camera is being
    // released to the outside world. This call should happen after
    // stopRecording; otherwise the effects may throw an exception.
    // With recording stopped, the stopPreview call will not try to
    // release the camera again.
    // This must be called in onPause() if the effects are ON.
    public synchronized void disconnectCamera() {
        if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
        stopCameraPreview();
        mCameraDevice = null;
    }

    // In the normal case, when disconnectCamera() has not been called, the camera
    // device must not be set to null here, since the runner-done callback tries to
    // enable the 3A locks, which require the camera device.
    public synchronized void stopCameraPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping camera preview.");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Nothing to disconnect");
            return;
        }
        mCameraDevice.stopPreview();
        mCameraDevice.setPreviewTextureAsync(null);
    }

    // Stop and release effect resources
    public synchronized void stopPreview() {
        if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
        switch (mState) {
            case STATE_CONFIGURE:
                Log.w(TAG, "stopPreview called when preview not active!");
                return;
            case STATE_RELEASED:
                throw new RuntimeException("stopPreview called on released EffectsRecorder!");
            default:
                break;
        }

        if (mState == STATE_RECORD) {
            stopRecording();
        }

        mCurrentEffect = EFFECT_NONE;

        // This will not do anything if the camera has already been disconnected.
        stopCameraPreview();

        mState = STATE_CONFIGURE;
        mOldRunner = mRunner;
        invoke(mRunner, sGraphRunnerStop);
        mRunner = null;
        // Rest of stop and release handled in mRunnerDoneCallback
    }

    // Tries to enable/disable video stabilization if supported; otherwise returns
    // false. Called from a synchronized block.
    boolean tryEnableVideoStabilization(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization.");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling video stabilization.");
            return false;
        }
        Camera.Parameters params = mCameraDevice.getParameters();

        String vstabSupported = params.get("video-stabilization-supported");
        if ("true".equals(vstabSupported)) {
            if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
            params.set("video-stabilization", toggle ? "true" : "false");
            mCameraDevice.setParameters(params);
            return true;
        }
        if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
        return false;
    }

    // Tries to enable/disable 3A locks if supported; otherwise returns false
    @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
    synchronized boolean tryEnable3ALocks(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling 3A locks.");
            return false;
        }
        Camera.Parameters params = mCameraDevice.getParameters();
        if (Util.isAutoExposureLockSupported(params) &&
            Util.isAutoWhiteBalanceLockSupported(params)) {
            params.setAutoExposureLock(toggle);
            params.setAutoWhiteBalanceLock(toggle);
            mCameraDevice.setParameters(params);
            return true;
        }
        return false;
    }

    // Enables/disables 3A locks if supported; otherwise throws.
    // Use this when the locks are essential to success.
    synchronized void enable3ALocks(boolean toggle) {
        if (mLogVerbose) Log.v(TAG, "Enable3ALocks");
        if (mCameraDevice == null) {
            Log.d(TAG, "Camera already null. Not enabling 3A locks.");
            return;
        }
        if (!tryEnable3ALocks(toggle)) {
            throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
        }
    }

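    // Dispatches filter framework callbacks back to the live EffectsRecorder.
    // Callbacks created for an older recorder instance carry a stale index
    // and are dropped.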
    static class SerializableInvocationHandler
            implements InvocationHandler, Serializable {
        private final int mEffectsRecorderIndex;
        public SerializableInvocationHandler(int index) {
            mEffectsRecorderIndex = index;
        }

        @Override
        public Object invoke(Object proxy, Method method, Object[] args)
                throws Throwable {
            if (sEffectsRecorder == null) return null;
            if (mEffectsRecorderIndex != sEffectsRecorderIndex) {
                Log.v(TAG, "Ignore old callback " + mEffectsRecorderIndex);
                return null;
            }
            if (method.equals(sObjectEquals)) {
                return sEffectsRecorder.invokeObjectEquals(proxy, args);
            } else if (method.equals(sObjectToString)) {
                return sEffectsRecorder.invokeObjectToString();
            } else if (method.equals(sLearningDoneListenerOnLearningDone)) {
                sEffectsRecorder.invokeOnLearningDone();
            } else if (method.equals(sOnRunnerDoneListenerOnRunnerDone)) {
                sEffectsRecorder.invokeOnRunnerDone(args);
            } else if (method.equals(
                    sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady)) {
                sEffectsRecorder.invokeOnSurfaceTextureSourceReady(args);
            } else if (method.equals(sOnRecordingDoneListenerOnRecordingDone)) {
                sEffectsRecorder.invokeOnRecordingDone();
            }
            return null;
        }
    }

    // Indicates that all camera/recording activity needs to halt
    public synchronized void release() {
        if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");

        switch (mState) {
            case STATE_RECORD:
            case STATE_STARTING_PREVIEW:
            case STATE_PREVIEW:
                stopPreview();
                // Fall-through
            default:
                if (mSoundPlayer != null) {
                    mSoundPlayer.release();
                    mSoundPlayer = null;
                }
                mState = STATE_RELEASED;
                break;
        }
        sEffectsRecorder = null;
    }

    private void sendMessage(final int effect, final int msg) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mEffectsListener.onEffectsUpdate(effect, msg);
                }
            });
        }
    }

    private void raiseError(final Exception exception) {
        if (mEffectsListener != null) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (mFd != null) {
                        mEffectsListener.onEffectsError(exception, null);
                    } else {
                        mEffectsListener.onEffectsError(exception, mOutputFile);
                    }
                }
            });
        }
    }

    // invoke method on receiver with no arguments
    private Object invoke(Object receiver, Method method) {
        try {
            return method.invoke(receiver);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    // invoke method on receiver with arguments
    private Object invoke(Object receiver, Method method, Object[] args) {
        try {
            return method.invoke(receiver, args);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

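    // set a filter's input port value via Filter.setInputValue(key, value)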
    private void setInputValue(Object receiver, String key, Object value) {
        try {
            sFilterSetInputValue.invoke(receiver, new Object[] {key, value});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

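    // construct an instance via the given reflected constructor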
    private Object newInstance(Constructor<?> ct, Object[] initArgs) {
        try {
            return ct.newInstance(initArgs);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private Object newInstance(Constructor<?> ct) {
        try {
            return ct.newInstance();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

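    // look up a filter by name in the runner's graph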
    private Object getGraphFilter(Object receiver, String name) {
        try {
            return sFilterGraphGetFilter.invoke(sGraphRunnerGetGraph
                    .invoke(receiver), new Object[] {name});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

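    // fetch the GLEnvironment of the graph environment's filter context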
    private Object getContextGLEnvironment(Object receiver) {
        try {
            return sFilterContextGetGLEnvironment
                    .invoke(sGraphEnvironmentGetContext.invoke(receiver));
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

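    // tear down the graph owned by the given runner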
    private void getGraphTearDown(Object receiver, Object filterContext) {
        try {
            sFilterGraphTearDown.invoke(sGraphRunnerGetGraph.invoke(receiver),
                    new Object[]{filterContext});
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

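    // read a public static constant from the given class by name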
    private Object getConstant(Class<?> cls, String name) {
        try {
            return cls.getDeclaredField(name).get(null);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}