MediaArtistNativeHelper.java revision 600acf14ff12eaf139f0ac644fb7e17849af65fa
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.Iterator;
24import java.util.List;
25import java.util.concurrent.Semaphore;
26
27import android.graphics.Bitmap;
28import android.graphics.BitmapFactory;
29import android.graphics.Canvas;
30import android.graphics.Paint;
31import android.graphics.Rect;
32import android.media.videoeditor.VideoEditor.ExportProgressListener;
33import android.media.videoeditor.VideoEditor.PreviewProgressListener;
34import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
35import android.util.Log;
36import android.util.Pair;
37import android.view.Surface;
38
39/**
40 *This class provide Native methods to be used by MediaArtist {@hide}
41 */
42class MediaArtistNativeHelper {
43    private static final String TAG = "MediaArtistNativeHelper";
44
45    static {
46        System.loadLibrary("videoeditor_jni");
47    }
48
49    private static final int MAX_THUMBNAIL_PERMITTED = 8;
50
51    public static final int TASK_LOADING_SETTINGS = 1;
52    public static final int TASK_ENCODING = 2;
53
54    /**
55     *  The resize paint
56     */
57    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
58
59    private final VideoEditor mVideoEditor;
60    /*
61     *  Semaphore to control preview calls
62     */
63    private final Semaphore mLock;
64
65    private EditSettings mStoryBoardSettings;
66
67    private String mOutputFilename;
68
69    private PreviewClipProperties mClipProperties = null;
70
71    private EditSettings mPreviewEditSettings;
72
73    private AudioSettings mAudioSettings = null;
74
75    private AudioTrack mAudioTrack = null;
76
77    private boolean mInvalidatePreviewArray = true;
78
79    private boolean mRegenerateAudio = true;
80
81    private String mExportFilename = null;
82
83    private int mProgressToApp;
84
85    private String mRenderPreviewOverlayFile;
86    private int mRenderPreviewRenderingMode;
87
88    private boolean mIsFirstProgress;
89
90    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
91
92    // Processing indication
93    public static final int PROCESSING_NONE          = 0;
94    public static final int PROCESSING_AUDIO_PCM     = 1;
95    public static final int PROCESSING_TRANSITION    = 2;
96    public static final int PROCESSING_KENBURNS      = 3;
97    public static final int PROCESSING_INTERMEDIATE1 = 11;
98    public static final int PROCESSING_INTERMEDIATE2 = 12;
99    public static final int PROCESSING_INTERMEDIATE3 = 13;
100    public static final int PROCESSING_EXPORT        = 20;
101
102    private int mProcessingState;
103    private Object mProcessingObject;
104    private PreviewProgressListener mPreviewProgressListener;
105    private ExportProgressListener mExportProgressListener;
106    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
107    private MediaProcessingProgressListener mMediaProcessingProgressListener;
108    private final String mProjectPath;
109
110    private long mPreviewProgress;
111
112    private String mAudioTrackPCMFilePath;
113
114    private int mTotalClips = 0;
115
116    private boolean mErrorFlagSet = false;
117
118    @SuppressWarnings("unused")
119    private int mManualEditContext;
120
121    /* Listeners */
122
123    /**
124     * Interface definition for a listener to be invoked when there is an update
125     * in a running task.
126     */
127    public interface OnProgressUpdateListener {
128        /**
129         * Called when there is an update.
130         *
131         * @param taskId id of the task reporting an update.
132         * @param progress progress of the task [0..100].
133         * @see BasicEdit#TASK_ENCODING
134         */
135        public void OnProgressUpdate(int taskId, int progress);
136    }
137
138    /** Defines the version. */
139    public final class Version {
140
141        /** Major version number */
142        public int major;
143
144        /** Minor version number */
145        public int minor;
146
147        /** Revision number */
148        public int revision;
149
150        /** VIDEOEDITOR major version number */
151        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
152
153        /** VIDEOEDITOR minor version number */
154        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
155
156        /** VIDEOEDITOR revision number */
157        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
158
159        /** Method which returns the current VIDEOEDITOR version */
160        public Version getVersion() {
161            Version version = new Version();
162
163            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
164            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
165            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
166
167            return version;
168        }
169    }
170
171    /**
172     * Defines output audio formats.
173     */
174    public final class AudioFormat {
175        /** No audio present in output clip. Used to generate video only clip */
176        public static final int NO_AUDIO = 0;
177
178        /** AMR Narrow Band. */
179        public static final int AMR_NB = 1;
180
181        /** Advanced Audio Coding (AAC). */
182        public static final int AAC = 2;
183
184        /** Advanced Audio Codec Plus (HE-AAC v1). */
185        public static final int AAC_PLUS = 3;
186
187        /** Advanced Audio Codec Plus (HE-AAC v2). */
188        public static final int ENHANCED_AAC_PLUS = 4;
189
190        /** MPEG layer 3 (MP3). */
191        public static final int MP3 = 5;
192
193        /** Enhanced Variable RateCodec (EVRC). */
194        public static final int EVRC = 6;
195
196        /** PCM (PCM). */
197        public static final int PCM = 7;
198
199        /** No transcoding. Output audio format is same as input audio format */
200        public static final int NULL_AUDIO = 254;
201
202        /** Unsupported audio format. */
203        public static final int UNSUPPORTED_AUDIO = 255;
204    }
205
206    /**
207     * Defines audio sampling frequencies.
208     */
209    public final class AudioSamplingFrequency {
210        /**
211         * Default sampling frequency. Uses the default frequency for a specific
212         * audio format. For AAC the only supported (and thus default) sampling
213         * frequency is 16 kHz. For this audio format the sampling frequency in
214         * the OutputParams.
215         **/
216        public static final int FREQ_DEFAULT = 0;
217
218        /** Audio sampling frequency of 8000 Hz. */
219        public static final int FREQ_8000 = 8000;
220
221        /** Audio sampling frequency of 11025 Hz. */
222        public static final int FREQ_11025 = 11025;
223
224        /** Audio sampling frequency of 12000 Hz. */
225        public static final int FREQ_12000 = 12000;
226
227        /** Audio sampling frequency of 16000 Hz. */
228        public static final int FREQ_16000 = 16000;
229
230        /** Audio sampling frequency of 22050 Hz. */
231        public static final int FREQ_22050 = 22050;
232
233        /** Audio sampling frequency of 24000 Hz. */
234        public static final int FREQ_24000 = 24000;
235
236        /** Audio sampling frequency of 32000 Hz. */
237        public static final int FREQ_32000 = 32000;
238
239        /** Audio sampling frequency of 44100 Hz. */
240        public static final int FREQ_44100 = 44100;
241
242        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
243        public static final int FREQ_48000 = 48000;
244    }
245
246    /**
247     * Defines the supported fixed audio and video bitrates. These values are
248     * for output audio video only.
249     */
250    public final class Bitrate {
251        /** Variable bitrate. Means no bitrate regulation */
252        public static final int VARIABLE = -1;
253
254        /** An undefined bitrate. */
255        public static final int UNDEFINED = 0;
256
257        /** A bitrate of 9.2 kbits/s. */
258        public static final int BR_9_2_KBPS = 9200;
259
260        /** A bitrate of 12.2 kbits/s. */
261        public static final int BR_12_2_KBPS = 12200;
262
263        /** A bitrate of 16 kbits/s. */
264        public static final int BR_16_KBPS = 16000;
265
266        /** A bitrate of 24 kbits/s. */
267        public static final int BR_24_KBPS = 24000;
268
269        /** A bitrate of 32 kbits/s. */
270        public static final int BR_32_KBPS = 32000;
271
272        /** A bitrate of 48 kbits/s. */
273        public static final int BR_48_KBPS = 48000;
274
275        /** A bitrate of 64 kbits/s. */
276        public static final int BR_64_KBPS = 64000;
277
278        /** A bitrate of 96 kbits/s. */
279        public static final int BR_96_KBPS = 96000;
280
281        /** A bitrate of 128 kbits/s. */
282        public static final int BR_128_KBPS = 128000;
283
284        /** A bitrate of 192 kbits/s. */
285        public static final int BR_192_KBPS = 192000;
286
287        /** A bitrate of 256 kbits/s. */
288        public static final int BR_256_KBPS = 256000;
289
290        /** A bitrate of 288 kbits/s. */
291        public static final int BR_288_KBPS = 288000;
292
293        /** A bitrate of 384 kbits/s. */
294        public static final int BR_384_KBPS = 384000;
295
296        /** A bitrate of 512 kbits/s. */
297        public static final int BR_512_KBPS = 512000;
298
299        /** A bitrate of 800 kbits/s. */
300        public static final int BR_800_KBPS = 800000;
301
302        /** A bitrate of 2 Mbits/s. */
303        public static final int BR_2_MBPS = 2000000;
304
305        /** A bitrate of 5 Mbits/s. */
306        public static final int BR_5_MBPS = 5000000;
307
308        /** A bitrate of 8 Mbits/s. */
309        public static final int BR_8_MBPS = 8000000;
310    }
311
312    /**
313     * Defines all supported file types.
314     */
315    public final class FileType {
316        /** 3GPP file type. */
317        public static final int THREE_GPP = 0;
318
319        /** MP4 file type. */
320        public static final int MP4 = 1;
321
322        /** AMR file type. */
323        public static final int AMR = 2;
324
325        /** MP3 audio file type. */
326        public static final int MP3 = 3;
327
328        /** PCM audio file type. */
329        public static final int PCM = 4;
330
331        /** JPEG image file type. */
332        public static final int JPG = 5;
333
334        /** GIF image file type. */
335        public static final int GIF = 7;
336
337        /** PNG image file type. */
338        public static final int PNG = 8;
339
340        /** M4V file type. */
341        public static final int M4V = 10;
342
343        /** Unsupported file type. */
344        public static final int UNSUPPORTED = 255;
345    }
346
347    /**
348     * Defines rendering types. Rendering can only be applied to files
349     * containing video streams.
350     **/
351    public final class MediaRendering {
352        /**
353         * Resize to fit the output video with changing the aspect ratio if
354         * needed.
355         */
356        public static final int RESIZING = 0;
357
358        /**
359         * Crop the input video to fit it with the output video resolution.
360         **/
361        public static final int CROPPING = 1;
362
363        /**
364         * Resize to fit the output video resolution but maintain the aspect
365         * ratio. This framing type adds black borders if needed.
366         */
367        public static final int BLACK_BORDERS = 2;
368    }
369
370    /**
371     * Defines the results.
372     */
373    public final class Result {
374        /** No error. result OK */
375        public static final int NO_ERROR = 0;
376
377        /** File not found */
378        public static final int ERR_FILE_NOT_FOUND = 1;
379
380        /**
381         * In case of UTF8 conversion, the size of the converted path will be
382         * more than the corresponding allocated buffer.
383         */
384        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
385
386        /** Invalid file type. */
387        public static final int ERR_INVALID_FILE_TYPE = 3;
388
389        /** Invalid effect kind. */
390        public static final int ERR_INVALID_EFFECT_KIND = 4;
391
392        /** Invalid video effect. */
393        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
394
395        /** Invalid audio effect. */
396        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
397
398        /** Invalid video transition. */
399        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
400
401        /** Invalid audio transition. */
402        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
403
404        /** Invalid encoding frame rate. */
405        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
406
407        /** External effect is called but this function is not set. */
408        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
409
410        /** External transition is called but this function is not set. */
411        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
412
413        /** Begin time cut is larger than the video clip duration. */
414        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
415
416        /** Begin cut time is larger or equal than end cut. */
417        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
418
419        /** Two consecutive transitions are overlapping on one clip. */
420        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
421
422        /** Internal error, type size mismatch. */
423        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
424
425        /** An input 3GPP file is invalid/corrupted. */
426        public static final int ERR_INVALID_3GPP_FILE = 16;
427
428        /** A file contains an unsupported video format. */
429        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
430
431        /** A file contains an unsupported audio format. */
432        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
433
434        /** A file format is not supported. */
435        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
436
437        /** An input clip has an unexpectedly large Video AU. */
438        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
439
440        /** An input clip has an unexpectedly large Audio AU. */
441        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
442
443        /** An input clip has a corrupted Audio AU. */
444        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
445
446        /** The video encoder encountered an Access Unit error. */
447        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
448
449        /** Unsupported video format for Video Editing. */
450        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
451
452        /** Unsupported H263 profile for Video Editing. */
453        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
454
455        /** Unsupported MPEG-4 profile for Video Editing. */
456        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
457
458        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
459        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
460
461        /** Unsupported audio format for Video Editing. */
462        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
463
464        /** File contains no supported stream. */
465        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
466
467        /** File contains no video stream or an unsupported video stream. */
468        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
469
470        /** Internal error, clip analysis version mismatch. */
471        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
472
473        /**
474         * At least one of the clip analysis has been generated on another
475         * platform (WIN32, ARM, etc.).
476         */
477        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
478
479        /** Clips don't have the same video format (H263 or MPEG4). */
480        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
481
482        /** Clips don't have the same frame size. */
483        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
484
485        /** Clips don't have the same MPEG-4 time scale. */
486        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
487
488        /** Clips don't have the same use of MPEG-4 data partitioning. */
489        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
490
491        /** MP3 clips can't be assembled. */
492        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
493
494        /**
495         * The input 3GPP file does not contain any supported audio or video
496         * track.
497         */
498        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
499
500        /**
501         * The Volume of the added audio track (AddVolume) must be strictly
502         * superior than zero.
503         */
504        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
505
506        /**
507         * The time at which an audio track is added can't be higher than the
508         * input video track duration..
509         */
510        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
511
512        /** The audio track file format setting is undefined. */
513        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
514
515        /** The added audio track stream has an unsupported format. */
516        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
517
518        /** The audio mixing feature doesn't support the audio track type. */
519        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
520
521        /** The audio mixing feature doesn't support MP3 audio tracks. */
522        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
523
524        /**
525         * An added audio track limits the available features: uiAddCts must be
526         * 0 and bRemoveOriginal must be true.
527         */
528        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
529
530        /**
531         * An added audio track limits the available features: uiAddCts must be
532         * 0 and bRemoveOriginal must be true.
533         */
534        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
535
536        /** Input audio track is not of a type that can be mixed with output. */
537        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
538
539        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
540        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
541
542        /**
543         * An added EVRC audio track limit the available features: uiAddCts must
544         * be 0 and bRemoveOriginal must be true.
545         */
546        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
547
548        /** H263 profiles other than 0 are not supported. */
549        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
550
551        /** File contains no video stream or an unsupported video stream. */
552        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
553
554        /** Transcoding of the input file(s) is necessary. */
555        public static final int WAR_TRANSCODING_NECESSARY = 53;
556
557        /**
558         * The size of the output file will exceed the maximum configured value.
559         */
560        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
561
562        /** The time scale is too big. */
563        public static final int WAR_TIMESCALE_TOO_BIG = 55;
564
565        /** The year is out of range */
566        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
567
568        /** The directory could not be opened */
569        public static final int ERR_DIR_OPEN_FAILED = 57;
570
571        /** The directory could not be read */
572        public static final int ERR_DIR_READ_FAILED = 58;
573
574        /** There are no more entries in the current directory */
575        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
576
577        /** The input parameter/s has error */
578        public static final int ERR_PARAMETER = 60;
579
580        /** There is a state machine error */
581        public static final int ERR_STATE = 61;
582
583        /** Memory allocation failed */
584        public static final int ERR_ALLOC = 62;
585
586        /** Context is invalid */
587        public static final int ERR_BAD_CONTEXT = 63;
588
589        /** Context creation failed */
590        public static final int ERR_CONTEXT_FAILED = 64;
591
592        /** Invalid stream ID */
593        public static final int ERR_BAD_STREAM_ID = 65;
594
595        /** Invalid option ID */
596        public static final int ERR_BAD_OPTION_ID = 66;
597
598        /** The option is write only */
599        public static final int ERR_WRITE_ONLY = 67;
600
601        /** The option is read only */
602        public static final int ERR_READ_ONLY = 68;
603
604        /** The feature is not implemented in this version */
605        public static final int ERR_NOT_IMPLEMENTED = 69;
606
607        /** The media type is not supported */
608        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
609
610        /** No data to be encoded */
611        public static final int WAR_NO_DATA_YET = 71;
612
613        /** No data to be decoded */
614        public static final int WAR_NO_MORE_STREAM = 72;
615
616        /** Time stamp is invalid */
617        public static final int WAR_INVALID_TIME = 73;
618
619        /** No more data to be decoded */
620        public static final int WAR_NO_MORE_AU = 74;
621
622        /** Semaphore timed out */
623        public static final int WAR_TIME_OUT = 75;
624
625        /** Memory buffer is full */
626        public static final int WAR_BUFFER_FULL = 76;
627
628        /** Server has asked for redirection */
629        public static final int WAR_REDIRECT = 77;
630
631        /** Too many streams in input */
632        public static final int WAR_TOO_MUCH_STREAMS = 78;
633
634        /** The file cannot be opened/ written into as it is locked */
635        public static final int ERR_FILE_LOCKED = 79;
636
637        /** The file access mode is invalid */
638        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
639
640        /** The file pointer points to an invalid location */
641        public static final int ERR_FILE_INVALID_POSITION = 81;
642
643        /** Invalid string */
644        public static final int ERR_STR_BAD_STRING = 94;
645
646        /** The input string cannot be converted */
647        public static final int ERR_STR_CONV_FAILED = 95;
648
649        /** The string size is too large */
650        public static final int ERR_STR_OVERFLOW = 96;
651
652        /** Bad string arguments */
653        public static final int ERR_STR_BAD_ARGS = 97;
654
655        /** The string value is larger than maximum size allowed */
656        public static final int WAR_STR_OVERFLOW = 98;
657
658        /** The string value is not present in this comparison operation */
659        public static final int WAR_STR_NOT_FOUND = 99;
660
661        /** The thread is not started */
662        public static final int ERR_THREAD_NOT_STARTED = 100;
663
664        /** Trancoding done warning */
665        public static final int WAR_TRANSCODING_DONE = 101;
666
667        /** Unsupported mediatype */
668        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
669
670        /** Input file contains invalid/unsupported streams */
671        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
672
673        /** Invalid input file */
674        public static final int ERR_INVALID_INPUT_FILE = 104;
675
676        /** Invalid output video format */
677        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
678
679        /** Invalid output video frame size */
680        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
681
682        /** Invalid output video frame rate */
683        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
684
685        /** Invalid output audio format */
686        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
687
688        /** Invalid video frame size for H.263 */
689        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
690
691        /** Invalid video frame rate for H.263 */
692        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
693
694        /** invalid playback duration */
695        public static final int ERR_DURATION_IS_NULL = 111;
696
697        /** Invalid H.263 profile in file */
698        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
699
700        /** Invalid AAC sampling frequency */
701        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
702
703        /** Audio conversion failure */
704        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
705
706        /** Invalid trim start and end times */
707        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
708
709        /** End time smaller than start time for trim */
710        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
711
712        /** Output file size is small */
713        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
714
715        /** Output video bitrate is too low */
716        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
717
718        /** Output audio bitrate is too low */
719        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
720
721        /** Output video bitrate is too high */
722        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
723
724        /** Output audio bitrate is too high */
725        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
726
727        /** Output file size is too small */
728        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
729
730        /** Unknown stream type */
731        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
732
733        /** Invalid metadata in input stream */
734        public static final int WAR_READER_NO_METADATA = 124;
735
736        /** Invalid file reader info warning */
737        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
738
739        /** Warning to indicate the the writer is being stopped */
740        public static final int WAR_WRITER_STOP_REQ = 131;
741
742        /** Video decoder failed to provide frame for transcoding */
743        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
744
745        /** Video deblocking filter is not implemented */
746        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
747
748        /** H.263 decoder profile not supported */
749        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
750
751        /** The input file contains unsupported H.263 profile */
752        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
753
754        /** There is no more space to store the output file */
755        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
756
757        /** Internal error. */
758        public static final int ERR_INTERNAL = 255;
759    }
760
761    /**
762     * Defines output video formats.
763     */
764    public final class VideoFormat {
765        /** No video present in output clip. Used to generate audio only clip */
766        public static final int NO_VIDEO = 0;
767
768        /** H263 baseline format. */
769        public static final int H263 = 1;
770
771        /** MPEG4 video Simple Profile format. */
772        public static final int MPEG4 = 2;
773
774        /** MPEG4 video Simple Profile format with support for EMP. */
775        public static final int MPEG4_EMP = 3;
776
777        /** H264 video */
778        public static final int H264 = 4;
779
780        /** No transcoding. Output video format is same as input video format */
781        public static final int NULL_VIDEO = 254;
782
783        /** Unsupported video format. */
784        public static final int UNSUPPORTED = 255;
785    }
786
787    /** Defines video profiles and levels. */
788    public final class VideoProfile {
789        /** H263, Profile 0, Level 10. */
790        public static final int H263_PROFILE_0_LEVEL_10 = MediaProperties.H263_PROFILE_0_LEVEL_10;
791
792        /** H263, Profile 0, Level 20. */
793        public static final int H263_PROFILE_0_LEVEL_20 = MediaProperties.H263_PROFILE_0_LEVEL_20;
794
795        /** H263, Profile 0, Level 30. */
796        public static final int H263_PROFILE_0_LEVEL_30 = MediaProperties.H263_PROFILE_0_LEVEL_30;
797
798        /** H263, Profile 0, Level 40. */
799        public static final int H263_PROFILE_0_LEVEL_40 = MediaProperties.H263_PROFILE_0_LEVEL_40;
800
801        /** H263, Profile 0, Level 45. */
802        public static final int H263_PROFILE_0_LEVEL_45 = MediaProperties.H263_PROFILE_0_LEVEL_45;
803
804        /** MPEG4, Simple Profile, Level 0. */
805        public static final int MPEG4_SP_LEVEL_0 = MediaProperties.MPEG4_SP_LEVEL_0;
806
807        /** MPEG4, Simple Profile, Level 0B. */
808        public static final int MPEG4_SP_LEVEL_0B = MediaProperties.MPEG4_SP_LEVEL_0B;
809
810        /** MPEG4, Simple Profile, Level 1. */
811        public static final int MPEG4_SP_LEVEL_1 = MediaProperties.MPEG4_SP_LEVEL_1;
812
813        /** MPEG4, Simple Profile, Level 2. */
814        public static final int MPEG4_SP_LEVEL_2 = MediaProperties.MPEG4_SP_LEVEL_2;
815
816        /** MPEG4, Simple Profile, Level 3. */
817        public static final int MPEG4_SP_LEVEL_3 = MediaProperties.MPEG4_SP_LEVEL_3;
818
819        /** MPEG4, Simple Profile, Level 4A. */
820        public static final int MPEG4_SP_LEVEL_4A = MediaProperties.MPEG4_SP_LEVEL_4A;
821
822        /** MPEG4, Simple Profile, Level 0. */
823        public static final int MPEG4_SP_LEVEL_5 = MediaProperties.MPEG4_SP_LEVEL_5;
824
825        /** H264, Profile 0, Level 1. */
826        public static final int H264_PROFILE_0_LEVEL_1 = MediaProperties.H264_PROFILE_0_LEVEL_1;
827
828        /** H264, Profile 0, Level 1b. */
829        public static final int H264_PROFILE_0_LEVEL_1b = MediaProperties.H264_PROFILE_0_LEVEL_1B;
830
831        /** H264, Profile 0, Level 1.1 */
832        public static final int H264_PROFILE_0_LEVEL_1_1 = MediaProperties.H264_PROFILE_0_LEVEL_1_1;
833
834        /** H264, Profile 0, Level 1.2 */
835        public static final int H264_PROFILE_0_LEVEL_1_2 = MediaProperties.H264_PROFILE_0_LEVEL_1_2;
836
837        /** H264, Profile 0, Level 1.3 */
838        public static final int H264_PROFILE_0_LEVEL_1_3 = MediaProperties.H264_PROFILE_0_LEVEL_1_3;
839
840        /** H264, Profile 0, Level 2. */
841        public static final int H264_PROFILE_0_LEVEL_2 = MediaProperties.H264_PROFILE_0_LEVEL_2;
842
843        /** H264, Profile 0, Level 2.1 */
844        public static final int H264_PROFILE_0_LEVEL_2_1 = MediaProperties.H264_PROFILE_0_LEVEL_2_1;
845
846        /** H264, Profile 0, Level 2.2 */
847        public static final int H264_PROFILE_0_LEVEL_2_2 = MediaProperties.H264_PROFILE_0_LEVEL_2_2;
848
849        /** H264, Profile 0, Level 3. */
850        public static final int H264_PROFILE_0_LEVEL_3 = MediaProperties.H264_PROFILE_0_LEVEL_3;
851
852        /** H264, Profile 0, Level 3.1 */
853        public static final int H264_PROFILE_0_LEVEL_3_1 = MediaProperties.H264_PROFILE_0_LEVEL_3_1;
854
855        /** H264, Profile 0, Level 3.2 */
856        public static final int H264_PROFILE_0_LEVEL_3_2 = MediaProperties.H264_PROFILE_0_LEVEL_3_2;
857
858        /** H264, Profile 0, Level 4. */
859        public static final int H264_PROFILE_0_LEVEL_4 = MediaProperties.H264_PROFILE_0_LEVEL_4;
860
861        /** H264, Profile 0, Level 4.1 */
862        public static final int H264_PROFILE_0_LEVEL_4_1 = MediaProperties.H264_PROFILE_0_LEVEL_4_1;
863
864        /** H264, Profile 0, Level 4.2 */
865        public static final int H264_PROFILE_0_LEVEL_4_2 = MediaProperties.H264_PROFILE_0_LEVEL_4_2;
866
867        /** H264, Profile 0, Level 5. */
868        public static final int H264_PROFILE_0_LEVEL_5 = MediaProperties.H264_PROFILE_0_LEVEL_5;
869
870        /** H264, Profile 0, Level 5.1 */
871        public static final int H264_PROFILE_0_LEVEL_5_1 = MediaProperties.H264_PROFILE_0_LEVEL_5_1;
872
873        /** Profile out of range. */
874        public static final int OUT_OF_RANGE = MediaProperties.UNSUPPORTED_PROFILE_LEVEL;
875    }
876
877    /** Defines video frame sizes. */
878    public final class VideoFrameSize {
879
880        public static final int SIZE_UNDEFINED = -1;
881
882        /** SQCIF 128 x 96 pixels. */
883        public static final int SQCIF = 0;
884
885        /** QQVGA 160 x 120 pixels. */
886        public static final int QQVGA = 1;
887
888        /** QCIF 176 x 144 pixels. */
889        public static final int QCIF = 2;
890
891        /** QVGA 320 x 240 pixels. */
892        public static final int QVGA = 3;
893
894        /** CIF 352 x 288 pixels. */
895        public static final int CIF = 4;
896
897        /** VGA 640 x 480 pixels. */
898        public static final int VGA = 5;
899
900        /** WVGA 800 X 480 pixels */
901        public static final int WVGA = 6;
902
903        /** NTSC 720 X 480 pixels */
904        public static final int NTSC = 7;
905
906        /** 640 x 360 */
907        public static final int nHD = 8;
908
909        /** 854 x 480 */
910        public static final int WVGA16x9 = 9;
911
912        /** 720p 1280 X 720 */
913        public static final int V720p = 10;
914
915        /** W720p 1080 x 720 */
916        public static final int W720p = 11;
917
918        /** S720p 960 x 720 */
919        public static final int S720p = 12;
920
921        /** 1080p 1920 x 1080 */
922        public static final int V1080p = 13;
923    }
924
925    /**
926     * Defines output video frame rates.
927     */
928    public final class VideoFrameRate {
929        /** Frame rate of 5 frames per second. */
930        public static final int FR_5_FPS = 0;
931
932        /** Frame rate of 7.5 frames per second. */
933        public static final int FR_7_5_FPS = 1;
934
935        /** Frame rate of 10 frames per second. */
936        public static final int FR_10_FPS = 2;
937
938        /** Frame rate of 12.5 frames per second. */
939        public static final int FR_12_5_FPS = 3;
940
941        /** Frame rate of 15 frames per second. */
942        public static final int FR_15_FPS = 4;
943
944        /** Frame rate of 20 frames per second. */
945        public static final int FR_20_FPS = 5;
946
947        /** Frame rate of 25 frames per second. */
948        public static final int FR_25_FPS = 6;
949
950        /** Frame rate of 30 frames per second. */
951        public static final int FR_30_FPS = 7;
952    }
953
954    /**
955     * Defines Video Effect Types.
956     */
957    public static class VideoEffect {
958
959        public static final int NONE = 0;
960
961        public static final int FADE_FROM_BLACK = 8;
962
963        public static final int FADE_TO_BLACK = 16;
964
965        public static final int EXTERNAL = 256;
966
967        public static final int BLACK_AND_WHITE = 257;
968
969        public static final int PINK = 258;
970
971        public static final int GREEN = 259;
972
973        public static final int SEPIA = 260;
974
975        public static final int NEGATIVE = 261;
976
977        public static final int FRAMING = 262;
978
979        public static final int TEXT = 263;
980
981        public static final int ZOOM_IN = 264;
982
983        public static final int ZOOM_OUT = 265;
984
985        public static final int FIFTIES = 266;
986
987        public static final int COLORRGB16 = 267;
988
989        public static final int GRADIENT = 268;
990    }
991
992    /**
993     * Defines the video transitions.
994     */
995    public static class VideoTransition {
996        /** No transition */
997        public static final int NONE = 0;
998
999        /** Cross fade transition */
1000        public static final int CROSS_FADE = 1;
1001
1002        /** External transition. Currently not available. */
1003        public static final int EXTERNAL = 256;
1004
1005        /** AlphaMagic transition. */
1006        public static final int ALPHA_MAGIC = 257;
1007
1008        /** Slide transition. */
1009        public static final int SLIDE_TRANSITION = 258;
1010
1011        /** Fade to black transition. */
1012        public static final int FADE_BLACK = 259;
1013    }
1014
1015    /**
1016     * Defines settings for the AlphaMagic transition
1017     */
1018    public static class AlphaMagicSettings {
1019        /** Name of the alpha file (JPEG file). */
1020        public String file;
1021
1022        /** Blending percentage [0..100] 0 = no blending. */
1023        public int blendingPercent;
1024
1025        /** Invert the default rotation direction of the AlphaMagic effect. */
1026        public boolean invertRotation;
1027
1028        public int rgbWidth;
1029        public int rgbHeight;
1030    }
1031
1032    /** Defines the direction of the Slide transition. */
1033    public static final class SlideDirection {
1034
1035        /** Right out left in. */
1036        public static final int RIGHT_OUT_LEFT_IN = 0;
1037
1038        /** Left out right in. */
1039        public static final int LEFT_OUT_RIGTH_IN = 1;
1040
1041        /** Top out bottom in. */
1042        public static final int TOP_OUT_BOTTOM_IN = 2;
1043
1044        /** Bottom out top in */
1045        public static final int BOTTOM_OUT_TOP_IN = 3;
1046    }
1047
1048    /** Defines the Slide transition settings. */
1049    public static class SlideTransitionSettings {
1050        /**
1051         * Direction of the slide transition. See {@link SlideDirection
1052         * SlideDirection} for valid values.
1053         */
1054        public int direction;
1055    }
1056
1057    /**
1058     * Defines the settings of a single clip.
1059     */
1060    public static class ClipSettings {
1061
1062        /**
1063         * The path to the clip file.
1064         * <p>
1065         * File format of the clip, it can be:
1066         * <ul>
1067         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
1068         * <li>JPG file
1069         * </ul>
1070         */
1071
1072        public String clipPath;
1073
1074        /**
1075         * The path of the decoded file. This is used only for image files.
1076         */
1077        public String clipDecodedPath;
1078
1079        /**
1080         * The path of the Original file. This is used only for image files.
1081         */
1082        public String clipOriginalPath;
1083
1084        /**
1085         * File type of the clip. See {@link FileType FileType} for valid
1086         * values.
1087         */
1088        public int fileType;
1089
1090        /** Begin of the cut in the clip in milliseconds. */
1091        public int beginCutTime;
1092
1093        /**
1094         * End of the cut in the clip in milliseconds. Set both
1095         * <code>beginCutTime</code> and <code>endCutTime</code> to
1096         * <code>0</code> to get the full length of the clip without a cut. In
1097         * case of JPG clip, this is the duration of the JPEG file.
1098         */
1099        public int endCutTime;
1100
1101        /**
1102         * Begin of the cut in the clip in percentage of the file duration.
1103         */
1104        public int beginCutPercent;
1105
1106        /**
1107         * End of the cut in the clip in percentage of the file duration. Set
1108         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1109         * <code>0</code> to get the full length of the clip without a cut.
1110         */
1111        public int endCutPercent;
1112
1113        /** Enable panning and zooming. */
1114        public boolean panZoomEnabled;
1115
1116        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1117        public int panZoomPercentStart;
1118
1119        /** Top left X coordinate at start of clip. */
1120        public int panZoomTopLeftXStart;
1121
1122        /** Top left Y coordinate at start of clip. */
1123        public int panZoomTopLeftYStart;
1124
1125        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1126        public int panZoomPercentEnd;
1127
1128        /** Top left X coordinate at end of clip. */
1129        public int panZoomTopLeftXEnd;
1130
1131        /** Top left Y coordinate at end of clip. */
1132        public int panZoomTopLeftYEnd;
1133
1134        /**
1135         * Set The media rendering. See {@link MediaRendering MediaRendering}
1136         * for valid values.
1137         */
1138        public int mediaRendering;
1139
1140        /**
1141         * RGB width and Height
1142         */
1143         public int rgbWidth;
1144         public int rgbHeight;
1145    }
1146
1147    /**
1148     * Defines settings for a transition.
1149     */
1150    public static class TransitionSettings {
1151
1152        /** Duration of the transition in msec. */
1153        public int duration;
1154
1155        /**
1156         * Transition type for video. See {@link VideoTransition
1157         * VideoTransition} for valid values.
1158         */
1159        public int videoTransitionType;
1160
1161        /**
1162         * Transition type for audio. See {@link AudioTransition
1163         * AudioTransition} for valid values.
1164         */
1165        public int audioTransitionType;
1166
1167        /**
1168         * Transition behaviour. See {@link TransitionBehaviour
1169         * TransitionBehaviour} for valid values.
1170         */
1171        public int transitionBehaviour;
1172
1173        /**
1174         * Settings for AlphaMagic transition. Only needs to be set if
1175         * <code>videoTransitionType</code> is set to
1176         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1177         * {@link AlphaMagicSettings AlphaMagicSettings}.
1178         */
1179        public AlphaMagicSettings alphaSettings;
1180
1181        /**
1182         * Settings for the Slide transition. See
1183         * {@link SlideTransitionSettings SlideTransitionSettings}.
1184         */
1185        public SlideTransitionSettings slideSettings;
1186    }
1187
1188    public static final class AudioTransition {
1189        /** No audio transition. */
1190        public static final int NONE = 0;
1191
1192        /** Cross-fade audio transition. */
1193        public static final int CROSS_FADE = 1;
1194    }
1195
1196    /**
1197     * Defines transition behaviors.
1198     */
1199    public static final class TransitionBehaviour {
1200
1201        /** The transition uses an increasing speed. */
1202        public static final int SPEED_UP = 0;
1203
1204        /** The transition uses a linear (constant) speed. */
1205        public static final int LINEAR = 1;
1206
1207        /** The transition uses a decreasing speed. */
1208        public static final int SPEED_DOWN = 2;
1209
1210        /**
1211         * The transition uses a constant speed, but slows down in the middle
1212         * section.
1213         */
1214        public static final int SLOW_MIDDLE = 3;
1215
1216        /**
1217         * The transition uses a constant speed, but increases speed in the
1218         * middle section.
1219         */
1220        public static final int FAST_MIDDLE = 4;
1221    }
1222
1223    /**
1224     * Defines settings for the background music.
1225     */
1226    public static class BackgroundMusicSettings {
1227
1228        /** Background music file. */
1229        public String file;
1230
1231        /** File type. See {@link FileType FileType} for valid values. */
1232        public int fileType;
1233
1234        /**
1235         * Insertion time in milliseconds, in the output video where the
1236         * background music must be inserted.
1237         */
1238        public long insertionTime;
1239
1240        /**
1241         * Volume, as a percentage of the background music track, to use. If
1242         * this field is set to 100, the background music will replace the audio
1243         * from the video input file(s).
1244         */
1245        public int volumePercent;
1246
1247        /**
1248         * Start time in milliseconds in the background muisc file from where
1249         * the background music should loop. Set both <code>beginLoop</code> and
1250         * <code>endLoop</code> to <code>0</code> to disable looping.
1251         */
1252        public long beginLoop;
1253
1254        /**
1255         * End time in milliseconds in the background music file to where the
1256         * background music should loop. Set both <code>beginLoop</code> and
1257         * <code>endLoop</code> to <code>0</code> to disable looping.
1258         */
1259        public long endLoop;
1260
1261        public boolean enableDucking;
1262
1263        public int duckingThreshold;
1264
1265        public int lowVolume;
1266
1267        public boolean isLooping;
1268    }
1269
1270    /** Defines settings for an effect. */
1271    public static class AudioEffect {
1272        /** No audio effect. */
1273        public static final int NONE = 0;
1274
1275        /** Fade-in effect. */
1276        public static final int FADE_IN = 8;
1277
1278        /** Fade-out effect. */
1279        public static final int FADE_OUT = 16;
1280    }
1281
1282    /** Defines the effect settings. */
1283    public static class EffectSettings {
1284
1285        /** Start time of the effect in milliseconds. */
1286        public int startTime;
1287
1288        /** Duration of the effect in milliseconds. */
1289        public int duration;
1290
1291        /**
1292         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1293         * values.
1294         */
1295        public int videoEffectType;
1296
1297        /**
1298         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1299         * values.
1300         */
1301        public int audioEffectType;
1302
1303        /**
1304         * Start time of the effect in percents of the duration of the clip. A
1305         * value of 0 percent means start time is from the beginning of the
1306         * clip.
1307         */
1308        public int startPercent;
1309
1310        /**
1311         * Duration of the effect in percents of the duration of the clip.
1312         */
1313        public int durationPercent;
1314
1315        /**
1316         * Framing file.
1317         * <p>
1318         * This field is only used when the field <code>videoEffectType</code>
1319         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1320         * this field is ignored.
1321         */
1322        public String framingFile;
1323
1324        /**
1325         * Framing buffer.
1326         * <p>
1327         * This field is only used when the field <code>videoEffectType</code>
1328         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1329         * this field is ignored.
1330         */
1331        public int[] framingBuffer;
1332
1333        /**
1334         * Bitmap type Can be from RGB_565 (4), ARGB_4444 (5), ARGB_8888 (6);
1335         **/
1336
1337        public int bitmapType;
1338
1339        public int width;
1340
1341        public int height;
1342
1343        /**
1344         * Top left x coordinate. This coordinate is used to set the x
1345         * coordinate of the picture in the framing file when the framing file
1346         * is selected. The x coordinate is also used to set the location of the
1347         * text in the text effect.
1348         * <p>
1349         * This field is only used when the field <code>videoEffectType</code>
1350         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1351         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1352         * ignored.
1353         */
1354        public int topLeftX;
1355
1356        /**
1357         * Top left y coordinate. This coordinate is used to set the y
1358         * coordinate of the picture in the framing file when the framing file
1359         * is selected. The y coordinate is also used to set the location of the
1360         * text in the text effect.
1361         * <p>
1362         * This field is only used when the field <code>videoEffectType</code>
1363         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1364         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1365         * ignored.
1366         */
1367        public int topLeftY;
1368
1369        /**
1370         * Should the frame be resized or not. If this field is set to
1371         * <link>true</code> then the frame size is matched with the output
1372         * video size.
1373         * <p>
1374         * This field is only used when the field <code>videoEffectType</code>
1375         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1376         * this field is ignored.
1377         */
1378        public boolean framingResize;
1379
1380        /**
1381         * Size to which the framing buffer needs to be resized to
1382         * This is valid only if framingResize is true
1383         */
1384        public int framingScaledSize;
1385        /**
1386         * Text to insert in the video.
1387         * <p>
1388         * This field is only used when the field <code>videoEffectType</code>
1389         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1390         * field is ignored.
1391         */
1392        public String text;
1393
1394        /**
1395         * Text attributes for the text to insert in the video.
1396         * <p>
1397         * This field is only used when the field <code>videoEffectType</code>
1398         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1399         * field is ignored. For more details about this field see the
1400         * integration guide.
1401         */
1402        public String textRenderingData;
1403
1404        /** Width of the text buffer in pixels. */
1405        public int textBufferWidth;
1406
1407        /** Height of the text buffer in pixels. */
1408        public int textBufferHeight;
1409
1410        /**
1411         * Processing rate for the fifties effect. A high value (e.g. 30)
1412         * results in high effect strength.
1413         * <p>
1414         * This field is only used when the field <code>videoEffectType</code>
1415         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1416         * this field is ignored.
1417         */
1418        public int fiftiesFrameRate;
1419
1420        /**
1421         * RGB 16 color of the RGB16 and gradient color effect.
1422         * <p>
1423         * This field is only used when the field <code>videoEffectType</code>
1424         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1425         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1426         * field is ignored.
1427         */
1428        public int rgb16InputColor;
1429
1430        /**
1431         * Start alpha blending percentage.
1432         * <p>
1433         * This field is only used when the field <code>videoEffectType</code>
1434         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1435         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1436         * is ignored.
1437         */
1438        public int alphaBlendingStartPercent;
1439
1440        /**
1441         * Middle alpha blending percentage.
1442         * <p>
1443         * This field is only used when the field <code>videoEffectType</code>
1444         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1445         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1446         * is ignored.
1447         */
1448        public int alphaBlendingMiddlePercent;
1449
1450        /**
1451         * End alpha blending percentage.
1452         * <p>
1453         * This field is only used when the field <code>videoEffectType</code>
1454         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1455         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1456         * is ignored.
1457         */
1458        public int alphaBlendingEndPercent;
1459
1460        /**
1461         * Duration, in percentage of effect duration of the fade-in phase.
1462         * <p>
1463         * This field is only used when the field <code>videoEffectType</code>
1464         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1465         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1466         * is ignored.
1467         */
1468        public int alphaBlendingFadeInTimePercent;
1469
1470        /**
1471         * Duration, in percentage of effect duration of the fade-out phase.
1472         * <p>
1473         * This field is only used when the field <code>videoEffectType</code>
1474         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1475         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1476         * is ignored.
1477         */
1478        public int alphaBlendingFadeOutTimePercent;
1479    }
1480
1481    /** Defines the clip properties for preview */
1482    public static class PreviewClips {
1483
1484        /**
1485         * The path to the clip file.
1486         * <p>
1487         * File format of the clip, it can be:
1488         * <ul>
1489         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
1490         * <li>JPG file
1491         * </ul>
1492         */
1493
1494        public String clipPath;
1495
1496        /**
1497         * File type of the clip. See {@link FileType FileType} for valid
1498         * values.
1499         */
1500        public int fileType;
1501
1502        /** Begin of the cut in the clip in milliseconds. */
1503        public long beginPlayTime;
1504
1505        public long endPlayTime;
1506
1507        /**
1508         * Set The media rendering. See {@link MediaRendering MediaRendering}
1509         * for valid values.
1510         */
1511        public int mediaRendering;
1512
1513    }
1514
1515    /** Defines the audio settings. */
1516    public static class AudioSettings {
1517
1518        String pFile;
1519
1520        /** < PCM file path */
1521        String Id;
1522
1523        boolean bRemoveOriginal;
1524
1525        /** < If true, the original audio track is not taken into account */
1526        int channels;
1527
1528        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1529        int Fs;
1530
1531        /**
1532         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1533         * BGM clip
1534         */
1535        int ExtendedFs;
1536
1537        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1538        long startMs;
1539
1540        /** < Time, in milliseconds, at which the added audio track is inserted */
1541        long beginCutTime;
1542
1543        long endCutTime;
1544
1545        int fileType;
1546
1547        int volume;
1548
1549        /** < Volume, in percentage, of the added audio track */
1550        boolean loop;
1551
1552        /** < Looping on/off > **/
1553
1554        /** Audio mix and Duck **/
1555        int ducking_threshold;
1556
1557        int ducking_lowVolume;
1558
1559        boolean bInDucking_enable;
1560
1561        String pcmFilePath;
1562    }
1563
1564    /** Encapsulates preview clips and effect settings */
1565    public static class PreviewSettings {
1566
1567        public PreviewClips[] previewClipsArray;
1568
1569        /** The effect settings. */
1570        public EffectSettings[] effectSettingsArray;
1571
1572    }
1573
1574    /** Encapsulates clip properties */
1575    public static class PreviewClipProperties {
1576
1577        public Properties[] clipProperties;
1578
1579    }
1580
1581    /** Defines the editing settings. */
1582    public static class EditSettings {
1583
1584        /**
1585         * Array of clip settings. There is one <code>clipSetting</code> for
1586         * each clip.
1587         */
1588        public ClipSettings[] clipSettingsArray;
1589
1590        /**
1591         * Array of transition settings. If there are n clips (and thus n
1592         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1593         * <code>transistionSettings</code> in
1594         * <code>transistionSettingsArray</code>.
1595         */
1596        public TransitionSettings[] transitionSettingsArray;
1597
1598        /** The effect settings. */
1599        public EffectSettings[] effectSettingsArray;
1600
1601        /**
1602         * Video frame rate of the output clip. See {@link VideoFrameRate
1603         * VideoFrameRate} for valid values.
1604         */
1605        public int videoFrameRate;
1606
1607        /** Output file name. Must be an absolute path. */
1608        public String outputFile;
1609
1610        /**
1611         * Size of the video frames in the output clip. See
1612         * {@link VideoFrameSize VideoFrameSize} for valid values.
1613         */
1614        public int videoFrameSize;
1615
1616        /**
1617         * Format of the video stream in the output clip. See
1618         * {@link VideoFormat VideoFormat} for valid values.
1619         */
1620        public int videoFormat;
1621
1622        /**
1623         * Format of the audio stream in the output clip. See
1624         * {@link AudioFormat AudioFormat} for valid values.
1625         */
1626        public int audioFormat;
1627
1628        /**
1629         * Sampling frequency of the audio stream in the output clip. See
1630         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1631         * values.
1632         */
1633        public int audioSamplingFreq;
1634
1635        /**
1636         * Maximum file size. By setting this you can set the maximum size of
1637         * the output clip. Set it to <code>0</code> to let the class ignore
1638         * this filed.
1639         */
1640        public int maxFileSize;
1641
1642        /**
1643         * Number of audio channels in output clip. Use <code>0</code> for none,
1644         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1645         * allowed when the <code>audioFormat</code> field is set to
1646         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1647         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO} Mono is only
1648         * allowed when the <code>audioFormat</code> field is set to
1649         * {@link AudioFormat#AAC AudioFormat.AAC}
1650         */
1651        public int audioChannels;
1652
1653        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1654        public int videoBitrate;
1655
1656        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1657        public int audioBitrate;
1658
1659        /**
1660         * Background music settings. See {@link BackgroundMusicSettings
1661         * BackgroundMusicSettings} for valid values.
1662         */
1663        public BackgroundMusicSettings backgroundMusicSettings;
1664
1665        public int primaryTrackVolume;
1666
1667    }
1668
1669    /**
1670     * Defines the media properties.
1671     **/
1672
1673    public static class Properties {
1674
1675        /**
1676         * Duration of the media in milliseconds.
1677         */
1678
1679        public int duration;
1680
1681        /**
1682         * File type.
1683         */
1684
1685        public int fileType;
1686
1687        /**
1688         * Video format.
1689         */
1690
1691        public int videoFormat;
1692
1693        /**
1694         * Duration of the video stream of the media in milliseconds.
1695         */
1696
1697        public int videoDuration;
1698
1699        /**
1700         * Bitrate of the video stream of the media.
1701         */
1702
1703        public int videoBitrate;
1704
1705        /**
1706         * Width of the video frames or the width of the still picture in
1707         * pixels.
1708         */
1709
1710        public int width;
1711
1712        /**
1713         * Height of the video frames or the height of the still picture in
1714         * pixels.
1715         */
1716
1717        public int height;
1718
1719        /**
1720         * Average frame rate of video in the media in frames per second.
1721         */
1722
1723        public float averageFrameRate;
1724
1725        /**
1726         * Profile and level of the video in the media.
1727         */
1728
1729        public int profileAndLevel;
1730
1731        /**
1732         * Audio format.
1733         */
1734
1735        public int audioFormat;
1736
1737        /**
1738         * Duration of the audio stream of the media in milliseconds.
1739         */
1740
1741        public int audioDuration;
1742
1743        /**
1744         * Bitrate of the audio stream of the media.
1745         */
1746
1747        public int audioBitrate;
1748
1749        /**
1750         * Number of audio channels in the media.
1751         */
1752
1753        public int audioChannels;
1754
1755        /**
1756         * Sampling frequency of the audio stream in the media in samples per
1757         * second.
1758         */
1759
1760        public int audioSamplingFrequency;
1761
1762        /**
1763         * Volume value of the audio track as percentage.
1764         */
1765        public int audioVolumeValue;
1766
1767        public String Id;
1768    }
1769
1770    /**
1771     * Constructor
1772     *
1773     * @param projectPath The path where the VideoEditor stores all files
1774     *        related to the project
1775     * @param lock The semaphore
1776     * @param veObj The video editor reference
1777     */
1778    public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) {
1779        mProjectPath = projectPath;
1780        if (veObj != null) {
1781            mVideoEditor = veObj;
1782        } else {
1783            mVideoEditor = null;
1784            throw new IllegalArgumentException("video editor object is null");
1785        }
1786        if (mStoryBoardSettings == null) {
1787            mStoryBoardSettings = new EditSettings();
1788        }
1789
1790        mLock = lock;
1791
1792        _init(mProjectPath, "null");
1793        mAudioTrackPCMFilePath = null;
1794    }
1795
1796    /**
1797     * @return The project path
1798     */
1799    String getProjectPath() {
1800        return mProjectPath;
1801    }
1802
1803    /**
1804     * @return The Audio Track PCM file path
1805     */
1806    String getProjectAudioTrackPCMFilePath() {
1807        return mAudioTrackPCMFilePath;
1808    }
1809
1810    /**
1811     * Invalidates the PCM file
1812     */
1813    void invalidatePcmFile() {
1814        if (mAudioTrackPCMFilePath != null) {
1815            new File(mAudioTrackPCMFilePath).delete();
1816            mAudioTrackPCMFilePath = null;
1817        }
1818    }
1819
1820    @SuppressWarnings("unused")
1821    private void onProgressUpdate(int taskId, int progress) {
1822        if (mProcessingState == PROCESSING_EXPORT) {
1823            if (mExportProgressListener != null) {
1824                if (mProgressToApp < progress) {
1825                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
1826                    /* record previous progress */
1827                    mProgressToApp = progress;
1828                }
1829            }
1830        }
1831        else {
1832            // Adapt progress depending on current state
1833            int actualProgress = 0;
1834            int action = 0;
1835
1836            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1837                action = MediaProcessingProgressListener.ACTION_DECODE;
1838            } else {
1839                action = MediaProcessingProgressListener.ACTION_ENCODE;
1840            }
1841
1842            switch (mProcessingState) {
1843                case PROCESSING_AUDIO_PCM:
1844                    actualProgress = progress;
1845                    break;
1846                case PROCESSING_TRANSITION:
1847                    actualProgress = progress;
1848                    break;
1849                case PROCESSING_KENBURNS:
1850                    actualProgress = progress;
1851                    break;
1852                case PROCESSING_INTERMEDIATE1:
1853                    if ((progress == 0) && (mProgressToApp != 0)) {
1854                        mProgressToApp = 0;
1855                    }
1856                    if ((progress != 0) || (mProgressToApp != 0)) {
1857                        actualProgress = progress/4;
1858                    }
1859                    break;
1860                case PROCESSING_INTERMEDIATE2:
1861                    if ((progress != 0) || (mProgressToApp != 0)) {
1862                        actualProgress = 25 + progress/4;
1863                    }
1864                    break;
1865                case PROCESSING_INTERMEDIATE3:
1866                    if ((progress != 0) || (mProgressToApp != 0)) {
1867                        actualProgress = 50 + progress/2;
1868                    }
1869                    break;
1870                case PROCESSING_NONE:
1871
1872                default:
1873                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
1874                    return;
1875            }
1876            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1877
1878                mProgressToApp = actualProgress;
1879
1880                if (mMediaProcessingProgressListener != null) {
1881                    // Send the progress indication
1882                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1883                                                                actualProgress);
1884                }
1885            }
1886            /* avoid 0 in next intermediate call */
1887            if (mProgressToApp == 0) {
1888                if (mMediaProcessingProgressListener != null) {
1889                    /*
1890                     *  Send the progress indication
1891                     */
1892                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1893                                                                actualProgress);
1894                }
1895                mProgressToApp = 1;
1896            }
1897        }
1898    }
1899
1900    @SuppressWarnings("unused")
1901    private void onPreviewProgressUpdate(int progress, boolean isFinished,
1902                  boolean updateOverlay, String filename, int renderingMode) {
1903        if (mPreviewProgressListener != null) {
1904            if (mIsFirstProgress) {
1905                mPreviewProgressListener.onStart(mVideoEditor);
1906                mIsFirstProgress = false;
1907            }
1908
1909            final VideoEditor.OverlayData overlayData;
1910            if (updateOverlay) {
1911                overlayData = new VideoEditor.OverlayData();
1912                if (filename != null) {
1913                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
1914                } else {
1915                    overlayData.setClear();
1916                }
1917            } else {
1918                overlayData = null;
1919            }
1920
1921            if (progress != 0) {
1922                mPreviewProgress = progress;
1923            }
1924
1925            if (isFinished) {
1926                mPreviewProgressListener.onStop(mVideoEditor);
1927            } else {
1928                mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
1929            }
1930        }
1931    }
1932
1933    /**
1934     * Release the native helper object
1935     */
1936    void releaseNativeHelper() throws InterruptedException {
1937        release();
1938    }
1939
1940    /**
1941     * Release the native helper to end the Audio Graph process
1942     */
1943    @SuppressWarnings("unused")
1944    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1945        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
1946            mExtractAudioWaveformProgressListener.onProgress(progress);
1947        }
1948    }
1949
1950    /**
1951     * Populates the Effect Settings in EffectSettings
1952     *
1953     * @param effects The reference of EffectColor
1954     *
1955     * @return The populated effect settings in EffectSettings reference
1956     */
1957    EffectSettings getEffectSettings(EffectColor effects) {
1958        EffectSettings effectSettings = new EffectSettings();
1959        effectSettings.startTime = (int)effects.getStartTime();
1960        effectSettings.duration = (int)effects.getDuration();
1961        effectSettings.videoEffectType = getEffectColorType(effects);
1962        effectSettings.audioEffectType = 0;
1963        effectSettings.startPercent = 0;
1964        effectSettings.durationPercent = 0;
1965        effectSettings.framingFile = null;
1966        effectSettings.topLeftX = 0;
1967        effectSettings.topLeftY = 0;
1968        effectSettings.framingResize = false;
1969        effectSettings.text = null;
1970        effectSettings.textRenderingData = null;
1971        effectSettings.textBufferWidth = 0;
1972        effectSettings.textBufferHeight = 0;
1973        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1974            effectSettings.fiftiesFrameRate = 15;
1975        } else {
1976            effectSettings.fiftiesFrameRate = 0;
1977        }
1978
1979        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
1980                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
1981            effectSettings.rgb16InputColor = effects.getColor();
1982        }
1983
1984        effectSettings.alphaBlendingStartPercent = 0;
1985        effectSettings.alphaBlendingMiddlePercent = 0;
1986        effectSettings.alphaBlendingEndPercent = 0;
1987        effectSettings.alphaBlendingFadeInTimePercent = 0;
1988        effectSettings.alphaBlendingFadeOutTimePercent = 0;
1989        return effectSettings;
1990    }
1991
1992    /**
1993     * Populates the Overlay Settings in EffectSettings
1994     *
1995     * @param overlay The reference of OverlayFrame
1996     *
1997     * @return The populated overlay settings in EffectSettings reference
1998     */
1999    EffectSettings getOverlaySettings(OverlayFrame overlay) {
2000        EffectSettings effectSettings = new EffectSettings();
2001        Bitmap bitmap = null;
2002
2003        effectSettings.startTime = (int)overlay.getStartTime();
2004        effectSettings.duration = (int)overlay.getDuration();
2005        effectSettings.videoEffectType = VideoEffect.FRAMING;
2006        effectSettings.audioEffectType = 0;
2007        effectSettings.startPercent = 0;
2008        effectSettings.durationPercent = 0;
2009        effectSettings.framingFile = null;
2010
2011        if ((bitmap = overlay.getBitmap()) != null) {
2012            effectSettings.framingFile = overlay.getFilename();
2013
2014            if (effectSettings.framingFile == null) {
2015                try {
2016                    (overlay).save(mProjectPath);
2017                } catch (IOException e) {
2018                    Log.e(TAG, "getOverlaySettings : File not found");
2019                }
2020                effectSettings.framingFile = overlay.getFilename();
2021            }
2022            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
2023                effectSettings.bitmapType = 6;
2024            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
2025                effectSettings.bitmapType = 5;
2026            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
2027                effectSettings.bitmapType = 4;
2028            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
2029                throw new RuntimeException("Bitmap config not supported");
2030
2031            effectSettings.width = bitmap.getWidth();
2032            effectSettings.height = bitmap.getHeight();
2033            effectSettings.framingBuffer = new int[effectSettings.width];
2034            int tmp = 0;
2035            short maxAlpha = 0;
2036            short minAlpha = (short)0xFF;
2037            short alpha = 0;
2038            while (tmp < effectSettings.height) {
2039                bitmap.getPixels(effectSettings.framingBuffer, 0,
2040                                 effectSettings.width, 0, tmp,
2041                                 effectSettings.width, 1);
2042                for (int i = 0; i < effectSettings.width; i++) {
2043                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
2044                    if (alpha > maxAlpha) {
2045                        maxAlpha = alpha;
2046                    }
2047                    if (alpha < minAlpha) {
2048                        minAlpha = alpha;
2049                    }
2050                }
2051                tmp += 1;
2052            }
2053            alpha = (short)((maxAlpha + minAlpha) / 2);
2054            alpha = (short)((alpha * 100) / 256);
2055            effectSettings.alphaBlendingEndPercent = alpha;
2056            effectSettings.alphaBlendingMiddlePercent = alpha;
2057            effectSettings.alphaBlendingStartPercent = alpha;
2058            effectSettings.alphaBlendingFadeInTimePercent = 100;
2059            effectSettings.alphaBlendingFadeOutTimePercent = 100;
2060            effectSettings.framingBuffer = null;
2061
2062            /*
2063             * Set the resized RGB file dimensions
2064             */
2065            effectSettings.width = overlay.getResizedRGBSizeWidth();
2066            if(effectSettings.width == 0) {
2067                effectSettings.width = bitmap.getWidth();
2068            }
2069
2070            effectSettings.height = overlay.getResizedRGBSizeHeight();
2071            if(effectSettings.height == 0) {
2072                effectSettings.height = bitmap.getHeight();
2073            }
2074
2075        }
2076
2077        effectSettings.topLeftX = 0;
2078        effectSettings.topLeftY = 0;
2079
2080        effectSettings.framingResize = true;
2081        effectSettings.text = null;
2082        effectSettings.textRenderingData = null;
2083        effectSettings.textBufferWidth = 0;
2084        effectSettings.textBufferHeight = 0;
2085        effectSettings.fiftiesFrameRate = 0;
2086        effectSettings.rgb16InputColor = 0;
2087        int mediaItemHeight;
2088        int aspectRatio;
2089        if (overlay.getMediaItem() instanceof MediaImageItem) {
2090            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2091                // Ken Burns was applied
2092                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2093                aspectRatio = getAspectRatio(
2094                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2095                    , mediaItemHeight);
2096            } else {
2097                //For image get the scaled height. Aspect ratio would remain the same
2098                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2099                aspectRatio = overlay.getMediaItem().getAspectRatio();
2100            }
2101        } else {
2102            aspectRatio = overlay.getMediaItem().getAspectRatio();
2103            mediaItemHeight = overlay.getMediaItem().getHeight();
2104        }
2105        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2106        return effectSettings;
2107    }
2108
2109     /* get Video Editor aspect ratio */
2110    int nativeHelperGetAspectRatio() {
2111        return mVideoEditor.getAspectRatio();
2112    }
2113
2114    /**
2115     * Sets the audio regenerate flag
2116     *
2117     * @param flag The boolean to set the audio regenerate flag
2118     *
2119     */
2120    void setAudioflag(boolean flag) {
2121        //check if the file exists.
2122        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2123            flag = true;
2124        }
2125        mRegenerateAudio = flag;
2126    }
2127
2128    /**
2129     * Gets the audio regenerate flag
2130     *
2131     * @param return The boolean to get the audio regenerate flag
2132     *
2133     */
2134    boolean getAudioflag() {
2135        return mRegenerateAudio;
2136    }
2137
2138    /**
2139     * Maps the average frame rate to one of the defined enum values
2140     *
2141     * @param averageFrameRate The average frame rate of video item
2142     *
2143     * @return The frame rate from one of the defined enum values
2144     */
2145    int GetClosestVideoFrameRate(int averageFrameRate) {
2146        if (averageFrameRate >= 25) {
2147            return VideoFrameRate.FR_30_FPS;
2148        } else if (averageFrameRate >= 20) {
2149            return VideoFrameRate.FR_25_FPS;
2150        } else if (averageFrameRate >= 15) {
2151            return VideoFrameRate.FR_20_FPS;
2152        } else if (averageFrameRate >= 12) {
2153            return VideoFrameRate.FR_15_FPS;
2154        } else if (averageFrameRate >= 10) {
2155            return VideoFrameRate.FR_12_5_FPS;
2156        } else if (averageFrameRate >= 7) {
2157            return VideoFrameRate.FR_10_FPS;
2158        } else if (averageFrameRate >= 5) {
2159            return VideoFrameRate.FR_7_5_FPS;
2160        } else {
2161            return -1;
2162        }
2163    }
2164
2165    /**
2166     * Helper function to adjust the effect or overlay start time
2167     * depending on the begin and end boundary time of meddia item
2168     */
2169    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
2170                                                  int endCutTime) {
2171
2172        int effectStartTime = 0;
2173        int effectDuration = 0;
2174
2175        /**
2176         * cbct -> clip begin cut time
2177         * cect -> clip end cut time
2178         ****************************************
2179         *  |                                 |
2180         *  |         cbct        cect        |
2181         *  | <-1-->   |           |          |
2182         *  |       <--|-2->       |          |
2183         *  |          | <---3---> |          |
2184         *  |          |        <--|-4--->    |
2185         *  |          |           | <--5-->  |
2186         *  |      <---|------6----|---->     |
2187         *  |                                 |
2188         *  < : effectStart
2189         *  > : effectStart + effectDuration
2190         ****************************************
2191         **/
2192
2193        /** 1 & 5 */
2194        /**
2195         * Effect falls out side the trim duration. In such a case effects shall
2196         * not be applied.
2197         */
2198        if ((lEffect.startTime > endCutTime)
2199                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2200
2201            effectStartTime = 0;
2202            effectDuration = 0;
2203
2204            lEffect.startTime = effectStartTime;
2205            lEffect.duration = effectDuration;
2206            return;
2207        }
2208
2209        /** 2 */
2210        if ((lEffect.startTime < beginCutTime)
2211                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2212                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2213            effectStartTime = 0;
2214            effectDuration = lEffect.duration;
2215
2216            effectDuration -= (beginCutTime - lEffect.startTime);
2217            lEffect.startTime = effectStartTime;
2218            lEffect.duration = effectDuration;
2219            return;
2220        }
2221
2222        /** 3 */
2223        if ((lEffect.startTime >= beginCutTime)
2224                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2225            effectStartTime = lEffect.startTime - beginCutTime;
2226            lEffect.startTime = effectStartTime;
2227            lEffect.duration = lEffect.duration;
2228            return;
2229        }
2230
2231        /** 4 */
2232        if ((lEffect.startTime >= beginCutTime)
2233                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2234            effectStartTime = lEffect.startTime - beginCutTime;
2235            effectDuration = endCutTime - lEffect.startTime;
2236            lEffect.startTime = effectStartTime;
2237            lEffect.duration = effectDuration;
2238            return;
2239        }
2240
2241        /** 6 */
2242        if ((lEffect.startTime < beginCutTime)
2243                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2244            effectStartTime = 0;
2245            effectDuration = endCutTime - beginCutTime;
2246            lEffect.startTime = effectStartTime;
2247            lEffect.duration = effectDuration;
2248            return;
2249        }
2250
2251    }
2252
2253    /**
2254     * Generates the clip for preview or export
2255     *
2256     * @param editSettings The EditSettings reference for generating
2257     * a clip for preview or export
2258     *
2259     * @return error value
2260     */
2261    public int generateClip(EditSettings editSettings) {
2262        int err = 0;
2263
2264        try {
2265            err = nativeGenerateClip(editSettings);
2266        } catch (IllegalArgumentException ex) {
2267            Log.e(TAG, "Illegal Argument exception in load settings");
2268            return -1;
2269        } catch (IllegalStateException ex) {
2270            Log.e(TAG, "Illegal state exception in load settings");
2271            return -1;
2272        } catch (RuntimeException ex) {
2273            Log.e(TAG, "Runtime exception in load settings");
2274            return -1;
2275        }
2276        return err;
2277    }
2278
2279    /**
2280     * Init function to initialiZe the  ClipSettings reference to
2281     * default values
2282     *
2283     * @param lclipSettings The ClipSettings reference
2284     */
2285    void initClipSettings(ClipSettings lclipSettings) {
2286        lclipSettings.clipPath = null;
2287        lclipSettings.clipDecodedPath = null;
2288        lclipSettings.clipOriginalPath = null;
2289        lclipSettings.fileType = 0;
2290        lclipSettings.endCutTime = 0;
2291        lclipSettings.beginCutTime = 0;
2292        lclipSettings.beginCutPercent = 0;
2293        lclipSettings.endCutPercent = 0;
2294        lclipSettings.panZoomEnabled = false;
2295        lclipSettings.panZoomPercentStart = 0;
2296        lclipSettings.panZoomTopLeftXStart = 0;
2297        lclipSettings.panZoomTopLeftYStart = 0;
2298        lclipSettings.panZoomPercentEnd = 0;
2299        lclipSettings.panZoomTopLeftXEnd = 0;
2300        lclipSettings.panZoomTopLeftYEnd = 0;
2301        lclipSettings.mediaRendering = 0;
2302    }
2303
2304
2305    /**
2306     * Populates the settings for generating an effect clip
2307     *
2308     * @param lMediaItem The media item for which the effect clip
2309     * needs to be generated
2310     * @param lclipSettings The ClipSettings reference containing
2311     * clips data
2312     * @param e The EditSettings reference containing effect specific data
2313     * @param uniqueId The unique id used in the name of the output clip
2314     * @param clipNo Used for internal purpose
2315     *
2316     * @return The name and path of generated clip
2317     */
2318    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2319            EditSettings e,String uniqueId,int clipNo) {
2320        int err = 0;
2321        EditSettings editSettings = null;
2322        String EffectClipPath = null;
2323
2324        editSettings = new EditSettings();
2325
2326        editSettings.clipSettingsArray = new ClipSettings[1];
2327        editSettings.clipSettingsArray[0] = lclipSettings;
2328
2329        editSettings.backgroundMusicSettings = null;
2330        editSettings.transitionSettingsArray = null;
2331        editSettings.effectSettingsArray = e.effectSettingsArray;
2332
2333        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2334                + lMediaItem.getId() + uniqueId + ".3gp");
2335
2336        File tmpFile = new File(EffectClipPath);
2337        if (tmpFile.exists()) {
2338            tmpFile.delete();
2339        }
2340
2341        if (lMediaItem instanceof MediaVideoItem) {
2342            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2343
2344            editSettings.audioFormat = AudioFormat.AAC;
2345            editSettings.audioChannels = 2;
2346            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2347            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2348
2349            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2350            //editSettings.videoFormat = VideoFormat.MPEG4;
2351            editSettings.videoFormat = VideoFormat.H264;
2352            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2353            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2354                    m.getHeight());
2355        } else {
2356            MediaImageItem m = (MediaImageItem)lMediaItem;
2357            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2358            editSettings.audioChannels = 2;
2359            editSettings.audioFormat = AudioFormat.AAC;
2360            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2361
2362            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2363            editSettings.videoFormat = VideoFormat.H264;
2364            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2365            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2366                    m.getScaledHeight());
2367        }
2368
2369        editSettings.outputFile = EffectClipPath;
2370
2371        if (clipNo == 1) {
2372            mProcessingState  = PROCESSING_INTERMEDIATE1;
2373        } else if (clipNo == 2) {
2374            mProcessingState  = PROCESSING_INTERMEDIATE2;
2375        }
2376        mProcessingObject = lMediaItem;
2377        err = generateClip(editSettings);
2378        mProcessingState  = PROCESSING_NONE;
2379
2380        if (err == 0) {
2381            lclipSettings.clipPath = EffectClipPath;
2382            lclipSettings.fileType = FileType.THREE_GPP;
2383            return EffectClipPath;
2384        } else {
2385            throw new RuntimeException("preview generation cannot be completed");
2386        }
2387    }
2388
2389
2390    /**
2391     * Populates the settings for generating a Ken Burn effect clip
2392     *
2393     * @param m The media image item for which the Ken Burn effect clip
2394     * needs to be generated
2395     * @param e The EditSettings reference clip specific data
2396     *
2397     * @return The name and path of generated clip
2398     */
2399    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2400        String output = null;
2401        int err = 0;
2402
2403        e.backgroundMusicSettings = null;
2404        e.transitionSettingsArray = null;
2405        e.effectSettingsArray = null;
2406        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2407
2408        File tmpFile = new File(output);
2409        if (tmpFile.exists()) {
2410            tmpFile.delete();
2411        }
2412
2413        e.outputFile = output;
2414        e.audioBitrate = Bitrate.BR_64_KBPS;
2415        e.audioChannels = 2;
2416        e.audioFormat = AudioFormat.AAC;
2417        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2418
2419        e.videoBitrate = Bitrate.BR_5_MBPS;
2420        e.videoFormat = VideoFormat.H264;
2421        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2422        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2423                                                           m.getScaledHeight());
2424        mProcessingState  = PROCESSING_KENBURNS;
2425        mProcessingObject = m;
2426        err = generateClip(e);
2427        // Reset the processing state and check for errors
2428        mProcessingState  = PROCESSING_NONE;
2429        if (err != 0) {
2430            throw new RuntimeException("preview generation cannot be completed");
2431        }
2432        return output;
2433    }
2434
2435
2436    /**
2437     * Calculates the output resolution for transition clip
2438     *
2439     * @param m1 First media item associated with transition
2440     * @param m2 Second media item associated with transition
2441     *
2442     * @return The transition resolution
2443     */
2444    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2445        int clip1Height = 0;
2446        int clip2Height = 0;
2447        int videoSize = 0;
2448
2449        if (m1 != null && m2 != null) {
2450            if (m1 instanceof MediaVideoItem) {
2451                clip1Height = m1.getHeight();
2452            } else if (m1 instanceof MediaImageItem) {
2453                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2454            }
2455            if (m2 instanceof MediaVideoItem) {
2456                clip2Height = m2.getHeight();
2457            } else if (m2 instanceof MediaImageItem) {
2458                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2459            }
2460            if (clip1Height > clip2Height) {
2461                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2462            } else {
2463                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2464            }
2465        } else if (m1 == null && m2 != null) {
2466            if (m2 instanceof MediaVideoItem) {
2467                clip2Height = m2.getHeight();
2468            } else if (m2 instanceof MediaImageItem) {
2469                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2470            }
2471            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2472        } else if (m1 != null && m2 == null) {
2473            if (m1 instanceof MediaVideoItem) {
2474                clip1Height = m1.getHeight();
2475            } else if (m1 instanceof MediaImageItem) {
2476                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2477            }
2478            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2479        }
2480        return videoSize;
2481    }
2482
2483    /**
2484     * Populates the settings for generating an transition clip
2485     *
2486     * @param m1 First media item associated with transition
2487     * @param m2 Second media item associated with transition
2488     * @param e The EditSettings reference containing
2489     * clip specific data
2490     * @param uniqueId The unique id used in the name of the output clip
2491     * @param t The Transition specific data
2492     *
2493     * @return The name and path of generated clip
2494     */
2495    String generateTransitionClip(EditSettings e, String uniqueId,
2496            MediaItem m1, MediaItem m2,Transition t) {
2497        String outputFilename = null;
2498        int err = 0;
2499
2500        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2501        e.outputFile = outputFilename;
2502        e.audioBitrate = Bitrate.BR_64_KBPS;
2503        e.audioChannels = 2;
2504        e.audioFormat = AudioFormat.AAC;
2505        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2506
2507        e.videoBitrate = Bitrate.BR_5_MBPS;
2508        e.videoFormat = VideoFormat.H264;
2509        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2510        e.videoFrameSize = getTransitionResolution(m1, m2);
2511
2512        if (new File(outputFilename).exists()) {
2513            new File(outputFilename).delete();
2514        }
2515        mProcessingState  = PROCESSING_INTERMEDIATE3;
2516        mProcessingObject = t;
2517        err = generateClip(e);
2518        // Reset the processing state and check for errors
2519        mProcessingState  = PROCESSING_NONE;
2520        if (err != 0) {
2521            throw new RuntimeException("preview generation cannot be completed");
2522        }
2523        return outputFilename;
2524    }
2525
2526    /**
2527     * Populates effects and overlays in EffectSettings structure
2528     * and also adjust the start time and duration of effects and overlays
2529     * w.r.t to total story board time
2530     *
2531     * @param m1 Media item associated with effect
2532     * @param effectSettings The EffectSettings reference containing
2533     *      effect specific data
2534     * @param beginCutTime The begin cut time of the clip associated with effect
2535     * @param endCutTime The end cut time of the clip associated with effect
2536     * @param storyBoardTime The current story board time
2537     *
2538     * @return The updated index
2539     */
2540    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2541            int beginCutTime, int endCutTime, int storyBoardTime) {
2542
2543        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2544                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2545            beginCutTime += m.getBeginTransition().getDuration();
2546            endCutTime -= m.getEndTransition().getDuration();
2547        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2548                && m.getEndTransition().getDuration() > 0) {
2549            endCutTime -= m.getEndTransition().getDuration();
2550        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2551                && m.getBeginTransition().getDuration() > 0) {
2552            beginCutTime += m.getBeginTransition().getDuration();
2553        }
2554
2555        final List<Effect> effects = m.getAllEffects();
2556        final List<Overlay> overlays = m.getAllOverlays();
2557
2558        for (Overlay overlay : overlays) {
2559            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2560            adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2561            effectSettings[i].startTime += storyBoardTime;
2562            i++;
2563        }
2564
2565        for (Effect effect : effects) {
2566            if (effect instanceof EffectColor) {
2567                effectSettings[i] = getEffectSettings((EffectColor)effect);
2568                adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2569                effectSettings[i].startTime += storyBoardTime;
2570                i++;
2571            }
2572        }
2573
2574        return i;
2575    }
2576
2577    /**
2578     * Adjusts the media item boundaries for use in export or preview
2579     *
2580     * @param clipSettings The ClipSettings reference
2581     * @param clipProperties The Properties reference
2582     * @param m The media item
2583     */
2584    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2585                                         Properties clipProperties, MediaItem m) {
2586        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2587                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2588            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2589            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2590        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2591                && m.getEndTransition().getDuration() > 0) {
2592            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2593        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2594                && m.getBeginTransition().getDuration() > 0) {
2595            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2596        }
2597
2598        clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
2599
2600        if (clipProperties.videoDuration != 0) {
2601            clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2602        }
2603
2604        if (clipProperties.audioDuration != 0) {
2605            clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2606        }
2607    }
2608
2609    /**
2610     * Generates the transition if transition is present
2611     * and is in invalidated state
2612     *
2613     * @param transition The Transition reference
2614     * @param editSettings The EditSettings reference
2615     * @param clipPropertiesArray The clip Properties array
2616     * @param i The index in clip Properties array for current clip
2617     */
2618    private void generateTransition(Transition transition, EditSettings editSettings,
2619            PreviewClipProperties clipPropertiesArray, int index) {
2620        if (!(transition.isGenerated())) {
2621            transition.generate();
2622        }
2623        editSettings.clipSettingsArray[index] = new ClipSettings();
2624        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2625        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2626        editSettings.clipSettingsArray[index].beginCutTime = 0;
2627        editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
2628        editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
2629
2630        try {
2631            clipPropertiesArray.clipProperties[index] =
2632                getMediaProperties(transition.getFilename());
2633        } catch (Exception e) {
2634            throw new IllegalArgumentException("Unsupported file or file not found");
2635        }
2636
2637        clipPropertiesArray.clipProperties[index].Id = null;
2638        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2639        clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
2640        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2641            clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
2642        }
2643
2644        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2645            clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
2646        }
2647    }
2648
2649    /**
2650     * Sets the volume for current media item in clip properties array
2651     *
2652     * @param m The media item
2653     * @param clipProperties The clip properties array reference
2654     * @param i The index in clip Properties array for current clip
2655     */
2656    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2657                              int index) {
2658        if (m instanceof MediaVideoItem) {
2659            final boolean videoMuted = ((MediaVideoItem)m).isMuted();
2660            if (videoMuted == false) {
2661                mClipProperties.clipProperties[index].audioVolumeValue =
2662                    ((MediaVideoItem)m).getVolume();
2663            } else {
2664                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2665            }
2666        } else if (m instanceof MediaImageItem) {
2667            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2668        }
2669    }
2670
2671    /**
2672     * Checks for odd size image width and height
2673     *
2674     * @param m The media item
2675     * @param clipProperties The clip properties array reference
2676     * @param i The index in clip Properties array for current clip
2677     */
2678    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2679        if (m instanceof MediaImageItem) {
2680            int width = mClipProperties.clipProperties[index].width;
2681            int height = mClipProperties.clipProperties[index].height;
2682
2683            if ((width % 2) != 0) {
2684                width -= 1;
2685            }
2686            if ((height % 2) != 0) {
2687                height -= 1;
2688            }
2689            mClipProperties.clipProperties[index].width = width;
2690            mClipProperties.clipProperties[index].height = height;
2691        }
2692    }
2693
2694    /**
2695     * Populates the media item properties and calculates the maximum
2696     * height among all the clips
2697     *
2698     * @param m The media item
2699     * @param i The index in clip Properties array for current clip
2700     * @param maxHeight The max height from the clip properties
2701     *
2702     * @return Updates the max height if current clip's height is greater
2703     * than all previous clips height
2704     */
2705    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2706        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2707        if (m instanceof MediaVideoItem) {
2708            mPreviewEditSettings.clipSettingsArray[index] =
2709                ((MediaVideoItem)m).getVideoClipProperties();
2710            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2711                maxHeight = ((MediaVideoItem)m).getHeight();
2712            }
2713        } else if (m instanceof MediaImageItem) {
2714            mPreviewEditSettings.clipSettingsArray[index] =
2715                ((MediaImageItem)m).getImageClipProperties();
2716            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2717                maxHeight = ((MediaImageItem)m).getScaledHeight();
2718            }
2719        }
2720        /** + Handle the image files here */
2721        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2722            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
2723                ((MediaImageItem)m).getDecodedImageFileName();
2724
2725            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2726                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2727        }
2728        return maxHeight;
2729    }
2730
2731    /**
2732     * Populates the background music track properties
2733     *
2734     * @param mediaBGMList The background music list
2735     *
2736     */
2737    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2738
2739        if (mediaBGMList.size() == 1) {
2740            mAudioTrack = mediaBGMList.get(0);
2741        } else {
2742            mAudioTrack = null;
2743        }
2744
2745        if (mAudioTrack != null) {
2746            mAudioSettings = new AudioSettings();
2747            Properties mAudioProperties = new Properties();
2748            mAudioSettings.pFile = null;
2749            mAudioSettings.Id = mAudioTrack.getId();
2750            try {
2751                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2752            } catch (Exception e) {
2753               throw new IllegalArgumentException("Unsupported file or file not found");
2754            }
2755            mAudioSettings.bRemoveOriginal = false;
2756            mAudioSettings.channels = mAudioProperties.audioChannels;
2757            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2758            mAudioSettings.loop = mAudioTrack.isLooping();
2759            mAudioSettings.ExtendedFs = 0;
2760            mAudioSettings.pFile = mAudioTrack.getFilename();
2761            mAudioSettings.startMs = mAudioTrack.getStartTime();
2762            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2763            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2764            if (mAudioTrack.isMuted()) {
2765                mAudioSettings.volume = 0;
2766            } else {
2767                mAudioSettings.volume = mAudioTrack.getVolume();
2768            }
2769            mAudioSettings.fileType = mAudioProperties.fileType;
2770            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2771            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2772            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2773            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2774            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2775
2776            mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
2777            mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
2778            mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
2779            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2780                mAudioTrack.getStartTime();
2781            mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
2782            mPreviewEditSettings.backgroundMusicSettings.beginLoop =
2783                mAudioTrack.getBoundaryBeginTime();
2784            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2785                                               mAudioTrack.getBoundaryEndTime();
2786            mPreviewEditSettings.backgroundMusicSettings.enableDucking =
2787                mAudioTrack.isDuckingEnabled();
2788            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
2789                mAudioTrack.getDuckingThreshhold();
2790            mPreviewEditSettings.backgroundMusicSettings.lowVolume =
2791                mAudioTrack.getDuckedTrackVolume();
2792            mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
2793            mPreviewEditSettings.primaryTrackVolume = 100;
2794            mProcessingState  = PROCESSING_AUDIO_PCM;
2795            mProcessingObject = mAudioTrack;
2796        } else {
2797            mAudioSettings = null;
2798            mPreviewEditSettings.backgroundMusicSettings = null;
2799            mAudioTrackPCMFilePath = null;
2800        }
2801    }
2802
2803    /**
2804     * Calculates all the effects in all the media items
2805     * in media items list
2806     *
2807     * @param mediaItemsList The media item list
2808     *
2809     * @return The total number of effects
2810     *
2811     */
2812    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2813        int totalEffects = 0;
2814        final Iterator<MediaItem> it = mediaItemsList.iterator();
2815        while (it.hasNext()) {
2816            final MediaItem t = it.next();
2817            totalEffects += t.getAllEffects().size();
2818            totalEffects += t.getAllOverlays().size();
2819            final Iterator<Effect> ef = t.getAllEffects().iterator();
2820            while (ef.hasNext()) {
2821                final Effect e = ef.next();
2822                if (e instanceof EffectKenBurns) {
2823                    totalEffects--;
2824                }
2825            }
2826        }
2827        return totalEffects;
2828    }
2829
2830    /**
2831     * This function is responsible for forming clip settings
2832     * array and clip properties array including transition clips
2833     * and effect settings for preview purpose or export.
2834     *
2835     *
2836     * @param mediaItemsList The media item list
2837     * @param mediaTransitionList The transitions list
2838     * @param mediaBGMList The background music list
2839     * @param listener The MediaProcessingProgressListener
2840     *
2841     */
2842    void previewStoryBoard(List<MediaItem> mediaItemsList,
2843            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2844            MediaProcessingProgressListener listener) {
2845        if (mInvalidatePreviewArray) {
2846            int previewIndex = 0;
2847            int totalEffects = 0;
2848            int storyBoardTime = 0;
2849            int maxHeight = 0;
2850            int beginCutTime = 0;
2851            int endCutTime = 0;
2852            int effectIndex = 0;
2853            Transition lTransition = null;
2854            MediaItem lMediaItem = null;
2855            mPreviewEditSettings = new EditSettings();
2856            mClipProperties = new PreviewClipProperties();
2857            mTotalClips = 0;
2858
2859            mTotalClips = mediaItemsList.size();
2860            for (Transition transition : mediaTransitionList) {
2861                if (transition.getDuration() > 0) {
2862                    mTotalClips++;
2863                }
2864            }
2865
2866            totalEffects = getTotalEffects(mediaItemsList);
2867
2868            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2869            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2870            mClipProperties.clipProperties = new Properties[mTotalClips];
2871
2872            /** record the call back progress listener */
2873            mMediaProcessingProgressListener = listener;
2874            mProgressToApp = 0;
2875
2876            if (mediaItemsList.size() > 0) {
2877                for (int i = 0; i < mediaItemsList.size(); i++) {
2878                    /* Get the Media Item from the list */
2879                    lMediaItem = mediaItemsList.get(i);
2880                    if (lMediaItem instanceof MediaVideoItem) {
2881                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2882                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2883                    } else if (lMediaItem instanceof MediaImageItem) {
2884                        beginCutTime = 0;
2885                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2886                    }
2887                    /* Get the transition associated with Media Item */
2888                    lTransition = lMediaItem.getBeginTransition();
2889                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2890                        /* generate transition clip */
2891                        generateTransition(lTransition, mPreviewEditSettings,
2892                                           mClipProperties, previewIndex);
2893                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2894                        previewIndex++;
2895                    }
2896                    /* Populate media item properties */
2897                    maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
2898                    /* Get the clip properties of the media item. */
2899                    if (lMediaItem instanceof MediaImageItem) {
2900                        int tmpCnt = 0;
2901                        boolean bEffectKbPresent = false;
2902                        final List<Effect> effectList = lMediaItem.getAllEffects();
2903                        /**
2904                         * Check if Ken Burns effect is present
2905                         */
2906                        while (tmpCnt < effectList.size()) {
2907                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2908                                bEffectKbPresent = true;
2909                                break;
2910                            }
2911                            tmpCnt++;
2912                        }
2913
2914                        if (bEffectKbPresent) {
2915                            try {
2916                                  if(((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
2917                                     mClipProperties.clipProperties[previewIndex]
2918                                        = getMediaProperties(((MediaImageItem)lMediaItem).
2919                                                             getGeneratedImageClip());
2920                                  }
2921                                  else {
2922                                   mClipProperties.clipProperties[previewIndex]
2923                                      = getMediaProperties(((MediaImageItem)lMediaItem).
2924                                                             getScaledImageFileName());
2925                                   mClipProperties.clipProperties[previewIndex].width =
2926                                             ((MediaImageItem)lMediaItem).getScaledWidth();
2927                                   mClipProperties.clipProperties[previewIndex].height =
2928                                             ((MediaImageItem)lMediaItem).getScaledHeight();
2929                                  }
2930                                } catch (Exception e) {
2931                                   throw new IllegalArgumentException("Unsupported file or file not found");
2932                                }
2933                         } else {
2934                              try {
2935                                  mClipProperties.clipProperties[previewIndex]
2936                                      = getMediaProperties(((MediaImageItem)lMediaItem).
2937                                                               getScaledImageFileName());
2938                              } catch (Exception e) {
2939                                throw new IllegalArgumentException("Unsupported file or file not found");
2940                              }
2941                            mClipProperties.clipProperties[previewIndex].width =
2942                                        ((MediaImageItem)lMediaItem).getScaledWidth();
2943                            mClipProperties.clipProperties[previewIndex].height =
2944                                        ((MediaImageItem)lMediaItem).getScaledHeight();
2945                        }
2946                    } else {
2947                        try {
2948                            mClipProperties.clipProperties[previewIndex]
2949                                 = getMediaProperties(lMediaItem.getFilename());
2950                            } catch (Exception e) {
2951                              throw new IllegalArgumentException("Unsupported file or file not found");
2952                          }
2953                    }
2954                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2955                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2956                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2957
2958                    /*
2959                     * Adjust media item start time and end time w.r.t to begin
2960                     * and end transitions associated with media item
2961                     */
2962
2963                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2964                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2965
2966                    /*
2967                     * Get all the effects and overlays for that media item and
2968                     * adjust start time and duration of effects
2969                     */
2970
2971                    effectIndex = populateEffects(lMediaItem,
2972                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2973                            endCutTime, storyBoardTime);
2974                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2975                    previewIndex++;
2976
2977                    /* Check if there is any end transition at last media item */
2978
2979                    if (i == (mediaItemsList.size() - 1)) {
2980                        lTransition = lMediaItem.getEndTransition();
2981                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2982                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
2983                                    previewIndex);
2984                            break;
2985                        }
2986                    }
2987                }
2988
2989                if (!mErrorFlagSet) {
2990                    mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
2991                            .getAspectRatio(), maxHeight);
2992                    populateBackgroundMusicProperties(mediaBGMList);
2993
2994                    /** call to native populate settings */
2995                    try {
2996                        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
2997                    } catch (IllegalArgumentException ex) {
2998                        Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
2999                        throw ex;
3000                    } catch (IllegalStateException ex) {
3001                        Log.e(TAG, "Illegal state exception in nativePopulateSettings");
3002                        throw ex;
3003                    } catch (RuntimeException ex) {
3004                        Log.e(TAG, "Runtime exception in nativePopulateSettings");
3005                        throw ex;
3006                    }
3007                    mInvalidatePreviewArray = false;
3008                    mProcessingState  = PROCESSING_NONE;
3009                }
3010            }
3011            if (mErrorFlagSet) {
3012                mErrorFlagSet = false;
3013                throw new RuntimeException("preview generation cannot be completed");
3014            }
3015        }
3016    } /* END of previewStoryBoard */
3017
3018    /**
3019     * This function is responsible for starting the preview
3020     *
3021     *
3022     * @param surface The surface on which preview has to be displayed
3023     * @param fromMs The time in ms from which preview has to be started
3024     * @param toMs The time in ms till preview has to be played
3025     * @param loop To loop the preview or not
3026     * @param callbackAfterFrameCount INdicated after how many frames
3027     * the callback is needed
3028     * @param listener The PreviewProgressListener
3029     */
3030    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3031            int callbackAfterFrameCount, PreviewProgressListener listener) {
3032        mPreviewProgress = fromMs;
3033        mIsFirstProgress = true;
3034        mPreviewProgressListener = listener;
3035
3036        if (!mInvalidatePreviewArray) {
3037            try {
3038                /** Modify the image files names to rgb image files. */
3039                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3040                    clipCnt++) {
3041                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3042                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3043                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3044                    }
3045                }
3046                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3047                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3048            } catch (IllegalArgumentException ex) {
3049                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
3050                throw ex;
3051            } catch (IllegalStateException ex) {
3052                Log.e(TAG, "Illegal state exception in nativeStartPreview");
3053                throw ex;
3054            } catch (RuntimeException ex) {
3055                Log.e(TAG, "Runtime exception in nativeStartPreview");
3056                throw ex;
3057            }
3058        } else {
3059            throw new IllegalStateException("generatePreview is in progress");
3060        }
3061    }
3062
3063    /**
3064     * This function is responsible for stopping the preview
3065     */
3066    long stopPreview() {
3067        return nativeStopPreview();
3068    }
3069
3070    /**
3071     * This function is responsible for rendering a single frame
3072     * from the complete story board on the surface
3073     *
3074     * @param surface The surface on which frame has to be rendered
3075     * @param time The time in ms at which the frame has to be rendered
3076     * @param surfaceWidth The surface width
3077     * @param surfaceHeight The surface height
3078     * @param overlayData The overlay data
3079     *
3080     * @return The actual time from the story board at which the  frame was extracted
3081     * and rendered
3082     */
3083    long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3084            int surfaceHeight, VideoEditor.OverlayData overlayData) {
3085        if (mInvalidatePreviewArray) {
3086            if (Log.isLoggable(TAG, Log.DEBUG)) {
3087                Log.d(TAG, "Call generate preview first");
3088            }
3089            throw new IllegalStateException("Call generate preview first");
3090        }
3091
3092        long timeMs = 0;
3093        try {
3094            for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3095                  clipCnt++) {
3096                if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3097                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3098                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3099                }
3100            }
3101
3102            // Reset the render preview frame params that shall be set by native.
3103            mRenderPreviewOverlayFile = null;
3104            mRenderPreviewRenderingMode = MediaRendering.RESIZING;
3105
3106            nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3107
3108            timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3109
3110            if (mRenderPreviewOverlayFile != null) {
3111                overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
3112                        mRenderPreviewRenderingMode);
3113            } else {
3114                overlayData.setClear();
3115            }
3116        } catch (IllegalArgumentException ex) {
3117            Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
3118            throw ex;
3119        } catch (IllegalStateException ex) {
3120            Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
3121            throw ex;
3122        } catch (RuntimeException ex) {
3123            Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
3124            throw ex;
3125        }
3126
3127        return timeMs;
3128    }
3129
3130    private void previewFrameEditInfo(String filename, int renderingMode) {
3131        mRenderPreviewOverlayFile = filename;
3132        mRenderPreviewRenderingMode = renderingMode;
3133    }
3134
3135
3136    /**
3137     * This function is responsible for rendering a single frame
3138     * from a single media item on the surface
3139     *
3140     * @param surface The surface on which frame has to be rendered
3141     * @param filepath The file path for which the frame needs to be displayed
3142     * @param time The time in ms at which the frame has to be rendered
3143     * @param framewidth The frame width
3144     * @param framewidth The frame height
3145     *
3146     * @return The actual time from media item at which the  frame was extracted
3147     * and rendered
3148     */
3149    long renderMediaItemPreviewFrame(Surface surface, String filepath,
3150                                            long time, int framewidth, int frameheight) {
3151        long timeMs = 0;
3152        try {
3153            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3154                    frameheight, 0, 0, time);
3155        } catch (IllegalArgumentException ex) {
3156            Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
3157            throw ex;
3158        } catch (IllegalStateException ex) {
3159            Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
3160            throw ex;
3161        } catch (RuntimeException ex) {
3162            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
3163            throw ex;
3164        }
3165
3166        return timeMs;
3167    }
3168
3169    /**
3170     * This function sets the flag to invalidate the preview array
3171     * and for generating the preview again
3172     */
3173    void setGeneratePreview(boolean isRequired) {
3174        boolean semAcquiredDone = false;
3175        try {
3176            lock();
3177            semAcquiredDone = true;
3178            mInvalidatePreviewArray = isRequired;
3179        } catch (InterruptedException ex) {
3180            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
3181        } finally {
3182            if (semAcquiredDone) {
3183                unlock();
3184            }
3185        }
3186    }
3187
3188    /**
3189     * @return Returns the current status of preview invalidation
3190     * flag
3191     */
3192    boolean getGeneratePreview() {
3193        return mInvalidatePreviewArray;
3194    }
3195
3196    /**
3197     * Calculates the aspect ratio from widht and height
3198     *
3199     * @param w The width of media item
3200     * @param h The height of media item
3201     *
3202     * @return The calculated aspect ratio
3203     */
3204    int getAspectRatio(int w, int h) {
3205        double apRatio = (double)(w) / (double)(h);
3206        BigDecimal bd = new BigDecimal(apRatio);
3207        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3208        apRatio = bd.doubleValue();
3209        int var = MediaProperties.ASPECT_RATIO_16_9;
3210        if (apRatio >= 1.7) {
3211            var = MediaProperties.ASPECT_RATIO_16_9;
3212        } else if (apRatio >= 1.6) {
3213            var = MediaProperties.ASPECT_RATIO_5_3;
3214        } else if (apRatio >= 1.5) {
3215            var = MediaProperties.ASPECT_RATIO_3_2;
3216        } else if (apRatio > 1.3) {
3217            var = MediaProperties.ASPECT_RATIO_4_3;
3218        } else if (apRatio >= 1.2) {
3219            var = MediaProperties.ASPECT_RATIO_11_9;
3220        }
3221        return var;
3222    }
3223
3224    /**
3225     * Maps the file type used in native layer
3226     * to file type used in JAVA layer
3227     *
3228     * @param fileType The file type in native layer
3229     *
3230     * @return The File type in JAVA layer
3231     */
3232    int getFileType(int fileType) {
3233        int retValue = -1;
3234        switch (fileType) {
3235            case FileType.UNSUPPORTED:
3236                retValue = MediaProperties.FILE_UNSUPPORTED;
3237                break;
3238            case FileType.THREE_GPP:
3239                retValue = MediaProperties.FILE_3GP;
3240                break;
3241            case FileType.MP4:
3242                retValue = MediaProperties.FILE_MP4;
3243                break;
3244            case FileType.JPG:
3245                retValue = MediaProperties.FILE_JPEG;
3246                break;
3247            case FileType.PNG:
3248                retValue = MediaProperties.FILE_PNG;
3249                break;
3250            case FileType.MP3:
3251                retValue = MediaProperties.FILE_MP3;
3252                break;
3253            case FileType.M4V:
3254                retValue = MediaProperties.FILE_M4V;
3255                break;
3256
3257            default:
3258                retValue = -1;
3259        }
3260        return retValue;
3261    }
3262
3263    /**
3264     * Maps the video codec type used in native layer
3265     * to video codec type used in JAVA layer
3266     *
3267     * @param codecType The video codec type in native layer
3268     *
3269     * @return The video codec type in JAVA layer
3270     */
3271    int getVideoCodecType(int codecType) {
3272        int retValue = -1;
3273        switch (codecType) {
3274            case VideoFormat.H263:
3275                retValue = MediaProperties.VCODEC_H263;
3276                break;
3277            case VideoFormat.H264:
3278                retValue = MediaProperties.VCODEC_H264BP;
3279                break;
3280            case VideoFormat.MPEG4:
3281                retValue = MediaProperties.VCODEC_MPEG4;
3282                break;
3283            case VideoFormat.UNSUPPORTED:
3284
3285            default:
3286                retValue = -1;
3287        }
3288        return retValue;
3289    }
3290
3291    /**
3292     * Maps the audio codec type used in native layer
3293     * to audio codec type used in JAVA layer
3294     *
3295     * @param audioType The audio codec type in native layer
3296     *
3297     * @return The audio codec type in JAVA layer
3298     */
3299    int getAudioCodecType(int codecType) {
3300        int retValue = -1;
3301        switch (codecType) {
3302            case AudioFormat.AMR_NB:
3303                retValue = MediaProperties.ACODEC_AMRNB;
3304                break;
3305            case AudioFormat.AAC:
3306                retValue = MediaProperties.ACODEC_AAC_LC;
3307                break;
3308            case AudioFormat.MP3:
3309                retValue = MediaProperties.ACODEC_MP3;
3310                break;
3311
3312            default:
3313                retValue = -1;
3314        }
3315        return retValue;
3316    }
3317
3318    /**
3319     * Returns the frame rate as integer
3320     *
3321     * @param fps The fps as enum
3322     *
3323     * @return The frame rate as integer
3324     */
3325    int getFrameRate(int fps) {
3326        int retValue = -1;
3327        switch (fps) {
3328            case VideoFrameRate.FR_5_FPS:
3329                retValue = 5;
3330                break;
3331            case VideoFrameRate.FR_7_5_FPS:
3332                retValue = 8;
3333                break;
3334            case VideoFrameRate.FR_10_FPS:
3335                retValue = 10;
3336                break;
3337            case VideoFrameRate.FR_12_5_FPS:
3338                retValue = 13;
3339                break;
3340            case VideoFrameRate.FR_15_FPS:
3341                retValue = 15;
3342                break;
3343            case VideoFrameRate.FR_20_FPS:
3344                retValue = 20;
3345                break;
3346            case VideoFrameRate.FR_25_FPS:
3347                retValue = 25;
3348                break;
3349            case VideoFrameRate.FR_30_FPS:
3350                retValue = 30;
3351                break;
3352
3353            default:
3354                retValue = -1;
3355        }
3356        return retValue;
3357    }
3358
3359    /**
3360     * Maps the file type used in JAVA layer
3361     * to file type used in native layer
3362     *
3363     * @param fileType The file type in JAVA layer
3364     *
3365     * @return The File type in native layer
3366     */
3367    int getMediaItemFileType(int fileType) {
3368        int retValue = -1;
3369
3370        switch (fileType) {
3371            case MediaProperties.FILE_UNSUPPORTED:
3372                retValue = FileType.UNSUPPORTED;
3373                break;
3374            case MediaProperties.FILE_3GP:
3375                retValue = FileType.THREE_GPP;
3376                break;
3377            case MediaProperties.FILE_MP4:
3378                retValue = FileType.MP4;
3379                break;
3380            case MediaProperties.FILE_JPEG:
3381                retValue = FileType.JPG;
3382                break;
3383            case MediaProperties.FILE_PNG:
3384                retValue = FileType.PNG;
3385                break;
3386            case MediaProperties.FILE_M4V:
3387                retValue = FileType.M4V;
3388                break;
3389
3390            default:
3391                retValue = -1;
3392        }
3393        return retValue;
3394
3395    }
3396
3397    /**
3398     * Maps the rendering mode used in native layer
3399     * to rendering mode used in JAVA layer
3400     *
3401     * @param renderingMode The rendering mode in JAVA layer
3402     *
3403     * @return The rendering mode in native layer
3404     */
3405    int getMediaItemRenderingMode(int renderingMode) {
3406        int retValue = -1;
3407        switch (renderingMode) {
3408            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3409                retValue = MediaRendering.BLACK_BORDERS;
3410                break;
3411            case MediaItem.RENDERING_MODE_STRETCH:
3412                retValue = MediaRendering.RESIZING;
3413                break;
3414            case MediaItem.RENDERING_MODE_CROPPING:
3415                retValue = MediaRendering.CROPPING;
3416                break;
3417
3418            default:
3419                retValue = -1;
3420        }
3421        return retValue;
3422    }
3423
3424    /**
3425     * Maps the transition behavior used in JAVA layer
3426     * to transition behavior used in native layer
3427     *
3428     * @param transitionType The transition behavior in JAVA layer
3429     *
3430     * @return The transition behavior in native layer
3431     */
3432    int getVideoTransitionBehaviour(int transitionType) {
3433        int retValue = -1;
3434        switch (transitionType) {
3435            case Transition.BEHAVIOR_SPEED_UP:
3436                retValue = TransitionBehaviour.SPEED_UP;
3437                break;
3438            case Transition.BEHAVIOR_SPEED_DOWN:
3439                retValue = TransitionBehaviour.SPEED_DOWN;
3440                break;
3441            case Transition.BEHAVIOR_LINEAR:
3442                retValue = TransitionBehaviour.LINEAR;
3443                break;
3444            case Transition.BEHAVIOR_MIDDLE_SLOW:
3445                retValue = TransitionBehaviour.SLOW_MIDDLE;
3446                break;
3447            case Transition.BEHAVIOR_MIDDLE_FAST:
3448                retValue = TransitionBehaviour.FAST_MIDDLE;
3449                break;
3450
3451            default:
3452                retValue = -1;
3453        }
3454        return retValue;
3455    }
3456
3457    /**
3458     * Maps the transition slide direction used in JAVA layer
3459     * to transition slide direction used in native layer
3460     *
3461     * @param slideDirection The transition slide direction
3462     * in JAVA layer
3463     *
3464     * @return The transition slide direction in native layer
3465     */
3466    int getSlideSettingsDirection(int slideDirection) {
3467        int retValue = -1;
3468        switch (slideDirection) {
3469            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3470                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3471                break;
3472            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3473                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3474                break;
3475            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3476                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3477                break;
3478            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3479                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3480                break;
3481
3482            default:
3483                retValue = -1;
3484        }
3485        return retValue;
3486    }
3487
3488    /**
3489     * Maps the effect color type used in JAVA layer
3490     * to effect color type used in native layer
3491     *
3492     * @param effect The EffectColor reference
3493     *
3494     * @return The color effect value from native layer
3495     */
3496    private int getEffectColorType(EffectColor effect) {
3497        int retValue = -1;
3498        switch (effect.getType()) {
3499            case EffectColor.TYPE_COLOR:
3500                if (effect.getColor() == EffectColor.GREEN) {
3501                    retValue = VideoEffect.GREEN;
3502                } else if (effect.getColor() == EffectColor.PINK) {
3503                    retValue = VideoEffect.PINK;
3504                } else if (effect.getColor() == EffectColor.GRAY) {
3505                    retValue = VideoEffect.BLACK_AND_WHITE;
3506                } else {
3507                    retValue = VideoEffect.COLORRGB16;
3508                }
3509                break;
3510            case EffectColor.TYPE_GRADIENT:
3511                retValue = VideoEffect.GRADIENT;
3512                break;
3513            case EffectColor.TYPE_SEPIA:
3514                retValue = VideoEffect.SEPIA;
3515                break;
3516            case EffectColor.TYPE_NEGATIVE:
3517                retValue = VideoEffect.NEGATIVE;
3518                break;
3519            case EffectColor.TYPE_FIFTIES:
3520                retValue = VideoEffect.FIFTIES;
3521                break;
3522
3523            default:
3524                retValue = -1;
3525        }
3526        return retValue;
3527    }
3528
3529    /**
3530     * Calculates video resolution for output clip
3531     * based on clip's height and aspect ratio of storyboard
3532     *
3533     * @param aspectRatio The aspect ratio of story board
3534     * @param height The height of clip
3535     *
3536     * @return The video resolution
3537     */
3538    private int findVideoResolution(int aspectRatio, int height) {
3539        final Pair<Integer, Integer>[] resolutions;
3540        final Pair<Integer, Integer> maxResolution;
3541        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3542        switch (aspectRatio) {
3543            case MediaProperties.ASPECT_RATIO_3_2:
3544                if (height == MediaProperties.HEIGHT_480)
3545                    retValue = VideoFrameSize.NTSC;
3546                else if (height == MediaProperties.HEIGHT_720)
3547                    retValue = VideoFrameSize.W720p;
3548                break;
3549            case MediaProperties.ASPECT_RATIO_16_9:
3550                if (height == MediaProperties.HEIGHT_480)
3551                    retValue = VideoFrameSize.WVGA16x9;
3552                else if (height == MediaProperties.HEIGHT_720)
3553                    retValue = VideoFrameSize.V720p;
3554                else if (height == MediaProperties.HEIGHT_1080)
3555                    retValue = VideoFrameSize.V1080p;
3556                break;
3557            case MediaProperties.ASPECT_RATIO_4_3:
3558                if (height == MediaProperties.HEIGHT_480)
3559                    retValue = VideoFrameSize.VGA;
3560                else if (height == MediaProperties.HEIGHT_720)
3561                    retValue = VideoFrameSize.S720p;
3562                break;
3563            case MediaProperties.ASPECT_RATIO_5_3:
3564                if (height == MediaProperties.HEIGHT_480)
3565                    retValue = VideoFrameSize.WVGA;
3566                break;
3567            case MediaProperties.ASPECT_RATIO_11_9:
3568                if (height == MediaProperties.HEIGHT_144)
3569                    retValue = VideoFrameSize.QCIF;
3570                else if (height == MediaProperties.HEIGHT_288)
3571                    retValue = VideoFrameSize.CIF;
3572                break;
3573        }
3574        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3575            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3576            // Get the highest resolution
3577            maxResolution = resolutions[resolutions.length - 1];
3578            retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
3579        }
3580
3581        return retValue;
3582    }
3583
3584    /**
3585     * This method is responsible for exporting a movie
3586     *
3587     * @param filePath The output file path
3588     * @param projectDir The output project directory
3589     * @param height The height of clip
3590     * @param bitrate The bitrate at which the movie should be exported
3591     * @param mediaItemsList The media items list
3592     * @param mediaTransitionList The transitions list
3593     * @param mediaBGMList The background track list
3594     * @param listener The ExportProgressListener
3595     *
3596     */
3597    void export(String filePath, String projectDir, int height, int bitrate,
3598            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3599            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3600
3601        int outBitrate = 0;
3602        mExportFilename = filePath;
3603        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
3604        mExportProgressListener = listener;
3605
3606        mProgressToApp = 0;
3607
3608        switch (bitrate) {
3609            case MediaProperties.BITRATE_28K:
3610                outBitrate = Bitrate.BR_32_KBPS;
3611                break;
3612            case MediaProperties.BITRATE_40K:
3613                outBitrate = Bitrate.BR_48_KBPS;
3614                break;
3615            case MediaProperties.BITRATE_64K:
3616                outBitrate = Bitrate.BR_64_KBPS;
3617                break;
3618            case MediaProperties.BITRATE_96K:
3619                outBitrate = Bitrate.BR_96_KBPS;
3620                break;
3621            case MediaProperties.BITRATE_128K:
3622                outBitrate = Bitrate.BR_128_KBPS;
3623                break;
3624            case MediaProperties.BITRATE_192K:
3625                outBitrate = Bitrate.BR_192_KBPS;
3626                break;
3627            case MediaProperties.BITRATE_256K:
3628                outBitrate = Bitrate.BR_256_KBPS;
3629                break;
3630            case MediaProperties.BITRATE_384K:
3631                outBitrate = Bitrate.BR_384_KBPS;
3632                break;
3633            case MediaProperties.BITRATE_512K:
3634                outBitrate = Bitrate.BR_512_KBPS;
3635                break;
3636            case MediaProperties.BITRATE_800K:
3637                outBitrate = Bitrate.BR_800_KBPS;
3638                break;
3639            case MediaProperties.BITRATE_2M:
3640                outBitrate = Bitrate.BR_2_MBPS;
3641                break;
3642
3643            case MediaProperties.BITRATE_5M:
3644                outBitrate = Bitrate.BR_5_MBPS;
3645                break;
3646            case MediaProperties.BITRATE_8M:
3647                outBitrate = Bitrate.BR_8_MBPS;
3648                break;
3649
3650            default:
3651                throw new IllegalArgumentException("Argument Bitrate incorrect");
3652        }
3653        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3654        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3655
3656        int aspectRatio = mVideoEditor.getAspectRatio();
3657        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3658        mPreviewEditSettings.videoFormat = VideoFormat.H264;
3659        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3660        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3661        mPreviewEditSettings.maxFileSize = 0;
3662        mPreviewEditSettings.audioChannels = 2;
3663        mPreviewEditSettings.videoBitrate = outBitrate;
3664        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3665
3666        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3667        for (int index = 0; index < mTotalClips - 1; index++) {
3668            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3669            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3670                VideoTransition.NONE;
3671            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3672                AudioTransition.NONE;
3673        }
3674
3675        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3676            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3677                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3678                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3679            }
3680        }
3681        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3682
3683        int err = 0;
3684        try {
3685            mProcessingState  = PROCESSING_EXPORT;
3686            mProcessingObject = null;
3687            err = generateClip(mPreviewEditSettings);
3688            mProcessingState  = PROCESSING_NONE;
3689        } catch (IllegalArgumentException ex) {
3690            Log.e(TAG, "IllegalArgument for generateClip");
3691            throw ex;
3692        } catch (IllegalStateException ex) {
3693            Log.e(TAG, "IllegalStateExceptiont for generateClip");
3694            throw ex;
3695        } catch (RuntimeException ex) {
3696            Log.e(TAG, "RuntimeException for generateClip");
3697            throw ex;
3698        }
3699
3700        if (err != 0) {
3701            Log.e(TAG, "RuntimeException for generateClip");
3702            throw new RuntimeException("generateClip failed with error=" + err);
3703        }
3704
3705        mExportProgressListener = null;
3706    }
3707
3708    /**
3709     * This methods takes care of stopping the Export process
3710     *
3711     * @param The input file name for which export has to be stopped
3712     */
3713    void stop(String filename) {
3714        try {
3715            stopEncoding();
3716            new File(mExportFilename).delete();
3717        } catch (IllegalStateException ex) {
3718            Log.e(TAG, "Illegal state exception in unload settings");
3719            throw ex;
3720        } catch (RuntimeException ex) {
3721            Log.e(TAG, "Runtime exception in unload settings");
3722            throw ex;
3723        }
3724    }
3725
3726    /**
3727     * This method extracts a frame from the input file
3728     * and returns the frame as a bitmap
3729     *
3730     * @param inputFile The inputFile
3731     * @param width The width of the output frame
3732     * @param height The height of the output frame
3733     * @param timeMS The time in ms at which the frame has to be extracted
3734     */
3735    Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3736        if (inputFile == null) {
3737            throw new IllegalArgumentException("Invalid input file");
3738        }
3739
3740        /* Make width and height as even */
3741        final int newWidth = (width + 1) & 0xFFFFFFFE;
3742        final int newHeight = (height + 1) & 0xFFFFFFFE;
3743
3744        /* Create a temp bitmap for resized thumbnails */
3745        Bitmap tempBitmap = null;
3746        if ((newWidth != width) || (newHeight != height)) {
3747             tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3748        }
3749
3750        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4);
3751        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3752        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);
3753
3754        if ((newWidth == width) && (newHeight == height)) {
3755            bitmap.copyPixelsFromBuffer(rgb888);
3756        } else {
3757            /* Create a temp bitmap to be used for resize */
3758            tempBitmap.copyPixelsFromBuffer(rgb888);
3759
3760            /* Create a canvas to resize */
3761            final Canvas canvas = new Canvas(bitmap);
3762            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3763                                          new Rect(0, 0, width, height), sResizePaint);
3764        }
3765
3766        if (tempBitmap != null) {
3767            tempBitmap.recycle();
3768        }
3769
3770        return bitmap;
3771    }
3772
3773    /**
3774     * This method extracts a list of frame from the
3775     * input file and returns the frame in bitmap array
3776     *
3777     * @param filename The inputFile
3778     * @param width The width of the output frame
3779     * @param height The height of the output frame
3780     * @param startMs The starting time in ms
3781     * @param endMs The end time in ms
3782     * @param thumbnailCount The number of frames to be extracted
3783     * from startMs to endMs
3784     *
3785     * @return The frames as bitmaps in bitmap array
3786     **/
3787    Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
3788            int thumbnailCount) {
3789        int[] rgb888 = null;
3790        int thumbnailSize = 0;
3791        Bitmap tempBitmap = null;
3792
3793        /* Make width and height as even */
3794        final int newWidth = (width + 1) & 0xFFFFFFFE;
3795        final int newHeight = (height + 1) & 0xFFFFFFFE;
3796        thumbnailSize = newWidth * newHeight * 4;
3797
3798        /* Create a temp bitmap for resized thumbnails */
3799        if ((newWidth != width) || (newHeight != height)) {
3800            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3801        }
3802        int i = 0;
3803        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
3804        Bitmap[] bitmaps = null;
3805
3806        try {
3807            // This may result in out of Memory Error
3808            rgb888 = new int[thumbnailSize * thumbnailCount];
3809            bitmaps = new Bitmap[thumbnailCount];
3810        } catch (Throwable e) {
3811            // Allocating to new size with Fixed count
3812            try {
3813                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
3814                bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED];
3815                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
3816            } catch (Throwable ex) {
3817                throw new RuntimeException("Memory allocation fails, thumbnail count too large: "
3818                        + thumbnailCount);
3819            }
3820        }
3821        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
3822        nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount,
3823                startMs, endMs);
3824
3825        for (; i < thumbnailCount; i++) {
3826            bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3827            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
3828            tmpBuffer.rewind();
3829
3830            if ((newWidth == width) && (newHeight == height)) {
3831                bitmaps[i].copyPixelsFromBuffer(tmpBuffer);
3832            } else {
3833                /* Copy the out rgb buffer to temp bitmap */
3834                tempBitmap.copyPixelsFromBuffer(tmpBuffer);
3835
3836                /* Create a canvas to resize */
3837                final Canvas canvas = new Canvas(bitmaps[i]);
3838                canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3839                                              new Rect(0, 0, width, height), sResizePaint);
3840            }
3841        }
3842
3843        if (tempBitmap != null) {
3844            tempBitmap.recycle();
3845        }
3846
3847        return bitmaps;
3848    }
3849
3850    /**
3851     * This method generates the audio graph
3852     *
3853     * @param uniqueId The unique id
3854     * @param inFileName The inputFile
3855     * @param OutAudiGraphFileName output filename
3856     * @param frameDuration The each frame duration
3857     * @param audioChannels The number of audio channels
3858     * @param samplesCount Total number of samples count
3859     * @param listener ExtractAudioWaveformProgressListener reference
3860     * @param isVideo The flag to indicate if the file is video file or not
3861     *
3862     **/
3863    void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
3864            int frameDuration, int audioChannels, int samplesCount,
3865            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
3866        String tempPCMFileName;
3867
3868        mExtractAudioWaveformProgressListener = listener;
3869
3870        /**
3871         * In case of Video, first call will generate the PCM file to make the
3872         * audio graph
3873         */
3874        if (isVideo) {
3875            tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
3876        } else {
3877            tempPCMFileName = mAudioTrackPCMFilePath;
3878        }
3879
3880        /**
3881         * For Video item, generate the PCM
3882         */
3883        if (isVideo) {
3884            nativeGenerateRawAudio(inFileName, tempPCMFileName);
3885        }
3886
3887        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
3888                audioChannels, samplesCount);
3889
3890        /**
3891         * Once the audio graph file is generated, delete the pcm file
3892         */
3893        if (isVideo) {
3894            new File(tempPCMFileName).delete();
3895        }
3896    }
3897
3898    void clearPreviewSurface(Surface surface) {
3899        nativeClearSurface(surface);
3900    }
3901
3902    /**
3903     * Grab the semaphore which arbitrates access to the editor
3904     *
3905     * @throws InterruptedException
3906     */
3907    private void lock() throws InterruptedException {
3908        if (Log.isLoggable(TAG, Log.DEBUG)) {
3909            Log.d(TAG, "lock: grabbing semaphore", new Throwable());
3910        }
3911        mLock.acquire();
3912        if (Log.isLoggable(TAG, Log.DEBUG)) {
3913            Log.d(TAG, "lock: grabbed semaphore");
3914        }
3915    }
3916
3917    /**
3918     * Release the semaphore which arbitrates access to the editor
3919     */
3920    private void unlock() {
3921        if (Log.isLoggable(TAG, Log.DEBUG)) {
3922            Log.d(TAG, "unlock: releasing semaphore");
3923        }
3924        mLock.release();
3925    }
3926
3927    /**     Native Methods        */
3928    native Properties getMediaProperties(String file) throws IllegalArgumentException,
3929            IllegalStateException, RuntimeException, Exception;
3930
3931    /**
3932     * Get the version of ManualEdit.
3933     *
3934     * @return version of ManualEdit
3935     * @throws RuntimeException if an error occurred
3936     * @see Version
3937     */
3938    private static native Version getVersion() throws RuntimeException;
3939
3940    /**
3941     * Returns the video thumbnail in an array of integers. Output format is
3942     * ARGB8888.
3943     *
3944     * @param pixelArray the array that receives the pixel values
3945     * @param width width of the video thumbnail
3946     * @param height height of the video thumbnail
3947     * @param timeMS desired time of the thumbnail in ms
3948     * @return actual time in ms of the thumbnail generated
3949     * @throws IllegalStateException if the class has not been initialized
3950     * @throws IllegalArgumentException if the pixelArray is not available or
3951     *             one of the dimensions is negative or zero or the time is
3952     *             negative
3953     * @throws RuntimeException on runtime errors in native code
3954     */
3955    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
3956            long timeMS);
3957
3958    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
3959            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
3960
3961    /**
3962     * Releases the JNI and cleans up the core native module.. Should be called
3963     * only after init( )
3964     *
3965     * @throws IllegalStateException if the method could not be called
3966     */
3967    private native void release() throws IllegalStateException, RuntimeException;
3968
3969    /*
3970     * Clear the preview surface
3971     */
3972    private native void nativeClearSurface(Surface surface);
3973
3974    /**
3975     * Stops the encoding. This method should only be called after encoding has
3976     * started using method <code> startEncoding</code>
3977     *
3978     * @throws IllegalStateException if the method could not be called
3979     */
3980    private native void stopEncoding() throws IllegalStateException, RuntimeException;
3981
3982
3983    private native void _init(String tempPath, String libraryPath)
3984            throws IllegalArgumentException, IllegalStateException, RuntimeException;
3985
3986    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
3987            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
3988            IllegalStateException, RuntimeException;
3989
3990    private native void nativePopulateSettings(EditSettings editSettings,
3991            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
3992    throws IllegalArgumentException, IllegalStateException, RuntimeException;
3993
3994    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
3995                                                 int surfaceWidth, int surfaceHeight)
3996                                                 throws IllegalArgumentException,
3997                                                 IllegalStateException, RuntimeException;
3998
3999    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
4000            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
4001    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4002
4003    private native int nativeStopPreview();
4004
4005    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
4006            int frameDuration, int channels, int sampleCount);
4007
4008    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
4009
4010    private native int nativeGenerateClip(EditSettings editSettings)
4011    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4012
4013}
4014