MediaArtistNativeHelper.java revision 9bcedf7cf3e9c981837f2d8ec98cd118efad3f01
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.ArrayList;
24import java.util.Iterator;
25import java.util.List;
26import android.graphics.Bitmap;
27import android.media.videoeditor.VideoEditor.ExportProgressListener;
28import android.media.videoeditor.VideoEditor.PreviewProgressListener;
29import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
30import android.util.Log;
31import android.util.Pair;
32import android.view.Surface;
33
34/**
35 * This class provides native methods to be used by MediaArtist. {@hide}
36 */
37class MediaArtistNativeHelper {
38
39    static {
40        System.loadLibrary("videoeditor_jni");
41    }
42
43    private final int MAX_THUMBNAIL_PERMITTED = 8;
44
45    private final VideoEditor mVideoEditor;
46
47    public EditSettings mStoryBoardSettings;
48
49    private String mOutputFilename;
50
51    EditSettings mEditSettings = null;
52
53    PreviewClipProperties mClipProperties = null;
54
55    private EditSettings mPreviewEditSettings;
56
57    private AudioSettings mAudioSettings = null;
58
59    private AudioTrack mAudioTrack = null;
60
61    public boolean mInvalidatePreviewArray = true;
62
63    private boolean mRegenerateAudio = true;
64
65    private String mExportFilename = null;
66
67    private boolean mExportDone = false;
68
69    private int mProgressToApp;
70
71
72    public static final int TASK_LOADING_SETTINGS = 1;
73
74    public static final int TASK_ENCODING = 2;
75
76    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
77
78    // Processing indication
79    public static final int PROCESSING_NONE          = 0;
80    public static final int PROCESSING_AUDIO_PCM     = 1;
81    public static final int PROCESSING_TRANSITION    = 2;
82    public static final int PROCESSING_KENBURNS      = 3;
83    public static final int PROCESSING_INTERMEDIATE1 = 11;
84    public static final int PROCESSING_INTERMEDIATE2 = 12;
85    public static final int PROCESSING_INTERMEDIATE3 = 13;
86    public static final int PROCESSING_EXPORT        = 20;
87
88    private int    mProcessingState;
89    private Object mProcessingObject;
90
91    private PreviewProgressListener mPreviewProgressListener;
92    private ExportProgressListener mExportProgressListener;
93    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
94    private MediaProcessingProgressListener      mMediaProcessingProgressListener;
95    private final String mProjectPath;
96
97    private long mPreviewProgress;
98
99    private String mAudioTrackPCMFilePath;
100
101    int mTotalClips = 0;
102
103    int mPreviewEffectsSize = 0;
104
105    private boolean mErrorFlagSet = false;
106
107    @SuppressWarnings("unused")
108    private int mManualEditContext;
109
110
111    List<Effect> mMediaEffectList;
112
113    List<Overlay> mMediaOverLayList;
114
115    /* Listeners */
116
117    /**
118     * Interface definition for a listener to be invoked when there is an update
119     * in a running task.
120     */
121    public interface OnProgressUpdateListener {
122        /**
123         * Called when there is an update.
124         *
125         * @param taskId id of the task reporting an update.
126         * @param progress progress of the task [0..100].
127         * @see #TASK_ENCODING
128         */
129        public void OnProgressUpdate(int taskId, int progress);
130    }
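
    /*
     * Illustrative sketch only (not part of the original file), assuming the
     * listener is registered elsewhere: a minimal OnProgressUpdateListener
     * that simply logs the reported task id and progress.
     */
    @SuppressWarnings("unused")
    private static final OnProgressUpdateListener SAMPLE_PROGRESS_LOGGER =
            new OnProgressUpdateListener() {
                public void OnProgressUpdate(int taskId, int progress) {
                    // progress is reported in the range [0..100]
                    Log.d("MediaArtistNativeHelper", "Task " + taskId
                            + " progress: " + progress + "%");
                }
            };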
131
132    /** Defines the version. */
133    public final class Version {
134
135        /** Major version number */
136        public int major;
137
138        /** Minor version number */
139        public int minor;
140
141        /** Revision number */
142        public int revision;
143
144        /** VIDEOEDITOR major version number */
145        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
146
147        /** VIDEOEDITOR minor version number */
148        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
149
150        /** VIDEOEDITOR revision number */
151        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
152
153        /** Method which returns the current VIDEOEDITOR version */
154        public Version getVersion() {
155            Version version = new Version();
156
157            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
158            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
159            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
160
161            return version;
162        }
163    }
164
165    /**
166     * Defines output audio formats.
167     */
168    public final class AudioFormat {
169        /** No audio present in output clip. Used to generate a video-only clip. */
170        public static final int NO_AUDIO = 0;
171
172        /** AMR Narrow Band. */
173        public static final int AMR_NB = 1;
174
175        /** Advanced Audio Coding (AAC). */
176        public static final int AAC = 2;
177
178        /** Advanced Audio Codec Plus (HE-AAC v1). */
179        public static final int AAC_PLUS = 3;
180
181        /** Advanced Audio Codec Plus (HE-AAC v2). */
182        public static final int ENHANCED_AAC_PLUS = 4;
183
184        /** MPEG layer 3 (MP3). */
185        public static final int MP3 = 5;
186
187        /** Enhanced Variable Rate Codec (EVRC). */
188        public static final int EVRC = 6;
189
190        /** Pulse Code Modulation (PCM). */
191        public static final int PCM = 7;
192
193        /** No transcoding. Output audio format is same as input audio format */
194        public static final int NULL_AUDIO = 254;
195
196        /** Unsupported audio format. */
197        public static final int UNSUPPORTED_AUDIO = 255;
198    }
199
200    /**
201     * Defines audio sampling frequencies.
202     */
203    public final class AudioSamplingFrequency {
204        /**
205         * Default sampling frequency. Uses the default frequency for a specific
206         * audio format. For AAC the only supported (and thus default) sampling
207         * frequency is 16 kHz; for this audio format the sampling frequency set
208         * in the OutputParams is ignored.
209         **/
210        public static final int FREQ_DEFAULT = 0;
211
212        /** Audio sampling frequency of 8000 Hz. */
213        public static final int FREQ_8000 = 8000;
214
215        /** Audio sampling frequency of 11025 Hz. */
216        public static final int FREQ_11025 = 11025;
217
218        /** Audio sampling frequency of 12000 Hz. */
219        public static final int FREQ_12000 = 12000;
220
221        /** Audio sampling frequency of 16000 Hz. */
222        public static final int FREQ_16000 = 16000;
223
224        /** Audio sampling frequency of 22050 Hz. */
225        public static final int FREQ_22050 = 22050;
226
227        /** Audio sampling frequency of 24000 Hz. */
228        public static final int FREQ_24000 = 24000;
229
230        /** Audio sampling frequency of 32000 Hz. */
231        public static final int FREQ_32000 = 32000;
232
233        /** Audio sampling frequency of 44100 Hz. */
234        public static final int FREQ_44100 = 44100;
235
236        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
237        public static final int FREQ_48000 = 48000;
238    }
239
240    /**
241     * Defines the supported fixed audio and video bitrates. These values are
242     * for the output audio and video only.
243     */
244    public final class Bitrate {
245        /** Variable bitrate. Means no bitrate regulation */
246        public static final int VARIABLE = -1;
247
248        /** An undefined bitrate. */
249        public static final int UNDEFINED = 0;
250
251        /** A bitrate of 9.2 kbits/s. */
252        public static final int BR_9_2_KBPS = 9200;
253
254        /** A bitrate of 12.2 kbits/s. */
255        public static final int BR_12_2_KBPS = 12200;
256
257        /** A bitrate of 16 kbits/s. */
258        public static final int BR_16_KBPS = 16000;
259
260        /** A bitrate of 24 kbits/s. */
261        public static final int BR_24_KBPS = 24000;
262
263        /** A bitrate of 32 kbits/s. */
264        public static final int BR_32_KBPS = 32000;
265
266        /** A bitrate of 48 kbits/s. */
267        public static final int BR_48_KBPS = 48000;
268
269        /** A bitrate of 64 kbits/s. */
270        public static final int BR_64_KBPS = 64000;
271
272        /** A bitrate of 96 kbits/s. */
273        public static final int BR_96_KBPS = 96000;
274
275        /** A bitrate of 128 kbits/s. */
276        public static final int BR_128_KBPS = 128000;
277
278        /** A bitrate of 192 kbits/s. */
279        public static final int BR_192_KBPS = 192000;
280
281        /** A bitrate of 256 kbits/s. */
282        public static final int BR_256_KBPS = 256000;
283
284        /** A bitrate of 288 kbits/s. */
285        public static final int BR_288_KBPS = 288000;
286
287        /** A bitrate of 384 kbits/s. */
288        public static final int BR_384_KBPS = 384000;
289
290        /** A bitrate of 512 kbits/s. */
291        public static final int BR_512_KBPS = 512000;
292
293        /** A bitrate of 800 kbits/s. */
294        public static final int BR_800_KBPS = 800000;
295
296        /** A bitrate of 2 Mbits/s. */
297        public static final int BR_2_MBPS = 2000000;
298
299        /** A bitrate of 5 Mbits/s. */
300        public static final int BR_5_MBPS = 5000000;
301
302        /** A bitrate of 8 Mbits/s. */
303        public static final int BR_8_MBPS = 8000000;
304    }
305
306    /**
307     * Defines all supported file types.
308     */
309    public final class FileType {
310        /** 3GPP file type. */
311        public static final int THREE_GPP = 0;
312
313        /** MP4 file type. */
314        public static final int MP4 = 1;
315
316        /** AMR file type. */
317        public static final int AMR = 2;
318
319        /** MP3 audio file type. */
320        public static final int MP3 = 3;
321
322        /** PCM audio file type. */
323        public static final int PCM = 4;
324
325        /** JPEG image file type. */
326        public static final int JPG = 5;
327
328        /** GIF image file type. */
329        public static final int GIF = 6;
330
331        /** PNG image file type. */
332        public static final int PNG = 7;
333
334        /** Unsupported file type. */
335        public static final int UNSUPPORTED = 255;
336    }
337
338    /**
339     * Defines rendering types. Rendering can only be applied to files
340     * containing video streams.
341     **/
342    public final class MediaRendering {
343        /**
344         * Resize to fit the output video, changing the aspect ratio if
345         * needed.
346         */
347        public static final int RESIZING = 0;
348
349        /**
350         * Crop the input video to fit the output video resolution.
351         **/
352        public static final int CROPPING = 1;
353
354        /**
355         * Resize to fit the output video resolution but maintain the aspect
356         * ratio. This framing type adds black borders if needed.
357         */
358        public static final int BLACK_BORDERS = 2;
359    }
360
361    /**
362     * Defines the results.
363     */
364    public final class Result {
365        /** No error; the result is OK. */
366        public static final int NO_ERROR = 0;
367
368        /** File not found */
369        public static final int ERR_FILE_NOT_FOUND = 1;
370
371        /**
372         * In case of UTF8 conversion, the size of the converted path will be
373         * more than the corresponding allocated buffer.
374         */
375        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
376
377        /** Invalid file type. */
378        public static final int ERR_INVALID_FILE_TYPE = 3;
379
380        /** Invalid effect kind. */
381        public static final int ERR_INVALID_EFFECT_KIND = 4;
382
383        /** Invalid video effect. */
384        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
385
386        /** Invalid audio effect. */
387        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
388
389        /** Invalid video transition. */
390        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
391
392        /** Invalid audio transition. */
393        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
394
395        /** Invalid encoding frame rate. */
396        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
397
398        /** External effect is called but this function is not set. */
399        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
400
401        /** External transition is called but this function is not set. */
402        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
403
404        /** Begin time cut is larger than the video clip duration. */
405        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
406
407        /** Begin cut time is larger than or equal to the end cut time. */
408        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
409
410        /** Two consecutive transitions are overlapping on one clip. */
411        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
412
413        /** Internal error, type size mismatch. */
414        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
415
416        /** An input 3GPP file is invalid/corrupted. */
417        public static final int ERR_INVALID_3GPP_FILE = 16;
418
419        /** A file contains an unsupported video format. */
420        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
421
422        /** A file contains an unsupported audio format. */
423        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
424
425        /** A file format is not supported. */
426        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
427
428        /** An input clip has an unexpectedly large Video AU. */
429        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
430
431        /** An input clip has an unexpectedly large Audio AU. */
432        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
433
434        /** An input clip has a corrupted Audio AU. */
435        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
436
437        /** The video encoder encountered an Access Unit error. */
438        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
439
440        /** Unsupported video format for Video Editing. */
441        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
442
443        /** Unsupported H263 profile for Video Editing. */
444        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
445
446        /** Unsupported MPEG-4 profile for Video Editing. */
447        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
448
449        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
450        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
451
452        /** Unsupported audio format for Video Editing. */
453        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
454
455        /** File contains no supported stream. */
456        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
457
458        /** File contains no video stream or an unsupported video stream. */
459        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
460
461        /** Internal error, clip analysis version mismatch. */
462        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
463
464        /**
465         * At least one of the clip analyses was generated on another
466         * platform (WIN32, ARM, etc.).
467         */
468        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
469
470        /** Clips don't have the same video format (H263 or MPEG4). */
471        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
472
473        /** Clips don't have the same frame size. */
474        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
475
476        /** Clips don't have the same MPEG-4 time scale. */
477        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
478
479        /** Clips don't have the same use of MPEG-4 data partitioning. */
480        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
481
482        /** MP3 clips can't be assembled. */
483        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
484
485        /**
486         * The input 3GPP file does not contain any supported audio or video
487         * track.
488         */
489        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
490
491        /**
492         * The volume of the added audio track (AddVolume) must be strictly
493         * greater than zero.
494         */
495        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
496
497        /**
498         * The time at which an audio track is added can't be higher than the
499         * input video track duration.
500         */
501        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
502
503        /** The audio track file format setting is undefined. */
504        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
505
506        /** The added audio track stream has an unsupported format. */
507        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
508
509        /** The audio mixing feature doesn't support the audio track type. */
510        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
511
512        /** The audio mixing feature doesn't support MP3 audio tracks. */
513        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
514
515        /**
516         * An added audio track limits the available features: uiAddCts must be
517         * 0 and bRemoveOriginal must be true.
518         */
519        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
520
521        /**
522         * An added AAC audio track limits the available features: uiAddCts must be
523         * 0 and bRemoveOriginal must be true.
524         */
525        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
526
527        /** Input audio track is not of a type that can be mixed with output. */
528        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
529
530        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
531        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
532
533        /**
534         * An added EVRC audio track limits the available features: uiAddCts must
535         * be 0 and bRemoveOriginal must be true.
536         */
537        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
538
539        /** H263 profiles other than 0 are not supported. */
540        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
541
542        /** File contains no video stream or an unsupported video stream. */
543        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
544
545        /** Transcoding of the input file(s) is necessary. */
546        public static final int WAR_TRANSCODING_NECESSARY = 53;
547
548        /**
549         * The size of the output file will exceed the maximum configured value.
550         */
551        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
552
553        /** The time scale is too big. */
554        public static final int WAR_TIMESCALE_TOO_BIG = 55;
555
556        /** The year is out of range */
557        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
558
559        /** The directory could not be opened */
560        public static final int ERR_DIR_OPEN_FAILED = 57;
561
562        /** The directory could not be read */
563        public static final int ERR_DIR_READ_FAILED = 58;
564
565        /** There are no more entries in the current directory */
566        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
567
568        /** One or more input parameters are invalid */
569        public static final int ERR_PARAMETER = 60;
570
571        /** There is a state machine error */
572        public static final int ERR_STATE = 61;
573
574        /** Memory allocation failed */
575        public static final int ERR_ALLOC = 62;
576
577        /** Context is invalid */
578        public static final int ERR_BAD_CONTEXT = 63;
579
580        /** Context creation failed */
581        public static final int ERR_CONTEXT_FAILED = 64;
582
583        /** Invalid stream ID */
584        public static final int ERR_BAD_STREAM_ID = 65;
585
586        /** Invalid option ID */
587        public static final int ERR_BAD_OPTION_ID = 66;
588
589        /** The option is write only */
590        public static final int ERR_WRITE_ONLY = 67;
591
592        /** The option is read only */
593        public static final int ERR_READ_ONLY = 68;
594
595        /** The feature is not implemented in this version */
596        public static final int ERR_NOT_IMPLEMENTED = 69;
597
598        /** The media type is not supported */
599        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
600
601        /** No data to be encoded */
602        public static final int WAR_NO_DATA_YET = 71;
603
604        /** No data to be decoded */
605        public static final int WAR_NO_MORE_STREAM = 72;
606
607        /** Time stamp is invalid */
608        public static final int WAR_INVALID_TIME = 73;
609
610        /** No more data to be decoded */
611        public static final int WAR_NO_MORE_AU = 74;
612
613        /** Semaphore timed out */
614        public static final int WAR_TIME_OUT = 75;
615
616        /** Memory buffer is full */
617        public static final int WAR_BUFFER_FULL = 76;
618
619        /** Server has asked for redirection */
620        public static final int WAR_REDIRECT = 77;
621
622        /** Too many streams in input */
623        public static final int WAR_TOO_MUCH_STREAMS = 78;
624
625        /** The file cannot be opened or written to because it is locked */
626        public static final int ERR_FILE_LOCKED = 79;
627
628        /** The file access mode is invalid */
629        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
630
631        /** The file pointer points to an invalid location */
632        public static final int ERR_FILE_INVALID_POSITION = 81;
633
634        /** Invalid string */
635        public static final int ERR_STR_BAD_STRING = 94;
636
637        /** The input string cannot be converted */
638        public static final int ERR_STR_CONV_FAILED = 95;
639
640        /** The string size is too large */
641        public static final int ERR_STR_OVERFLOW = 96;
642
643        /** Bad string arguments */
644        public static final int ERR_STR_BAD_ARGS = 97;
645
646        /** The string value is larger than maximum size allowed */
647        public static final int WAR_STR_OVERFLOW = 98;
648
649        /** The string value is not present in this comparison operation */
650        public static final int WAR_STR_NOT_FOUND = 99;
651
652        /** The thread is not started */
653        public static final int ERR_THREAD_NOT_STARTED = 100;
654
655        /** Transcoding done warning */
656        public static final int WAR_TRANSCODING_DONE = 101;
657
658        /** Unsupported media type */
659        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
660
661        /** Input file contains invalid/unsupported streams */
662        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
663
664        /** Invalid input file */
665        public static final int ERR_INVALID_INPUT_FILE = 104;
666
667        /** Invalid output video format */
668        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
669
670        /** Invalid output video frame size */
671        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
672
673        /** Invalid output video frame rate */
674        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
675
676        /** Invalid output audio format */
677        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
678
679        /** Invalid video frame size for H.263 */
680        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
681
682        /** Invalid video frame rate for H.263 */
683        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
684
685        /** Invalid playback duration */
686        public static final int ERR_DURATION_IS_NULL = 111;
687
688        /** Invalid H.263 profile in file */
689        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
690
691        /** Invalid AAC sampling frequency */
692        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
693
694        /** Audio conversion failure */
695        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
696
697        /** Invalid trim start and end times */
698        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
699
700        /** End time smaller than start time for trim */
701        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
702
703        /** Maximum output file size is too small */
704        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
705
706        /** Output video bitrate is too low */
707        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
708
709        /** Output audio bitrate is too low */
710        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
711
712        /** Output video bitrate is too high */
713        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
714
715        /** Output audio bitrate is too high */
716        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
717
718        /** Output file size is too small */
719        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
720
721        /** Unknown stream type */
722        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
723
724        /** Invalid metadata in input stream */
725        public static final int WAR_READER_NO_METADATA = 124;
726
727        /** Invalid file reader info warning */
728        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
729
730        /** Warning to indicate that the writer is being stopped */
731        public static final int WAR_WRITER_STOP_REQ = 131;
732
733        /** Video decoder failed to provide frame for transcoding */
734        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
735
736        /** Video deblocking filter is not implemented */
737        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
738
739        /** H.263 decoder profile not supported */
740        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
741
742        /** The input file contains an unsupported H.263 profile */
743        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
744
745        /** There is no more space to store the output file */
746        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
747
748        /** Internal error. */
749        public static final int ERR_INTERNAL = 255;
750
751    }
752
753    /**
754     * Defines output video formats.
755     */
756    public final class VideoFormat {
757        /** No video present in output clip. Used to generate an audio-only clip. */
758        public static final int NO_VIDEO = 0;
759
760        /** H263 baseline format. */
761        public static final int H263 = 1;
762
763        /** MPEG4 video Simple Profile format. */
764        public static final int MPEG4 = 2;
765
766        /** MPEG4 video Simple Profile format with support for EMP. */
767        public static final int MPEG4_EMP = 3;
768
769        /** H264 video */
770        public static final int H264 = 4;
771
772        /** No transcoding. Output video format is same as input video format */
773        public static final int NULL_VIDEO = 254;
774
775        /** Unsupported video format. */
776        public static final int UNSUPPORTED = 255;
777    }
778
779    /** Defines video profiles and levels. */
780    public final class VideoProfile {
781        /** MPEG4, Simple Profile, Level 0. */
782        public static final int MPEG4_SP_LEVEL_0 = 0;
783
784        /** MPEG4, Simple Profile, Level 0B. */
785        public static final int MPEG4_SP_LEVEL_0B = 1;
786
787        /** MPEG4, Simple Profile, Level 1. */
788        public static final int MPEG4_SP_LEVEL_1 = 2;
789
790        /** MPEG4, Simple Profile, Level 2. */
791        public static final int MPEG4_SP_LEVEL_2 = 3;
792
793        /** MPEG4, Simple Profile, Level 3. */
794        public static final int MPEG4_SP_LEVEL_3 = 4;
795
796        /** H263, Profile 0, Level 10. */
797        public static final int H263_PROFILE_0_LEVEL_10 = 5;
798
799        /** H263, Profile 0, Level 20. */
800        public static final int H263_PROFILE_0_LEVEL_20 = 6;
801
802        /** H263, Profile 0, Level 30. */
803        public static final int H263_PROFILE_0_LEVEL_30 = 7;
804
805        /** H263, Profile 0, Level 40. */
806        public static final int H263_PROFILE_0_LEVEL_40 = 8;
807
808        /** H263, Profile 0, Level 45. */
809        public static final int H263_PROFILE_0_LEVEL_45 = 9;
810
811        /** MPEG4, Simple Profile, Level 4A. */
812        public static final int MPEG4_SP_LEVEL_4A = 10;
813
814        /** MPEG4, Simple Profile, Level 5. */
815        public static final int MPEG4_SP_LEVEL_5 = 11;
816
817        /** H264, Profile 0, Level 1. */
818        public static final int H264_PROFILE_0_LEVEL_1 = 12;
819
820        /** H264, Profile 0, Level 1b. */
821        public static final int H264_PROFILE_0_LEVEL_1b = 13;
822
823        /** H264, Profile 0, Level 1.1 */
824        public static final int H264_PROFILE_0_LEVEL_1_1 = 14;
825
826        /** H264, Profile 0, Level 1.2 */
827        public static final int H264_PROFILE_0_LEVEL_1_2 = 15;
828
829        /** H264, Profile 0, Level 1.3 */
830        public static final int H264_PROFILE_0_LEVEL_1_3 = 16;
831
832        /** H264, Profile 0, Level 2. */
833        public static final int H264_PROFILE_0_LEVEL_2 = 17;
834
835        /** H264, Profile 0, Level 2.1 */
836        public static final int H264_PROFILE_0_LEVEL_2_1 = 18;
837
838        /** H264, Profile 0, Level 2.2 */
839        public static final int H264_PROFILE_0_LEVEL_2_2 = 19;
840
841        /** H264, Profile 0, Level 3. */
842        public static final int H264_PROFILE_0_LEVEL_3 = 20;
843
844        /** H264, Profile 0, Level 3.1 */
845        public static final int H264_PROFILE_0_LEVEL_3_1 = 21;
846
847        /** H264, Profile 0, Level 3.2 */
848        public static final int H264_PROFILE_0_LEVEL_3_2 = 22;
849
850        /** H264, Profile 0, Level 4. */
851        public static final int H264_PROFILE_0_LEVEL_4 = 23;
852
853        /** H264, Profile 0, Level 4.1 */
854        public static final int H264_PROFILE_0_LEVEL_4_1 = 24;
855
856        /** H264, Profile 0, Level 4.2 */
857        public static final int H264_PROFILE_0_LEVEL_4_2 = 25;
858
859        /** H264, Profile 0, Level 5. */
860        public static final int H264_PROFILE_0_LEVEL_5 = 26;
861
862        /** H264, Profile 0, Level 5.1 */
863        public static final int H264_PROFILE_0_LEVEL_5_1 = 27;
864
865        /** Profile out of range. */
866        public static final int OUT_OF_RANGE = 255;
867    }
868
869    /** Defines video frame sizes. */
870    public final class VideoFrameSize {
871
872        public static final int SIZE_UNDEFINED = -1;
873
874        /** SQCIF 128 x 96 pixels. */
875        public static final int SQCIF = 0;
876
877        /** QQVGA 160 x 120 pixels. */
878        public static final int QQVGA = 1;
879
880        /** QCIF 176 x 144 pixels. */
881        public static final int QCIF = 2;
882
883        /** QVGA 320 x 240 pixels. */
884        public static final int QVGA = 3;
885
886        /** CIF 352 x 288 pixels. */
887        public static final int CIF = 4;
888
889        /** VGA 640 x 480 pixels. */
890        public static final int VGA = 5;
891
892        /** WVGA 800 x 480 pixels. */
893        public static final int WVGA = 6;
894
895        /** NTSC 720 x 480 pixels. */
896        public static final int NTSC = 7;
897
898        /** nHD 640 x 360 pixels. */
899        public static final int nHD = 8;
900
901        /** WVGA 16:9, 854 x 480 pixels. */
902        public static final int WVGA16x9 = 9;
903
904        /** 720p 1280 x 720 pixels. */
905        public static final int V720p = 10;
906
907        /** W720p 1080 x 720 pixels. */
908        public static final int W720p = 11;
909
910        /** S720p 960 x 720 pixels. */
911        public static final int S720p = 12;
912    }
913
914    /**
915     * Defines output video frame rates.
916     */
917    public final class VideoFrameRate {
918        /** Frame rate of 5 frames per second. */
919        public static final int FR_5_FPS = 0;
920
921        /** Frame rate of 7.5 frames per second. */
922        public static final int FR_7_5_FPS = 1;
923
924        /** Frame rate of 10 frames per second. */
925        public static final int FR_10_FPS = 2;
926
927        /** Frame rate of 12.5 frames per second. */
928        public static final int FR_12_5_FPS = 3;
929
930        /** Frame rate of 15 frames per second. */
931        public static final int FR_15_FPS = 4;
932
933        /** Frame rate of 20 frames per second. */
934        public static final int FR_20_FPS = 5;
935
936        /** Frame rate of 25 frames per second. */
937        public static final int FR_25_FPS = 6;
938
939        /** Frame rate of 30 frames per second. */
940        public static final int FR_30_FPS = 7;
941    }
942
943    /**
944     * Defines Video Effect Types.
945     */
946    public static class VideoEffect {
947
948        public static final int NONE = 0;
949
950        public static final int FADE_FROM_BLACK = 8;
951
952        public static final int CURTAIN_OPENING = 9;
953
954        public static final int FADE_TO_BLACK = 16;
955
956        public static final int CURTAIN_CLOSING = 17;
957
958        public static final int EXTERNAL = 256;
959
960        public static final int BLACK_AND_WHITE = 257;
961
962        public static final int PINK = 258;
963
964        public static final int GREEN = 259;
965
966        public static final int SEPIA = 260;
967
968        public static final int NEGATIVE = 261;
969
970        public static final int FRAMING = 262;
971
972        public static final int TEXT = 263;
973
974        public static final int ZOOM_IN = 264;
975
976        public static final int ZOOM_OUT = 265;
977
978        public static final int FIFTIES = 266;
979
980        public static final int COLORRGB16 = 267;
981
982        public static final int GRADIENT = 268;
983    }
984
985    /**
986     * Defines the video transitions.
987     */
988    public static class VideoTransition {
989        /** No transition */
990        public static final int NONE = 0;
991
992        /** Cross fade transition */
993        public static final int CROSS_FADE = 1;
994
995        /** External transition. Currently not available. */
996        public static final int EXTERNAL = 256;
997
998        /** AlphaMagic transition. */
999        public static final int ALPHA_MAGIC = 257;
1000
1001        /** Slide transition. */
1002        public static final int SLIDE_TRANSITION = 258;
1003
1004        /** Fade to black transition. */
1005        public static final int FADE_BLACK = 259;
1006    }
1007
1008    /**
1009     * Defines settings for the AlphaMagic transition
1010     */
1011    public static class AlphaMagicSettings {
1012        /** Name of the alpha file (JPEG file). */
1013        public String file;
1014
1015        /** Blending percentage [0..100]; 0 = no blending. */
1016        public int blendingPercent;
1017
1018        /** Invert the default rotation direction of the AlphaMagic effect. */
1019        public boolean invertRotation;
1020
1021        public int rgbWidth;
1022        public int rgbHeight;
1023    }
1024
1025    /** Defines the direction of the Slide transition. */
1026    public static final class SlideDirection {
1027
1028        /** Right out left in. */
1029        public static final int RIGHT_OUT_LEFT_IN = 0;
1030
1031        /** Left out right in. */
1032        public static final int LEFT_OUT_RIGTH_IN = 1;
1033
1034        /** Top out bottom in. */
1035        public static final int TOP_OUT_BOTTOM_IN = 2;
1036
1037        /** Bottom out top in */
1038        public static final int BOTTOM_OUT_TOP_IN = 3;
1039    }
1040
1041    /** Defines the Slide transition settings. */
1042    public static class SlideTransitionSettings {
1043        /**
1044         * Direction of the slide transition. See {@link SlideDirection
1045         * SlideDirection} for valid values.
1046         */
1047        public int direction;
1048    }
1049
1050    /**
1051     * Defines the settings of a single clip.
1052     */
1053    public static class ClipSettings {
1054
1055        /**
1056         * The path to the clip file.
1057         * <p>
1058         * File format of the clip; it can be:
1059         * <ul>
1060         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
1061         * <li>JPG file
1062         * </ul>
1063         */
1064
1065        public String clipPath;
1066
1067        /**
1068         * The path of the decoded file. This is used only for image files.
1069         */
1070        public String clipDecodedPath;
1071
1072        /**
1073         * The path of the Original file. This is used only for image files.
1074         */
1075        public String clipOriginalPath;
1076
1077        /**
1078         * File type of the clip. See {@link FileType FileType} for valid
1079         * values.
1080         */
1081        public int fileType;
1082
1083        /** Beginning of the cut in the clip, in milliseconds. */
1084        public int beginCutTime;
1085
1086        /**
1087         * End of the cut in the clip in milliseconds. Set both
1088         * <code>beginCutTime</code> and <code>endCutTime</code> to
1089         * <code>0</code> to get the full length of the clip without a cut. In
1090         * case of JPG clip, this is the duration of the JPEG file.
1091         */
1092        public int endCutTime;
1093
1094        /**
1095         * Begin of the cut in the clip in percentage of the file duration.
1096         */
1097        public int beginCutPercent;
1098
1099        /**
1100         * End of the cut in the clip, as a percentage of the file duration. Set
1101         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1102         * <code>0</code> to get the full length of the clip without a cut.
1103         */
1104        public int endCutPercent;
1105
1106        /** Enable panning and zooming. */
1107        public boolean panZoomEnabled;
1108
1109        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1110        public int panZoomPercentStart;
1111
1112        /** Top left X coordinate at start of clip. */
1113        public int panZoomTopLeftXStart;
1114
1115        /** Top left Y coordinate at start of clip. */
1116        public int panZoomTopLeftYStart;
1117
1118        /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom */
1119        public int panZoomPercentEnd;
1120
1121        /** Top left X coordinate at end of clip. */
1122        public int panZoomTopLeftXEnd;
1123
1124        /** Top left Y coordinate at end of clip. */
1125        public int panZoomTopLeftYEnd;
1126
1127        /**
1128         * Sets the media rendering. See {@link MediaRendering MediaRendering}
1129         * for valid values.
1130         */
1131        public int mediaRendering;
1132
1133        /**
1134         * RGB width and height.
1135         */
1136         public int rgbWidth;
1137         public int rgbHeight;
1138    }
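
    /*
     * Illustrative sketch only (not part of the original file): fills in a
     * ClipSettings for a 3GP clip trimmed to [beginMs, endMs] milliseconds.
     * Setting both cut times to 0 would keep the full clip instead.
     */
    private static ClipSettings createTrimmedClipSettings(String path, int beginMs, int endMs) {
        ClipSettings clip = new ClipSettings();
        clip.clipPath = path;
        clip.fileType = FileType.THREE_GPP;
        clip.beginCutTime = beginMs;
        clip.endCutTime = endMs;
        // Keep the aspect ratio and pad with black borders if needed.
        clip.mediaRendering = MediaRendering.BLACK_BORDERS;
        return clip;
    }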
1139
1140    /**
1141     * Defines settings for a transition.
1142     */
1143    public static class TransitionSettings {
1144
1145        /** Duration of the transition in msec. */
1146        public int duration;
1147
1148        /**
1149         * Transition type for video. See {@link VideoTransition
1150         * VideoTransition} for valid values.
1151         */
1152        public int videoTransitionType;
1153
1154        /**
1155         * Transition type for audio. See {@link AudioTransition
1156         * AudioTransition} for valid values.
1157         */
1158        public int audioTransitionType;
1159
1160        /**
1161         * Transition behaviour. See {@link TransitionBehaviour
1162         * TransitionBehaviour} for valid values.
1163         */
1164        public int transitionBehaviour;
1165
1166        /**
1167         * Settings for AlphaMagic transition. Only needs to be set if
1168         * <code>videoTransitionType</code> is set to
1169         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1170         * {@link AlphaMagicSettings AlphaMagicSettings}.
1171         */
1172        public AlphaMagicSettings alphaSettings;
1173
1174        /**
1175         * Settings for the Slide transition. See
1176         * {@link SlideTransitionSettings SlideTransitionSettings}.
1177         */
1178        public SlideTransitionSettings slideSettings;
1179    }
1180
1181    public static final class AudioTransition {
1182        /** No audio transition. */
1183        public static final int NONE = 0;
1184
1185        /** Cross-fade audio transition. */
1186        public static final int CROSS_FADE = 1;
1187    }
1188
1189    /**
1190     * Defines transition behaviours.
1191     **/
1192
1193    public static final class TransitionBehaviour {
1194
1195        /** The transition uses an increasing speed. */
1196        public static final int SPEED_UP = 0;
1197
1198        /** The transition uses a linear (constant) speed. */
1199        public static final int LINEAR = 1;
1200
1201        /** The transition uses a decreasing speed. */
1202        public static final int SPEED_DOWN = 2;
1203
1204        /**
1205         * The transition uses a constant speed, but slows down in the middle
1206         * section.
1207         */
1208        public static final int SLOW_MIDDLE = 3;
1209
1210        /**
1211         * The transition uses a constant speed, but increases speed in the
1212         * middle section.
1213         */
1214        public static final int FAST_MIDDLE = 4;
1215    }
1216
1217    /** Defines settings for the background music. */
1218    public static class BackgroundMusicSettings {
1219
1220        /** Background music file. */
1221        public String file;
1222
1223        /** File type. See {@link FileType FileType} for valid values. */
1224        public int fileType;
1225
1226        /**
1227         * Insertion time, in milliseconds, in the output video at which the
1228         * background music must be inserted.
1229         */
1230        public long insertionTime;
1231
1232        /**
1233         * Volume, as a percentage of the background music track, to use. If
1234         * this field is set to 100, the background music will replace the audio
1235         * from the video input file(s).
1236         */
1237        public int volumePercent;
1238
1239        /**
1240         * Start time in milliseconds in the background music file from where
1241         * the background music should loop. Set both <code>beginLoop</code> and
1242         * <code>endLoop</code> to <code>0</code> to disable looping.
1243         */
1244        public long beginLoop;
1245
1246        /**
1247         * End time in milliseconds in the background music file to where the
1248         * background music should loop. Set both <code>beginLoop</code> and
1249         * <code>endLoop</code> to <code>0</code> to disable looping.
1250         */
1251        public long endLoop;
1252
1253        public boolean enableDucking;
1254
1255        public int duckingThreshold;
1256
1257        public int lowVolume;
1258
1259        public boolean isLooping;
1260
1261    }
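
    /*
     * Illustrative sketch only (not part of the original file): a background
     * music track inserted at the start of the output video, mixed at half
     * volume and looping over its first 30 seconds. File path and values are
     * arbitrary example choices.
     */
    private static BackgroundMusicSettings createSampleBackgroundMusic(String musicFile) {
        BackgroundMusicSettings bgm = new BackgroundMusicSettings();
        bgm.file = musicFile;
        bgm.fileType = FileType.MP3;
        bgm.insertionTime = 0;      // insert at the beginning of the output video
        bgm.volumePercent = 50;     // 100 would replace the original audio entirely
        bgm.beginLoop = 0;
        bgm.endLoop = 30000;        // loop the first 30 s; 0/0 would disable looping
        bgm.isLooping = true;
        bgm.enableDucking = false;
        return bgm;
    }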
1262
1263    /** Defines the audio effect types. */
1264    public static class AudioEffect {
1265        /** No audio effect. */
1266        public static final int NONE = 0;
1267
1268        /** Fade-in effect. */
1269        public static final int FADE_IN = 8;
1270
1271        /** Fade-out effect. */
1272        public static final int FADE_OUT = 16;
1273    }
1274
1275    /** Defines the effect settings. */
1276    public static class EffectSettings {
1277
1278        /** Start time of the effect in milliseconds. */
1279        public int startTime;
1280
1281        /** Duration of the effect in milliseconds. */
1282        public int duration;
1283
1284        /**
1285         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1286         * values.
1287         */
1288        public int videoEffectType;
1289
1290        /**
1291         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1292         * values.
1293         */
1294        public int audioEffectType;
1295
1296        /**
1297         * Start time of the effect as a percentage of the clip duration. A
1298         * value of 0 percent means the effect starts at the beginning of the
1299         * clip.
1300         */
1301        public int startPercent;
1302
1303        /**
1304         * Duration of the effect as a percentage of the clip duration.
1305         */
1306        public int durationPercent;
1307
1308        /**
1309         * Framing file.
1310         * <p>
1311         * This field is only used when the field <code>videoEffectType</code>
1312         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1313         * this field is ignored.
1314         */
1315        public String framingFile;
1316
1317        /**
1318         * Framing buffer.
1319         * <p>
1320         * This field is only used when the field <code>videoEffectType</code>
1321         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1322         * this field is ignored.
1323         */
1324        public int[] framingBuffer;
1325
1326        /**
1327         * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5), or ARGB_8888 (6).
1328         **/
1329
1330        public int bitmapType;
1331
1332        public int width;
1333
1334        public int height;
1335
1336        /**
1337         * Top left x coordinate. This coordinate is used to set the x
1338         * coordinate of the picture in the framing file when the framing file
1339         * is selected. The x coordinate is also used to set the location of the
1340         * text in the text effect.
1341         * <p>
1342         * This field is only used when the field <code>videoEffectType</code>
1343         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1344         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1345         * ignored.
1346         */
1347        public int topLeftX;
1348
1349        /**
1350         * Top left y coordinate. This coordinate is used to set the y
1351         * coordinate of the picture in the framing file when the framing file
1352         * is selected. The y coordinate is also used to set the location of the
1353         * text in the text effect.
1354         * <p>
1355         * This field is only used when the field <code>videoEffectType</code>
1356         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1357         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1358         * ignored.
1359         */
1360        public int topLeftY;
1361
1362        /**
1363         * Whether the frame should be resized. If this field is set to
1364         * <code>true</code> then the frame size is matched with the output
1365         * video size.
1366         * <p>
1367         * This field is only used when the field <code>videoEffectType</code>
1368         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1369         * this field is ignored.
1370         */
1371        public boolean framingResize;
1372
1373        /**
1374         * Size to which the framing buffer needs to be resized.
1375         * This is valid only if framingResize is true.
1376         */
1377        public int framingScaledSize;
1378        /**
1379         * Text to insert in the video.
1380         * <p>
1381         * This field is only used when the field <code>videoEffectType</code>
1382         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1383         * field is ignored.
1384         */
1385        public String text;
1386
1387        /**
1388         * Text attributes for the text to insert in the video.
1389         * <p>
1390         * This field is only used when the field <code>videoEffectType</code>
1391         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1392         * field is ignored. For more details about this field see the
1393         * integration guide.
1394         */
1395        public String textRenderingData;
1396
1397        /** Width of the text buffer in pixels. */
1398        public int textBufferWidth;
1399
1400        /** Height of the text buffer in pixels. */
1401        public int textBufferHeight;
1402
1403        /**
1404         * Processing rate for the fifties effect. A high value (e.g. 30)
1405         * results in high effect strength.
1406         * <p>
1407         * This field is only used when the field <code>videoEffectType</code>
1408         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1409         * this field is ignored.
1410         */
1411        public int fiftiesFrameRate;
1412
1413        /**
1414         * RGB 16 color of the RGB16 and gradient color effect.
1415         * <p>
1416         * This field is only used when the field <code>videoEffectType</code>
1417         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1418         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1419         * field is ignored.
1420         */
1421        public int rgb16InputColor;
1422
1423        /**
1424         * Start alpha blending percentage.
1425         * <p>
1426         * This field is only used when the field <code>videoEffectType</code>
1427         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1428         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1429         * is ignored.
1430         */
1431        public int alphaBlendingStartPercent;
1432
1433        /**
1434         * Middle alpha blending percentage.
1435         * <p>
1436         * This field is only used when the field <code>videoEffectType</code>
1437         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1438         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1439         * is ignored.
1440         */
1441        public int alphaBlendingMiddlePercent;
1442
1443        /**
1444         * End alpha blending percentage.
1445         * <p>
1446         * This field is only used when the field <code>videoEffectType</code>
1447         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1448         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1449         * is ignored.
1450         */
1451        public int alphaBlendingEndPercent;
1452
1453        /**
1454         * Duration of the fade-in phase, as a percentage of the effect duration.
1455         * <p>
1456         * This field is only used when the field <code>videoEffectType</code>
1457         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1458         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1459         * is ignored.
1460         */
1461        public int alphaBlendingFadeInTimePercent;
1462
1463        /**
1464         * Duration of the fade-out phase, as a percentage of the effect duration.
1465         * <p>
1466         * This field is only used when the field <code>videoEffectType</code>
1467         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1468         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1469         * is ignored.
1470         */
1471        public int alphaBlendingFadeOutTimePercent;
1472    }
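
    /*
     * Illustrative sketch only (not part of the original file): an
     * EffectSettings describing a fade-from-black video effect over the first
     * two seconds of a clip. Framing- and text-specific fields are left at
     * their defaults because they are ignored for this effect type.
     */
    private static EffectSettings createFadeFromBlackEffect() {
        EffectSettings effect = new EffectSettings();
        effect.startTime = 0;
        effect.duration = 2000;
        effect.videoEffectType = VideoEffect.FADE_FROM_BLACK;
        effect.audioEffectType = AudioEffect.NONE;
        return effect;
    }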
1473
1474    /** Defines the clip properties for preview */
1475    public static class PreviewClips {
1476
1477        /**
1478         * The path to the clip file.
1479         * <p>
1480         * File format of the clip; it can be:
1481         * <ul>
1482         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
1483         * <li>JPG file
1484         * </ul>
1485         */
1486
1487        public String clipPath;
1488
1489        /**
1490         * File type of the clip. See {@link FileType FileType} for valid
1491         * values.
1492         */
1493        public int fileType;
1494
1495        /** Beginning of the cut in the clip, in milliseconds. */
1496        public long beginPlayTime;
1497
1498        public long endPlayTime;
1499
1500        /**
1501         * Sets the media rendering. See {@link MediaRendering MediaRendering}
1502         * for valid values.
1503         */
1504        public int mediaRendering;
1505
1506    }
1507
1508    /** Defines the audio settings. */
1509    public static class AudioSettings {
1510
1511        String pFile;
1512
1513        /** < PCM file path */
1514        String Id;
1515
1516        boolean bRemoveOriginal;
1517
1518        /** < If true, the original audio track is not taken into account */
1519        int channels;
1520
1521        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1522        int Fs;
1523
1524        /**
1525         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1526         * BGM clip
1527         */
1528        int ExtendedFs;
1529
1530        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1531        long startMs;
1532
1533        /** < Time, in milliseconds, at which the added audio track is inserted */
1534        long beginCutTime;
1535
1536        long endCutTime;
1537
1538        int fileType;
1539
1540        int volume;
1541
1542        /** < Volume, in percentage, of the added audio track */
1543        boolean loop;
1544
1545        /** < Looping on/off */
1546
1547        /** Audio mix and Duck **/
1548        int ducking_threshold;
1549
1550        int ducking_lowVolume;
1551
1552        boolean bInDucking_enable;
1553
1554        String pcmFilePath;
1555
1556    }
1557
1558    /** Encapsulates preview clips and effect settings */
1559    public static class PreviewSettings {
1560
1561        public PreviewClips[] previewClipsArray;
1562
1563        /** The effect settings. */
1564        public EffectSettings[] effectSettingsArray;
1565
1566    }
1567
1568    /** Encapsulates clip properties */
1569    public static class PreviewClipProperties {
1570
1571        public Properties[] clipProperties;
1572
1573    }
1574
1575    /** Defines the editing settings. */
1576    public static class EditSettings {
1577
1578        /**
1579         * Array of clip settings. There is one <code>clipSetting</code> for
1580         * each clip.
1581         */
1582        public ClipSettings[] clipSettingsArray;
1583
1584        /**
1585         * Array of transition settings. If there are n clips (and thus n
1586         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1587         * <code>transitionSettings</code> in
1588         * <code>transitionSettingsArray</code>.
1589         */
1590        public TransitionSettings[] transitionSettingsArray;
1591
1592        /** The effect settings. */
1593        public EffectSettings[] effectSettingsArray;
1594
1595        /**
1596         * Video frame rate of the output clip. See {@link VideoFrameRate
1597         * VideoFrameRate} for valid values.
1598         */
1599        public int videoFrameRate;
1600
1601        /** Output file name. Must be an absolute path. */
1602        public String outputFile;
1603
1604        /**
1605         * Size of the video frames in the output clip. See
1606         * {@link VideoFrameSize VideoFrameSize} for valid values.
1607         */
1608        public int videoFrameSize;
1609
1610        /**
1611         * Format of the video stream in the output clip. See
1612         * {@link VideoFormat VideoFormat} for valid values.
1613         */
1614        public int videoFormat;
1615
1616        /**
1617         * Format of the audio stream in the output clip. See
1618         * {@link AudioFormat AudioFormat} for valid values.
1619         */
1620        public int audioFormat;
1621
1622        /**
1623         * Sampling frequency of the audio stream in the output clip. See
1624         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1625         * values.
1626         */
1627        public int audioSamplingFreq;
1628
1629        /**
1630         * Maximum file size of the output clip. Set it to <code>0</code> to
1631         * have this field ignored.
1633         */
1634        public int maxFileSize;
1635
1636        /**
1637         * Number of audio channels in output clip. Use <code>0</code> for none,
1638         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1639         * allowed when the <code>audioFormat</code> field is set to
1640         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1641         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
1642         * allowed when the <code>audioFormat</code> field is set to
1643         * {@link AudioFormat#AAC AudioFormat.AAC}.
1644         */
1645        public int audioChannels;
1646
1647        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1648        public int videoBitrate;
1649
1650        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1651        public int audioBitrate;
1652
1653        /**
1654         * Background music settings. See {@link BackgroundMusicSettings
1655         * BackgroundMusicSettings} for valid values.
1656         */
1657        public BackgroundMusicSettings backgroundMusicSettings;
1658        /** Volume, as a percentage, of the primary audio track. */
1659        public int primaryTrackVolume;
1660
1661    }
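    /*
     * Illustrative sketch (not part of the original source): the fields above are
     * typically filled the way generateKenBurnsClip() and generateEffectClip() do
     * below, e.g. for an intermediate 3GP clip:
     *
     *   EditSettings settings = new EditSettings();
     *   settings.audioFormat = AudioFormat.AAC;
     *   settings.audioChannels = 2;
     *   settings.audioBitrate = Bitrate.BR_64_KBPS;
     *   settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
     *   settings.videoFormat = VideoFormat.H264;
     *   settings.videoBitrate = Bitrate.BR_5_MBPS;
     *   settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
     *   settings.outputFile = "/path/to/output.3gp";  // must be an absolute path
     */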
1662
1663    /**
1664     * Defines the media properties.
1665     **/
1666
1667    public static class Properties {
1668
1669        /**
1670         * Duration of the media in milliseconds.
1671         */
1672
1673        public int duration;
1674
1675        /**
1676         * File type.
1677         */
1678
1679        public int fileType;
1680
1681        /**
1682         * Video format.
1683         */
1684
1685        public int videoFormat;
1686
1687        /**
1688         * Duration of the video stream of the media in milliseconds.
1689         */
1690
1691        public int videoDuration;
1692
1693        /**
1694         * Bitrate of the video stream of the media.
1695         */
1696
1697        public int videoBitrate;
1698
1699        /**
1700         * Width of the video frames or the width of the still picture in
1701         * pixels.
1702         */
1703
1704        public int width;
1705
1706        /**
1707         * Height of the video frames or the height of the still picture in
1708         * pixels.
1709         */
1710
1711        public int height;
1712
1713        /**
1714         * Average frame rate of video in the media in frames per second.
1715         */
1716
1717        public float averageFrameRate;
1718
1719        /**
1720         * Profile and level of the video in the media.
1721         */
1722
1723        public int profileAndLevel;
1724
1725        /**
1726         * Audio format.
1727         */
1728
1729        public int audioFormat;
1730
1731        /**
1732         * Duration of the audio stream of the media in milliseconds.
1733         */
1734
1735        public int audioDuration;
1736
1737        /**
1738         * Bitrate of the audio stream of the media.
1739         */
1740
1741        public int audioBitrate;
1742
1743        /**
1744         * Number of audio channels in the media.
1745         */
1746
1747        public int audioChannels;
1748
1749        /**
1750         * Sampling frequency of the audio stream in the media in samples per
1751         * second.
1752         */
1753
1754        public int audioSamplingFrequency;
1755
1756        /**
1757         * Volume value of the audio track as percentage.
1758         */
1759        public int audioVolumeValue;
1760        /** Id of the media item or audio track these properties describe. */
1761        public String Id;
1762
1763    }
1764
1765    /**
1766     * Constructor
1767     *
1768     * @param projectPath The path where the VideoEditor stores all files
1769     *        related to the project
1770     * @param veObj The video editor reference
1771     */
1772    public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
1773        mProjectPath = projectPath;
1774        if (veObj != null) {
1775            mVideoEditor = veObj;
1776        } else {
1777            mVideoEditor = null;
1778            throw new IllegalArgumentException("video editor object is null");
1779        }
1780        if (mStoryBoardSettings == null)
1781            mStoryBoardSettings = new EditSettings();
1782
1783        mMediaEffectList = new ArrayList<Effect>();
1784        mMediaOverLayList = new ArrayList<Overlay>();
1785        _init(mProjectPath, "null");
1786        mAudioTrackPCMFilePath = null;
1787    }
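    /*
     * Illustrative usage sketch (not part of the original source), assuming a
     * VideoEditor instance "editor" obtained elsewhere and a writable project
     * directory:
     *
     *   MediaArtistNativeHelper helper =
     *           new MediaArtistNativeHelper("/path/to/project", editor);
     *   // Passing a null VideoEditor throws IllegalArgumentException.
     */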
1788
1789    /**
1790     * @return The project path
1791     */
1792    String getProjectPath() {
1793        return mProjectPath;
1794    }
1795
1796    /**
1797     * @return The Audio Track PCM file path
1798     */
1799    String getProjectAudioTrackPCMFilePath() {
1800        return mAudioTrackPCMFilePath;
1801    }
1802
1803    /**
1804     * Invalidates the PCM file
1805     */
1806    void invalidatePcmFile() {
1807        if (mAudioTrackPCMFilePath != null) {
1808            new File(mAudioTrackPCMFilePath).delete();
1809            mAudioTrackPCMFilePath = null;
1810        }
1811    }
1812
1813    @SuppressWarnings("unused")
1814    private void onProgressUpdate(int taskId, int progress) {
1815        if (mProcessingState == PROCESSING_EXPORT) {
1816            if (mExportProgressListener != null) {
1817                if ((progress % 2) == 0) {
1818                    mProgressToApp++;
1819                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, mProgressToApp);
1820                }
1821            }
1822        }
1823        else {
1824            // Adapt progress depending on current state
1825            int actualProgress = 0;
1826            int action = 0;
1827
1828            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1829                action = MediaProcessingProgressListener.ACTION_DECODE;
1830            } else {
1831                action = MediaProcessingProgressListener.ACTION_ENCODE;
1832            }
1833
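            /*
             * The intermediate states below map each stage's 0-100 progress into a
             * slice of the overall range: INTERMEDIATE1 -> 0-25%, INTERMEDIATE2 ->
             * 25-50%, INTERMEDIATE3 -> 50-100%. For example, a native progress of
             * 60 during INTERMEDIATE2 is reported to the app as 25 + 60/4 = 40.
             */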
1834            switch (mProcessingState) {
1835                case PROCESSING_AUDIO_PCM:
1836                    actualProgress = progress;
1837                    break;
1838                case PROCESSING_TRANSITION:
1839                    actualProgress = progress;
1840                    break;
1841                case PROCESSING_KENBURNS:
1842                    actualProgress = progress;
1843                    break;
1844                case PROCESSING_INTERMEDIATE1:
1845                    if ((progress == 0) && (mProgressToApp != 0)) {
1846                        mProgressToApp = 0;
1847                    }
1848                    if ((progress != 0) || (mProgressToApp != 0)) {
1849                        actualProgress = progress/4;
1850                    }
1851                    break;
1852                case PROCESSING_INTERMEDIATE2:
1853                    if ((progress != 0) || (mProgressToApp != 0)) {
1854                        actualProgress = 25 + progress/4;
1855                    }
1856                    break;
1857                case PROCESSING_INTERMEDIATE3:
1858                    if ((progress != 0) || (mProgressToApp != 0)) {
1859                        actualProgress = 50 + progress/2;
1860                    }
1861                    break;
1862                case PROCESSING_NONE:
1863
1864                default:
1865                    Log.e("MediaArtistNativeHelper", "ERROR unexpected State=" + mProcessingState);
1866                    return;
1867            }
1868            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1869
1870                mProgressToApp = actualProgress;
1871
1872                if (mMediaProcessingProgressListener != null) {
1873                    // Send the progress indication
1874                    mMediaProcessingProgressListener.onProgress(mProcessingObject,
1875                                                                action,
1876                                                                actualProgress);
1877                }
1878            }
1879            /* avoid 0 in next intermediate call */
1880            if (mProgressToApp == 0) {
1881                if (mMediaProcessingProgressListener != null) {
1882                    /*
1883                     *  Send the progress indication
1884                     */
1885                    mMediaProcessingProgressListener.onProgress(mProcessingObject,
1886                                                                action,
1887                                                                actualProgress);
1888                }
1889                mProgressToApp = 1;
1890            }
1891        }
1892    }
1893
1894    @SuppressWarnings("unused")
1895    private void onPreviewProgressUpdate(int progress, boolean isFinished) {
1896        if (mPreviewProgressListener != null) {
1897            mPreviewProgressListener.onProgress(mVideoEditor, progress, isFinished);
1898            mPreviewProgress = progress;
1899        }
1900    }
1901
1902    /**
1903     * Release the native helper object
1904     */
1905    public void releaseNativeHelper() {
1906        try {
1907            release();
1908        } catch (IllegalStateException ex) {
1909            Log.e("MediaArtistNativeHelper",
1910            "Illegal State exeption caught in releaseNativeHelper");
1911            throw ex;
1912        } catch (RuntimeException ex) {
1913            Log.e("MediaArtistNativeHelper", "Runtime exception caught in releaseNativeHelper");
1914            throw ex;
1915        }
1916    }
1917
1918    /**
1919     * Progress callback from the native layer for audio graph (waveform) extraction
1920     */
1921    @SuppressWarnings("unused")
1922    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1923
1924        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0))
1925        {
1926            mExtractAudioWaveformProgressListener.onProgress(progress);
1927        }
1928    }
1929
1930    /**
1931     * Populates the Effect Settings in EffectSettings
1932     *
1933     * @param effects The reference of EffectColor
1934     *
1935     * @return The populated effect settings in EffectSettings
1936     * reference
1937     */
1938    EffectSettings getEffectSettings(EffectColor effects) {
1939        EffectSettings effectSettings = new EffectSettings();
1940        effectSettings.startTime = (int)effects.getStartTime();
1941        effectSettings.duration = (int)effects.getDuration();
1942        effectSettings.videoEffectType = getEffectColorType(effects);
1943        effectSettings.audioEffectType = 0;
1944        effectSettings.startPercent = 0;
1945        effectSettings.durationPercent = 0;
1946        effectSettings.framingFile = null;
1947        effectSettings.topLeftX = 0;
1948        effectSettings.topLeftY = 0;
1949        effectSettings.framingResize = false;
1950        effectSettings.text = null;
1951        effectSettings.textRenderingData = null;
1952        effectSettings.textBufferWidth = 0;
1953        effectSettings.textBufferHeight = 0;
1954        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1955            effectSettings.fiftiesFrameRate = 15;
1956        } else {
1957            effectSettings.fiftiesFrameRate = 0;
1958        }
1959
1960        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
1961                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
1962            effectSettings.rgb16InputColor = effects.getColor();
1963        }
1964
1965        effectSettings.alphaBlendingStartPercent = 0;
1966        effectSettings.alphaBlendingMiddlePercent = 0;
1967        effectSettings.alphaBlendingEndPercent = 0;
1968        effectSettings.alphaBlendingFadeInTimePercent = 0;
1969        effectSettings.alphaBlendingFadeOutTimePercent = 0;
1970        return effectSettings;
1971    }
1972
1973    /**
1974     * Populates the Overlay Settings in EffectSettings
1975     *
1976     * @param overlay The reference of OverlayFrame
1977     *
1978     * @return The populated overlay settings in EffectSettings
1979     * reference
1980     */
1981    EffectSettings getOverlaySettings(OverlayFrame overlay) {
1982        EffectSettings effectSettings = new EffectSettings();
1983        Bitmap bitmap = null;
1984
1985        effectSettings.startTime = (int)overlay.getStartTime();
1986        effectSettings.duration = (int)overlay.getDuration();
1987        effectSettings.videoEffectType = VideoEffect.FRAMING;
1988        effectSettings.audioEffectType = 0;
1989        effectSettings.startPercent = 0;
1990        effectSettings.durationPercent = 0;
1991        effectSettings.framingFile = null;
1992
1993        if ((bitmap = overlay.getBitmap()) != null) {
1994            effectSettings.framingFile = overlay.getFilename();
1995
1996            if (effectSettings.framingFile == null) {
1997                try {
1998                    (overlay).save(mProjectPath);
1999                } catch (IOException e) {
2000                    Log.e("MediaArtistNativeHelper","getOverlaySettings : File not found");
2001                }
2002                effectSettings.framingFile = overlay.getFilename();
2003            }
2004            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
2005                effectSettings.bitmapType = 6;
2006            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
2007                effectSettings.bitmapType = 5;
2008            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
2009                effectSettings.bitmapType = 4;
2010            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
2011                throw new RuntimeException("Bitmap config not supported");
2012
2013            effectSettings.width = bitmap.getWidth();
2014            effectSettings.height = bitmap.getHeight();
2015            effectSettings.framingBuffer = new int[effectSettings.width];
2016            int tmp = 0;
2017            short maxAlpha = 0;
2018            short minAlpha = (short)0xFF;
2019            short alpha = 0;
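            /*
             * Scan the overlay bitmap row by row to find the minimum and maximum
             * alpha values; their average, converted to a 0-100 percentage, is used
             * as the alpha blending level for the framing effect.
             */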
2020            while (tmp < effectSettings.height) {
2021                bitmap.getPixels(effectSettings.framingBuffer, 0,
2022                                 effectSettings.width, 0, tmp,
2023                                 effectSettings.width, 1);
2024                for (int i = 0; i < effectSettings.width; i++) {
2025                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
2026                    if (alpha > maxAlpha) {
2027                        maxAlpha = alpha;
2028                    }
2029                    if (alpha < minAlpha) {
2030                        minAlpha = alpha;
2031                    }
2032                }
2033                tmp += 1;
2034            }
2035            alpha = (short)((maxAlpha + minAlpha) / 2);
2036            alpha = (short)((alpha * 100) / 256);
2037            effectSettings.alphaBlendingEndPercent = alpha;
2038            effectSettings.alphaBlendingMiddlePercent = alpha;
2039            effectSettings.alphaBlendingStartPercent = alpha;
2040            effectSettings.alphaBlendingFadeInTimePercent = 100;
2041            effectSettings.alphaBlendingFadeOutTimePercent = 100;
2042            effectSettings.framingBuffer = null;
2043        }
2044
2045        effectSettings.topLeftX = 0;
2046        effectSettings.topLeftY = 0;
2047
2048        effectSettings.framingResize = true;
2049        effectSettings.text = null;
2050        effectSettings.textRenderingData = null;
2051        effectSettings.textBufferWidth = 0;
2052        effectSettings.textBufferHeight = 0;
2053        effectSettings.fiftiesFrameRate = 0;
2054        effectSettings.rgb16InputColor = 0;
2055        int mediaItemHeight;
2056        int aspectRatio;
2057        if (overlay.getMediaItem() instanceof MediaImageItem) {
2058            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2059                //Kenburns was applied
2060                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2061                aspectRatio = getAspectRatio(
2062                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2063                    , mediaItemHeight);
2064            }
2065            else {
2066                //For image get the scaled height. Aspect ratio would remain the same
2067                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2068                aspectRatio = overlay.getMediaItem().getAspectRatio();
2069                effectSettings.framingResize = false; //since the image can be of odd size.
2070            }
2071        } else {
2072            aspectRatio = overlay.getMediaItem().getAspectRatio();
2073            mediaItemHeight = overlay.getMediaItem().getHeight();
2074        }
2075        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2076        return effectSettings;
2077    }
2078
2079    /**
2080     * Sets the audio regenerate flag
2081     *
2082     * @param flag The boolean to set the audio regenerate flag
2083     *
2084     */
2085    void setAudioflag(boolean flag) {
2086        //check if the file exists.
2087        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2088            flag = true;
2089        }
2090        mRegenerateAudio = flag;
2091    }
2092
2093    /**
2094     * Gets the audio regenerate flag
2095     *
2096     * @return The audio regenerate flag
2097     *
2098     */
2099    boolean getAudioflag() {
2100        return mRegenerateAudio;
2101    }
2102
2103    /**
2104     * Maps the average frame rate to one of the defined enum values
2105     *
2106     * @param averageFrameRate The average frame rate of video item
2107     *
2108     * @return The frame rate from one of the defined enum values
2109     */
2110    public int GetClosestVideoFrameRate(int averageFrameRate) {
2111        if (averageFrameRate >= 25) {
2112            return VideoFrameRate.FR_30_FPS;
2113        } else if (averageFrameRate >= 20) {
2114            return VideoFrameRate.FR_25_FPS;
2115        } else if (averageFrameRate >= 15) {
2116            return VideoFrameRate.FR_20_FPS;
2117        } else if (averageFrameRate >= 12) {
2118            return VideoFrameRate.FR_15_FPS;
2119        } else if (averageFrameRate >= 10) {
2120            return VideoFrameRate.FR_12_5_FPS;
2121        } else if (averageFrameRate >= 7) {
2122            return VideoFrameRate.FR_10_FPS;
2123        } else if (averageFrameRate >= 5) {
2124            return VideoFrameRate.FR_7_5_FPS;
2125        } else {
2126            return -1;
2127        }
2128    }
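    /*
     * Illustrative sketch (not part of the original source): a measured average of
     * 24 fps falls into the ">= 20" bracket, e.g.
     *
     *   int rate = GetClosestVideoFrameRate(24);   // VideoFrameRate.FR_25_FPS
     *   if (rate == -1) {
     *       // Average frame rate was below 5 fps; no matching enum value.
     *   }
     */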
2129
2130    /**
2131     * Helper function to adjust the effect or overlay start time
2132     * depending on the begin and end boundary time of meddia item
2133     */
2134    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect,
2135                                                  int beginCutTime,
2136                                                  int endCutTime) {
2137
2138        int effectStartTime = 0;
2139        int effectDuration = 0;
2140
2141        /**
2142         * cbct -> clip begin cut time
2143         * cect -> clip end cut time
2144         ****************************************
2145         *  |                                 |
2146         *  |         cbct        cect        |
2147         *  | <-1-->   |           |          |
2148         *  |       <--|-2->       |          |
2149         *  |          | <---3---> |          |
2150         *  |          |        <--|-4--->    |
2151         *  |          |           | <--5-->  |
2152         *  |      <---|------6----|---->     |
2153         *  |                                 |
2154         *  < : effectStart
2155         *  > : effectStart + effectDuration
2156         ****************************************
2157         **/
2158
2159        /** 1 & 5 */
2160        /**
2161         * The effect falls outside the trim duration; in such a case the
2162         * effect is not applied.
2163         */
2164        if ((lEffect.startTime > endCutTime)
2165                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2166
2167            effectStartTime = 0;
2168            effectDuration = 0;
2169
2170            lEffect.startTime = effectStartTime;
2171            lEffect.duration = effectDuration;
2172            return;
2173        }
2174
2175        /** 2 */
2176        if ((lEffect.startTime < beginCutTime)
2177                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2178                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2179            effectStartTime = 0;
2180            effectDuration = lEffect.duration;
2181
2182            effectDuration -= (beginCutTime - lEffect.startTime);
2183            lEffect.startTime = effectStartTime;
2184            lEffect.duration = effectDuration;
2185            return;
2186        }
2187
2188        /** 3 */
2189        if ((lEffect.startTime >= beginCutTime)
2190                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2191            effectStartTime = lEffect.startTime - beginCutTime;
2192            lEffect.startTime = effectStartTime;
2193            /* lEffect.duration remains unchanged */
2194            return;
2195        }
2196
2197        /** 4 */
2198        if ((lEffect.startTime >= beginCutTime)
2199                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2200            effectStartTime = lEffect.startTime - beginCutTime;
2201            effectDuration = endCutTime - lEffect.startTime;
2202            lEffect.startTime = effectStartTime;
2203            lEffect.duration = effectDuration;
2204            return;
2205        }
2206
2207        /** 6 */
2208        if ((lEffect.startTime < beginCutTime)
2209                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2210            effectStartTime = 0;
2211            effectDuration = endCutTime - beginCutTime;
2212            lEffect.startTime = effectStartTime;
2213            lEffect.duration = effectDuration;
2214            return;
2215        }
2216
2217    }
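    /*
     * Worked example (not part of the original source), illustrating case 2 above:
     * with a clip trimmed to [2000, 8000] ms and an effect at startTime = 1000 ms,
     * duration = 3000 ms, the effect overlaps the start boundary, so it becomes
     * startTime = 0, duration = 3000 - (2000 - 1000) = 2000 ms relative to the
     * trimmed clip.
     */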
2218
2219    /**
2220     * Generates the clip for preview or export
2221     *
2222     * @param editSettings The EditSettings reference for generating
2223     * a clip for preview or export
2224     *
2225     * @return <code>0</code> on success, a non-zero native error code on failure,
2226     * or <code>-1</code> if an exception was caught
2226     */
2227    public int generateClip(EditSettings editSettings) {
2228        int err = 0;
2229
2230        try {
2231            err = nativeGenerateClip(editSettings);
2232        } catch (IllegalArgumentException ex) {
2233            Log.e("MediaArtistNativeHelper","Illegal Argument exception in load settings");
2234            return -1;
2235        } catch (IllegalStateException ex) {
2236            Log.e("MediaArtistNativeHelper","Illegal state exception in load settings");
2237            return -1;
2238        } catch (RuntimeException ex) {
2239            Log.e("MediaArtistNativeHelper", "Runtime exception in load settings");
2240            return -1;
2241        }
2242        return err;
2243    }
2244
2245    /**
2246     * Init function to initialise the ClipSettings reference to
2247     * default values
2248     *
2249     * @param lclipSettings The ClipSettings reference
2250     */
2251    void initClipSettings(ClipSettings lclipSettings) {
2252        lclipSettings.clipPath = null;
2253        lclipSettings.clipDecodedPath = null;
2254        lclipSettings.clipOriginalPath = null;
2255        lclipSettings.fileType = 0;
2256        lclipSettings.endCutTime = 0;
2257        lclipSettings.beginCutTime = 0;
2258        lclipSettings.beginCutPercent = 0;
2259        lclipSettings.endCutPercent = 0;
2260        lclipSettings.panZoomEnabled = false;
2261        lclipSettings.panZoomPercentStart = 0;
2262        lclipSettings.panZoomTopLeftXStart = 0;
2263        lclipSettings.panZoomTopLeftYStart = 0;
2264        lclipSettings.panZoomPercentEnd = 0;
2265        lclipSettings.panZoomTopLeftXEnd = 0;
2266        lclipSettings.panZoomTopLeftYEnd = 0;
2267        lclipSettings.mediaRendering = 0;
2268    }
2269
2270
2271    /**
2272     * Populates the settings for generating an effect clip
2273     *
2274     * @param lMediaItem The media item for which the effect clip
2275     * needs to be generated
2276     * @param lclipSettings The ClipSettings reference containing
2277     * clips data
2278     * @param e The EditSettings reference containing effect specific data
2279     * @param uniqueId The unique id used in the name of the output clip
2280     * @param clipNo The intermediate clip number (1 or 2); selects the processing state
2281     *
2282     * @return The name and path of generated clip
2283     */
2284    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2285            EditSettings e,String uniqueId,int clipNo) {
2286        int err = 0;
2287        EditSettings editSettings = null;
2288        String EffectClipPath = null;
2289
2290        editSettings = new EditSettings();
2291
2292        editSettings.clipSettingsArray = new ClipSettings[1];
2293        editSettings.clipSettingsArray[0] = lclipSettings;
2294
2295        editSettings.backgroundMusicSettings = null;
2296        editSettings.transitionSettingsArray = null;
2297        editSettings.effectSettingsArray = e.effectSettingsArray;
2298
2299        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2300                + lMediaItem.getId() + uniqueId + ".3gp");
2301
2302        File tmpFile = new File(EffectClipPath);
2303        if (tmpFile.exists()) {
2304            tmpFile.delete();
2305        }
2306
2307        if (lMediaItem instanceof MediaVideoItem) {
2308            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2309
2310            editSettings.audioFormat = AudioFormat.AAC;
2311            editSettings.audioChannels = 2;
2312            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2313            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2314
2315            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2316            //editSettings.videoFormat = VideoFormat.MPEG4;
2317            editSettings.videoFormat = VideoFormat.H264;
2318            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2319            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
2320                    .getHeight());
2321
2322        } else {
2323            MediaImageItem m = (MediaImageItem)lMediaItem;
2324            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2325            editSettings.audioChannels = 2;
2326            editSettings.audioFormat = AudioFormat.AAC;
2327            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2328
2329            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2330            editSettings.videoFormat = VideoFormat.H264;
2331            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2332            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
2333                    .getScaledHeight());
2334        }
2335
2336        editSettings.outputFile = EffectClipPath;
2337
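        /*
         * The intermediate state chosen here determines how onProgressUpdate()
         * scales this clip's native progress: INTERMEDIATE1 maps to the 0-25% range
         * and INTERMEDIATE2 to 25-50% of the overall progress reported to the app.
         */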
2338        if (clipNo == 1) {
2339            mProcessingState  = PROCESSING_INTERMEDIATE1;
2340        } else if (clipNo == 2) {
2341            mProcessingState  = PROCESSING_INTERMEDIATE2;
2342        }
2343        mProcessingObject = lMediaItem;
2344        err = generateClip(editSettings);
2345        mProcessingState  = PROCESSING_NONE;
2346
2347        if (err == 0) {
2348            lclipSettings.clipPath = EffectClipPath;
2349            lclipSettings.fileType = FileType.THREE_GPP;
2350            return EffectClipPath;
2351        } else {
2352            throw new RuntimeException("preview generation cannot be completed");
2353        }
2354    }
2355
2356
2357    /**
2358     * Populates the settings for generating a Ken Burn effect clip
2359     *
2360     * @param m The media image item for which the Ken Burn effect clip
2361     * needs to be generated
2362     * @param e The EditSettings reference clip specific data
2363     *
2364     * @return The name and path of generated clip
2365     */
2366    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2367        String output = null;
2368        int err = 0;
2369
2370        e.backgroundMusicSettings = null;
2371        e.transitionSettingsArray = null;
2372        e.effectSettingsArray = null;
2373        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2374
2375        File tmpFile = new File(output);
2376        if (tmpFile.exists()) {
2377            tmpFile.delete();
2378        }
2379
2380        e.outputFile = output;
2381        e.audioBitrate = Bitrate.BR_64_KBPS;
2382        e.audioChannels = 2;
2383        e.audioFormat = AudioFormat.AAC;
2384        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2385
2386        e.videoBitrate = Bitrate.BR_5_MBPS;
2387        e.videoFormat = VideoFormat.H264;
2388        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2389        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2390                                                           m.getScaledHeight());
2391        mProcessingState  = PROCESSING_KENBURNS;
2392        mProcessingObject = m;
2393        err = generateClip(e);
2394        // Reset the processing state and check for errors
2395        mProcessingState  = PROCESSING_NONE;
2396        if (err != 0) {
2397            throw new RuntimeException("preview generation cannot be completed");
2398        }
2399        return output;
2400    }
2401
2402
2403    /**
2404     * Calculates the output resolution for transition clip
2405     *
2406     * @param m1 First media item associated with transition
2407     * @param m2 Second media item associated with transition
2408     *
2409     * @return The transition resolution
2410     */
2411    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2412        int clip1Height = 0;
2413        int clip2Height = 0;
2414        int videoSize = 0;
2415
2416        if (m1 != null && m2 != null) {
2417            if (m1 instanceof MediaVideoItem) {
2418                clip1Height = m1.getHeight();
2419            } else if (m1 instanceof MediaImageItem) {
2420                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2421            }
2422            if (m2 instanceof MediaVideoItem) {
2423                clip2Height = m2.getHeight();
2424            } else if (m2 instanceof MediaImageItem) {
2425                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2426            }
2427            if (clip1Height > clip2Height) {
2428                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2429                                                                   clip1Height);
2430            } else {
2431                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2432                                                                   clip2Height);
2433            }
2434        } else if (m1 == null && m2 != null) {
2435            if (m2 instanceof MediaVideoItem) {
2436                clip2Height = m2.getHeight();
2437            } else if (m2 instanceof MediaImageItem) {
2438                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2439            }
2440            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2441                                                                   clip2Height);
2442        } else if (m1 != null && m2 == null) {
2443            if (m1 instanceof MediaVideoItem) {
2444                clip1Height = m1.getHeight();
2445            } else if (m1 instanceof MediaImageItem) {
2446                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2447            }
2448            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2449                                                                   clip1Height);
2450        }
2451        return videoSize;
2452    }
2453
2454    /**
2455     * Populates the settings for generating a transition clip
2456     *
2457     * @param m1 First media item associated with transition
2458     * @param m2 Second media item associated with transition
2459     * @param e The EditSettings reference containing
2460     * clip specific data
2461     * @param uniqueId The unique id used in the name of the output clip
2462     * @param t The Transition specific data
2463     *
2464     * @return The name and path of generated clip
2465     */
2466    String generateTransitionClip(EditSettings e, String uniqueId,
2467            MediaItem m1, MediaItem m2,Transition t) {
2468        String outputFilename = null;
2469        int err = 0;
2470
2471        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2472        e.outputFile = outputFilename;
2473        e.audioBitrate = Bitrate.BR_64_KBPS;
2474        e.audioChannels = 2;
2475        e.audioFormat = AudioFormat.AAC;
2476        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2477
2478        e.videoBitrate = Bitrate.BR_5_MBPS;
2479        e.videoFormat = VideoFormat.H264;
2480        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2481        e.videoFrameSize = getTransitionResolution(m1, m2);
2482
2483        if (new File(outputFilename).exists()) {
2484            new File(outputFilename).delete();
2485        }
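        /* INTERMEDIATE3 progress is scaled into the 50-100% range by onProgressUpdate(). */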
2486        mProcessingState  = PROCESSING_INTERMEDIATE3;
2487        mProcessingObject = t;
2488        err = generateClip(e);
2489        // Reset the processing state and check for errors
2490        mProcessingState  = PROCESSING_NONE;
2491        if (err != 0) {
2492            throw new RuntimeException("preview generation cannot be completed");
2493        }
2494        return outputFilename;
2495    }
2496
2497    /**
2498     * Populates effects and overlays in EffectSettings structure
2499     * and also adjusts the start time and duration of effects and overlays
2500     * w.r.t. the total storyboard time
2501     *
2502     * @param m The media item associated with the effect
2503     * @param effectSettings The EffectSettings reference containing
2504     * effect specific data
2505     * @param beginCutTime The begin cut time of the clip associated with effect
2506     * @param endCutTime The end cut time of the clip associated with effect
2507     * @param storyBoardTime The current story board time
2508     *
2509     * @return The updated index
2510     */
2511    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2512            int beginCutTime, int endCutTime, int storyBoardTime) {
2513        List<Effect> effects = m.getAllEffects();
2514        List<Overlay> overlays = m.getAllOverlays();
2515
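        /*
         * Transitions at the clip boundaries are rendered as separate clips, so the
         * window in which effects can apply is narrowed by the begin and end
         * transition durations before the effect times are adjusted.
         */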
2516        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2517                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2518            beginCutTime += m.getBeginTransition().getDuration();
2519            endCutTime -= m.getEndTransition().getDuration();
2520        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2521                && m.getEndTransition().getDuration() > 0) {
2522            endCutTime -= m.getEndTransition().getDuration();
2523        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2524                && m.getBeginTransition().getDuration() > 0) {
2525            beginCutTime += m.getBeginTransition().getDuration();
2526        }
2527
2528        for (Effect effect : effects) {
2529            if (effect instanceof EffectColor) {
2530                effectSettings[i] = getEffectSettings((EffectColor)effect);
2531                adjustEffectsStartTimeAndDuration(effectSettings[i],
2532                                                      beginCutTime, endCutTime);
2533                effectSettings[i].startTime += storyBoardTime;
2534                i++;
2535            }
2536        }
2537        for (Overlay overlay : overlays) {
2538            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2539            adjustEffectsStartTimeAndDuration(effectSettings[i],
2540                                                      beginCutTime, endCutTime);
2541            effectSettings[i].startTime += storyBoardTime;
2542            i++;
2543        }
2544        return i;
2545    }
2546
2547    /**
2548     * Adjusts the media item boundaries for use in export or preview
2549     *
2550     * @param clipSettings The ClipSettings reference
2551     * @param clipProperties The Properties reference
2552     * @param m The media item
2553     */
2554    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2555                                         Properties clipProperties, MediaItem m) {
2556        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2557                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2558
2559            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2560            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2561
2562        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2563                && m.getEndTransition().getDuration() > 0) {
2564
2565            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2566
2567        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2568                && m.getBeginTransition().getDuration() > 0) {
2569
2570            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2571        }
2572        clipProperties.duration = clipSettings.endCutTime -
2573                                                      clipSettings.beginCutTime;
2574
2575        if (clipProperties.videoDuration != 0) {
2576            clipProperties.videoDuration = clipSettings.endCutTime -
2577                                                      clipSettings.beginCutTime;
2578        }
2579
2580        if (clipProperties.audioDuration != 0) {
2581            clipProperties.audioDuration = clipSettings.endCutTime -
2582                                                      clipSettings.beginCutTime;
2583        }
2584    }
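    /*
     * Worked example (not part of the original source): a clip cut to
     * [1000, 9000] ms with a 500 ms begin transition and a 1000 ms end transition
     * ends up with beginCutTime = 1500, endCutTime = 8000 and a reported duration
     * of 6500 ms.
     */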
2585
2586    /**
2587     * Generates the transition if transition is present
2588     * and is in invalidated state
2589     *
2590     * @param transition The Transition reference
2591     * @param editSettings The EditSettings reference
2592     * @param clipPropertiesArray The clip Properties array
2593     * @param index The index in the clip properties array for the current clip
2594     */
2595    private void generateTransition(Transition transition, EditSettings editSettings,
2596            PreviewClipProperties clipPropertiesArray, int index) {
2597        if (!(transition.isGenerated())) {
2598            transition.generate();
2599        }
2600        editSettings.clipSettingsArray[index] = new ClipSettings();
2601        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2602        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2603        editSettings.clipSettingsArray[index].beginCutTime = 0;
2604        editSettings.clipSettingsArray[index].endCutTime =
2605                                                  (int)transition.getDuration();
2606        editSettings.clipSettingsArray[index].mediaRendering =
2607                                                   MediaRendering.BLACK_BORDERS;
2608        try {
2609            clipPropertiesArray.clipProperties[index] =
2610                                   getMediaProperties(transition.getFilename());
2611        } catch (Exception e) {
2612            throw new IllegalArgumentException("Unsupported file or file not found");
2613        }
2614        clipPropertiesArray.clipProperties[index].Id = null;
2615        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2616        clipPropertiesArray.clipProperties[index].duration =
2617                                                  (int)transition.getDuration();
2618        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2619            clipPropertiesArray.clipProperties[index].videoDuration =
2620                                                  (int)transition.getDuration();
2621        }
2622        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2623            clipPropertiesArray.clipProperties[index].audioDuration =
2624                                                  (int)transition.getDuration();
2625        }
2626    }
2627
2628    /**
2629     * Sets the volume for current media item in clip properties array
2630     *
2631     * @param m The media item
2632     * @param clipProperties The clip properties array reference
2633     * @param index The index in the clip properties array for the current clip
2634     */
2635    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2636                              int index) {
2637        if (m instanceof MediaVideoItem) {
2638            boolean videoMuted = ((MediaVideoItem)m).isMuted();
2639            if (videoMuted == false) {
2640                mClipProperties.clipProperties[index].audioVolumeValue = ((MediaVideoItem)m)
2641                .getVolume();
2642            } else {
2643                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2644            }
2645        } else if (m instanceof MediaImageItem) {
2646            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2647        }
2648    }
2649
2650    /**
2651     * Checks for odd size image width and height
2652     *
2653     * @param m The media item
2654     * @param clipProperties The clip properties array reference
2655     * @param index The index in the clip properties array for the current clip
2656     */
2657    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2658        if (m instanceof MediaImageItem) {
2659            int width = mClipProperties.clipProperties[index].width;
2660            int height = mClipProperties.clipProperties[index].height;
2661
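            /*
             * Video encoders typically require even frame dimensions, so odd image
             * widths and heights are rounded down by one pixel.
             */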
2662            if ((width % 2) != 0) {
2663                width -= 1;
2664            }
2665            if ((height % 2) != 0) {
2666                height -= 1;
2667            }
2668            mClipProperties.clipProperties[index].width = width;
2669            mClipProperties.clipProperties[index].height = height;
2670        }
2671    }
2672
2673    /**
2674     * Populates the media item properties and calculates the maximum
2675     * height among all the clips
2676     *
2677     * @param m The media item
2678     * @param index The index in the clip properties array for the current clip
2679     * @param maxHeight The max height from the clip properties
2680     *
2681     * @return The updated max height if the current clip's height is greater
2682     * than the heights of all previous clips
2683     */
2684    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2685        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2686        if (m instanceof MediaVideoItem) {
2687            mPreviewEditSettings.clipSettingsArray[index] = ((MediaVideoItem)m)
2688            .getVideoClipProperties();
2689            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2690                maxHeight = ((MediaVideoItem)m).getHeight();
2691            }
2692        } else if (m instanceof MediaImageItem) {
2693            mPreviewEditSettings.clipSettingsArray[index] = ((MediaImageItem)m)
2694            .getImageClipProperties();
2695            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2696                maxHeight = ((MediaImageItem)m).getScaledHeight();
2697            }
2698        }
2699        /* For image clips, use the decoded image file for rendering and keep the original clip path. */
2700        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2701            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = ((MediaImageItem)m)
2702            .getDecodedImageFileName();
2703
2704            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2705                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2706        }
2707        return maxHeight;
2708    }
2709
2710    /**
2711     * Populates the background music track properties
2712     *
2713     * @param mediaBGMList The background music list
2714     *
2715     */
2716    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2717
2718        if (mediaBGMList.size() == 1) {
2719            mAudioTrack = mediaBGMList.get(0);
2720        } else
2721        {
2722            mAudioTrack = null;
2723        }
2724
2725        if (mAudioTrack != null) {
2726            mAudioSettings = new AudioSettings();
2727            Properties mAudioProperties = new Properties();
2728            mAudioSettings.pFile = null;
2729            mAudioSettings.Id = mAudioTrack.getId();
2730            try {
2731                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2732            } catch (Exception e) {
2733               throw new IllegalArgumentException("Unsupported file or file not found");
2734            }
2735            mAudioSettings.bRemoveOriginal = false;
2736            mAudioSettings.channels = mAudioProperties.audioChannels;
2737            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2738            mAudioSettings.loop = mAudioTrack.isLooping();
2739            mAudioSettings.ExtendedFs = 0;
2740            mAudioSettings.pFile = mAudioTrack.getFilename();
2741            mAudioSettings.startMs = mAudioTrack.getStartTime();
2742            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2743            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2744            if (mAudioTrack.isMuted()) {
2745                mAudioSettings.volume = 0;
2746            } else {
2747                mAudioSettings.volume = mAudioTrack.getVolume();
2748            }
2749            mAudioSettings.fileType = mAudioProperties.fileType;
2750            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2751            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2752            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2753            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2755            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2756
2757            mPreviewEditSettings.backgroundMusicSettings =
2758                                                  new BackgroundMusicSettings();
2759            mPreviewEditSettings.backgroundMusicSettings.file =
2760                                                         mAudioTrackPCMFilePath;
2761            mPreviewEditSettings.backgroundMusicSettings.fileType =
2762                                                      mAudioProperties.fileType;
2763            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2764                                                     mAudioTrack.getStartTime();
2765            mPreviewEditSettings.backgroundMusicSettings.volumePercent =
2766                                                        mAudioTrack.getVolume();
2767            mPreviewEditSettings.backgroundMusicSettings.beginLoop = mAudioTrack
2768            .getBoundaryBeginTime();
2769            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2770                                               mAudioTrack.getBoundaryEndTime();
2771            mPreviewEditSettings.backgroundMusicSettings.enableDucking = mAudioTrack
2772            .isDuckingEnabled();
2773            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold = mAudioTrack
2774            .getDuckingThreshhold();
2775            mPreviewEditSettings.backgroundMusicSettings.lowVolume = mAudioTrack
2776            .getDuckedTrackVolume();
2777            mPreviewEditSettings.backgroundMusicSettings.isLooping =
2778                                                        mAudioTrack.isLooping();
2779            mPreviewEditSettings.primaryTrackVolume = 100;
2780            mProcessingState  = PROCESSING_AUDIO_PCM;
2781            mProcessingObject = mAudioTrack;
2782        } else {
2783            if (mAudioSettings != null) {
2784                mAudioSettings = null;
2785            }
2786            if (mPreviewEditSettings.backgroundMusicSettings != null) {
2787                mPreviewEditSettings.backgroundMusicSettings = null;
2788            }
2789            mAudioTrackPCMFilePath = null;
2790        }
2791    }
2792
2793    /**
2794     * Calculates all the effects in all the media items
2795     * in media items list
2796     *
2797     * @param mediaItemsList The media item list
2798     *
2799     * @return The total number of effects
2800     *
2801     */
2802    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2803        int totalEffects = 0;
2804        final Iterator<MediaItem> it = mediaItemsList.iterator();
2805        while (it.hasNext()) {
2806            final MediaItem t = it.next();
2807            totalEffects += t.getAllEffects().size();
2808            totalEffects += t.getAllOverlays().size();
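            /*
             * Ken Burns effects are rendered into a separate generated clip (see
             * generateKenBurnsClip) rather than applied as effect settings, so they
             * are excluded from the effect count.
             */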
2809            final Iterator<Effect> ef = t.getAllEffects().iterator();
2810            while (ef.hasNext()) {
2811                final Effect e = ef.next();
2812                if (e instanceof EffectKenBurns)
2813                    totalEffects--;
2814            }
2815        }
2816        return totalEffects;
2817    }
2818
2819    /**
2820     * This function is responsible for forming clip settings
2821     * array and clip properties array including transition clips
2822     * and effect settings for preview or export purposes.
2823     *
2824     *
2825     * @param mediaItemsList The media item list
2826     * @param mediaTransitionList The transitions list
2827     * @param mediaBGMList The background music list
2828     * @param listener The MediaProcessingProgressListener
2829     *
2830     */
2831    public void previewStoryBoard(List<MediaItem> mediaItemsList,
2832            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2833            MediaProcessingProgressListener listener) {
2834        if (mInvalidatePreviewArray) {
2835            int previewIndex = 0;
2836            int totalEffects = 0;
2837            int storyBoardTime = 0;
2838            int maxHeight = 0;
2839            int beginCutTime = 0;
2840            int endCutTime = 0;
2841            int effectIndex = 0;
2842            Transition lTransition = null;
2843            MediaItem lMediaItem = null;
2844            mPreviewEditSettings = new EditSettings();
2845            mClipProperties = new PreviewClipProperties();
2846            mTotalClips = 0;
2847
2848            mTotalClips = mediaItemsList.size();
2849            for (Transition transition : mediaTransitionList) {
2850                if (transition.getDuration() > 0)
2851                    mTotalClips++;
2852            }
2853
2854            totalEffects = getTotalEffects(mediaItemsList);
2855
2856            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2857            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2858            mClipProperties.clipProperties = new Properties[mTotalClips];
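            /*
             * The clip settings and clip properties arrays hold the media items in
             * storyboard order, interleaved with one generated clip per transition
             * whose duration is greater than zero.
             */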
2859
2860            /* Record the callback progress listener */
2861            if (listener != null)
2862            {
2863                mMediaProcessingProgressListener = listener;
2864                mProgressToApp = 0;
2865            }
2866
2867            if (mediaItemsList.size() > 0) {
2868                for (int i = 0; i < mediaItemsList.size(); i++) {
2869                    /* Get the Media Item from the list */
2870                    lMediaItem = mediaItemsList.get(i);
2871                    if (lMediaItem instanceof MediaVideoItem) {
2872                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2873                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2874                    } else if (lMediaItem instanceof MediaImageItem) {
2875                        beginCutTime = 0;
2876                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2877                    }
2878                    /* Get the transition associated with Media Item */
2879                    lTransition = lMediaItem.getBeginTransition();
2880                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2881                        /* generate transition clip */
2882                        generateTransition(lTransition, mPreviewEditSettings,
2883                                           mClipProperties, previewIndex);
2884                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2885                        previewIndex++;
2886                    }
2887                    /* Populate media item properties */
2888                    maxHeight = populateMediaItemProperties(lMediaItem,
2889                                                            previewIndex,
2890                                                            maxHeight);
2891                    if (lMediaItem instanceof MediaImageItem)
2892                    {
2893                        int tmpCnt = 0;
2894                        boolean bEffectKbPresent = false;
2895                        List<Effect> effectList = lMediaItem.getAllEffects();
2896                        /**
2897                         * Check whether a Ken Burns effect is present
2898                         */
2899                        while ( tmpCnt < effectList.size()) {
2900                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2901                                bEffectKbPresent = true;
2902                                break;
2903                            }
2904                            tmpCnt++;
2905                        }
2906
2907                        if (bEffectKbPresent) {
2908                            try {
2909                                mClipProperties.clipProperties[previewIndex]
2910                                    = getMediaProperties(((MediaImageItem)lMediaItem).getGeneratedImageClip());
2911                            } catch (Exception e) {
2912                                throw new IllegalArgumentException("Unsupported file or file not found");
2913                            }
2914                        } else {
2915                            try {
2916                                mClipProperties.clipProperties[previewIndex]
2917                                    = getMediaProperties(((MediaImageItem)lMediaItem).getScaledImageFileName());
2918                            } catch (Exception e) {
2919                                throw new IllegalArgumentException("Unsupported file or file not found");
2920                            }
2921                            mClipProperties.clipProperties[previewIndex].width = ((MediaImageItem)lMediaItem).getScaledWidth();
2922                            mClipProperties.clipProperties[previewIndex].height = ((MediaImageItem)lMediaItem).getScaledHeight();
2923                        }
2924
2925                    }else
2926                    {
2927                        try {
2928                            mClipProperties.clipProperties[previewIndex]
2929                                 = getMediaProperties(lMediaItem.getFilename());
2930                        } catch (Exception e) {
2931                            throw new IllegalArgumentException("Unsupported file or file not found");
2932                        }
2933                    }
2934                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2935                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2936                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2937
2938                    /*
2939                     * Adjust media item start time and end time w.r.t. the begin
2940                     * and end transitions associated with the media item
2941                     */
2942
2943                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2944                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2945
2946                    /*
2947                     * Get all the effects and overlays for that media item and
2948                     * adjust start time and duration of effects
2949                     */
2950
2951                    effectIndex = populateEffects(lMediaItem,
2952                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2953                            endCutTime, storyBoardTime);
2954                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2955                    previewIndex++;
2956
2957                    /* Check if there is any end transition at the last media item */
2958
2959                    if (i == (mediaItemsList.size() - 1)) {
2960                        lTransition = lMediaItem.getEndTransition();
2961                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2962                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
2963                                    previewIndex);
2964                            break;
2965                        }
2966                    }
2967                }
2968            }
2969            if (!mErrorFlagSet) {
2970                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
2971                        .getAspectRatio(), maxHeight);
2972                /*if (mediaBGMList.size() == 1) //for remove Audio check */ {
2973                    populateBackgroundMusicProperties(mediaBGMList);
2974                }
2975                /** call to native populate settings */
2976                try {
2977                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
2978                } catch (IllegalArgumentException ex) {
2979                    Log.e("MediaArtistNativeHelper",
2980                    "Illegal argument exception in nativePopulateSettings");
2981                    throw ex;
2982                } catch (IllegalStateException ex) {
2983                    Log.e("MediaArtistNativeHelper",
2984                    "Illegal state exception in nativePopulateSettings");
2985                    throw ex;
2986                } catch (RuntimeException ex) {
2987                    Log.e("MediaArtistNativeHelper", "Runtime exception in nativePopulateSettings");
2988                    throw ex;
2989                }
2990                mInvalidatePreviewArray = false;
2991                mProcessingState  = PROCESSING_NONE;
2992            }
2993            if (mErrorFlagSet) {
2994                mErrorFlagSet = false;
2995                throw new RuntimeException("preview generation cannot be completed");
2996            }
2997        }
2998    } /* END of previewStoryBoard */
2999
3000    /**
3001     * This function is responsible for starting the preview
3002     *
3003     *
3004     * @param surface The surface on which the preview has to be displayed
3005     * @param fromMs The time in ms from which the preview has to be started
3006     * @param toMs The time in ms until which the preview has to be played
3007     * @param loop Whether or not to loop the preview
3008     * @param callbackAfterFrameCount Indicates after how many frames
3009     * the callback is needed
3010     * @param listener The PreviewProgressListener
3011     *
3012     */
3013    public void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3014            int callbackAfterFrameCount, PreviewProgressListener listener) {
3015        mPreviewProgress = 0;
3016        if (listener != null) {
3017            mPreviewProgressListener = listener;
3018        }
3019        if (!mInvalidatePreviewArray) {
3020            try {
3021                /** Modify the image file names to point to the decoded RGB image files. */
3022                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3023                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3024                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3025                    }
3026                }
3027                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3028                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3029            } catch (IllegalArgumentException ex) {
3030                Log.e("MediaArtistNativeHelper",
3031                "Illegal argument exception in nativeStartPreview");
3032                throw ex;
3033            } catch (IllegalStateException ex) {
3034                Log.e("MediaArtistNativeHelper", "Illegal state exception in nativeStartPreview");
3035                throw ex;
3036            } catch (RuntimeException ex) {
3037                Log.e("MediaArtistNativeHelper", "Runtime exception in nativeStartPreview");
3038                throw ex;
3039            }
3040
3041        } else {
3042            return;
3043        }
3044    }
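    /*
     * A minimal, hypothetical usage sketch of doPreview() as the VideoEditor
     * implementation might call it. It assumes previewStoryBoard() has already been
     * invoked (so mInvalidatePreviewArray is false; otherwise doPreview() simply
     * returns), and that surfaceHolder, storyboardDurationMs and previewListener are
     * caller-owned objects introduced here only for illustration.
     *
     *     Surface surface = surfaceHolder.getSurface();
     *     // Preview the whole storyboard once, with a progress callback every 5 frames.
     *     helper.doPreview(surface, 0, storyboardDurationMs, false, 5, previewListener);
     *     ...
     *     long lastPositionMs = helper.stopPreview();
     */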
3045
3046    /**
3047     * This function is responsible for stopping the preview
3048     */
3049    public long stopPreview() {
3050        nativeStopPreview();
3051        return mPreviewProgress;
3052    }
3053
3054    /**
3055     * This function is responsible for rendering a single frame
3056     * from the complete story board on the surface
3057     *
3058     * @param surface The surface on which frame has to be rendered
3059     * @param time The time in ms at which the frame has to be rendered
3060     * @param surfaceWidth The surface width
3061     * @param surfaceHeight The surface height
3062     *
3063     * @return The actual time from the story board at which the frame was extracted
3064     * and rendered
3065     */
3066    public long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3067                                   int surfaceHeight) {
3068        long timeMs = 0;
3069        if (!mInvalidatePreviewArray) {
3070            try {
3071                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3072                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3073                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3074                    }
3075                }
3076                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3077                timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3078            } catch (IllegalArgumentException ex) {
3079                Log.e("MediaArtistNativeHelper",
3080                "Illegal Argument exception in nativeRenderPreviewFrame");
3081                throw ex;
3082            } catch (IllegalStateException ex) {
3083                Log.e("MediaArtistNativeHelper",
3084                "Illegal state exception in nativeRenderPreviewFrame");
3085                throw ex;
3086            } catch (RuntimeException ex) {
3087                Log.e("MediaArtistNativeHelper", "Runtime exception in nativeRenderPreviewFrame");
3088                throw ex;
3089            }
3090            return timeMs;
3091        } else {
3092
3093            throw new RuntimeException("Call generate preview first");
3094        }
3095    }
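    /*
     * A hypothetical usage sketch: rendering a single storyboard frame, for example
     * while the user scrubs a timeline. As with doPreview(), the preview settings must
     * have been generated first via previewStoryBoard(); otherwise this method throws
     * "Call generate preview first". surface, timelinePositionMs, surfaceWidth and
     * surfaceHeight are caller-side values assumed for illustration.
     *
     *     long renderedTimeMs = helper.renderPreviewFrame(surface, timelinePositionMs,
     *                                                     surfaceWidth, surfaceHeight);
     */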
3096
3097    /**
3098     * This function is responsible for rendering a single frame
3099     * from a single media item on the surface
3100     *
3101     * @param surface The surface on which frame has to be rendered
3102     * @param filepath The file path for which the frame needs to be displayed
3103     * @param time The time in ms at which the frame has to be rendered
3104     * @param framewidth The frame width
3105     * @param frameheight The frame height
3106     *
3107     * @return The actual time from the media item at which the frame was extracted
3108     * and rendered
3109     */
3110    public long renderMediaItemPreviewFrame(Surface surface, String filepath,
3111                                            long time, int framewidth,
3112                                            int frameheight) {
3113        long timeMs = 0;
3114        try {
3115
3116            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3117                    frameheight, 0, 0, time);
3118        } catch (IllegalArgumentException ex) {
3119            Log.e("MediaArtistNativeHelper",
3120            "Illegal Argument exception in renderMediaItemPreviewFrame");
3121            throw ex;
3122        } catch (IllegalStateException ex) {
3123            Log.e("MediaArtistNativeHelper",
3124            "Illegal state exception in renderMediaItemPreviewFrame");
3125            throw ex;
3126        } catch (RuntimeException ex) {
3127            Log.e("MediaArtistNativeHelper", "Runtime exception in renderMediaItemPreviewFrame");
3128            throw ex;
3129        }
3130
3131        return timeMs;
3132    }
3133
3134    /**
3135     * This function sets the flag that invalidates the preview array
3136     * so that the preview is generated again
3137     */
3138    void setGeneratePreview(boolean isRequired) {
3139        mInvalidatePreviewArray = isRequired;
3140    }
3141
3142    /**
3143     * @return The current status of the preview invalidation
3144     * flag
3145     */
3146    boolean getGeneratePreview() {
3147        return mInvalidatePreviewArray;
3148    }
3149
3150    /**
3151     * Calculates the aspect ratio from width and height
3152     *
3153     * @param w The width of media item
3154     * @param h The height of media item
3155     *
3156     * @return The calculated aspect ratio
3157     */
3158    public int getAspectRatio(int w, int h) {
3159        double apRatio = (double)(w) / (double)(h);
3160        BigDecimal bd = new BigDecimal(apRatio);
3161        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3162        apRatio = bd.doubleValue();
3163        int var = MediaProperties.ASPECT_RATIO_16_9;
3164        if (apRatio >= 1.7) {
3165            var = MediaProperties.ASPECT_RATIO_16_9;
3166        } else if (apRatio >= 1.6) {
3167            var = MediaProperties.ASPECT_RATIO_5_3;
3168        } else if (apRatio >= 1.5) {
3169            var = MediaProperties.ASPECT_RATIO_3_2;
3170        } else if (apRatio > 1.3) {
3171            var = MediaProperties.ASPECT_RATIO_4_3;
3172        } else if (apRatio >= 1.2) {
3173            var = MediaProperties.ASPECT_RATIO_11_9;
3174        }
3175        return var;
3176    }
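    /*
     * Worked example for the bucketing above (illustrative only): for a 1280x720 media
     * item, 1280 / 720 = 1.778 after rounding to three decimals, which falls in the
     * ">= 1.7" bucket, so MediaProperties.ASPECT_RATIO_16_9 is returned. A 640x480 item
     * gives 1.333, which falls in the "> 1.3" bucket and maps to ASPECT_RATIO_4_3.
     *
     *     int ratio = helper.getAspectRatio(1280, 720); // MediaProperties.ASPECT_RATIO_16_9
     */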
3177
3178    /**
3179     * Maps the file type used in native layer
3180     * to file type used in JAVA layer
3181     *
3182     * @param fileType The file type in native layer
3183     *
3184     * @return The File type in JAVA layer
3185     */
3186    public int getFileType(int fileType) {
3187        int retValue = -1;
3188        switch (fileType) {
3189            case FileType.UNSUPPORTED:
3190                retValue = MediaProperties.FILE_UNSUPPORTED;
3191                break;
3192            case FileType.THREE_GPP:
3193                retValue = MediaProperties.FILE_3GP;
3194                break;
3195            case FileType.MP4:
3196                retValue = MediaProperties.FILE_MP4;
3197                break;
3198            case FileType.JPG:
3199                retValue = MediaProperties.FILE_JPEG;
3200                break;
3201            case FileType.PNG:
3202                retValue = MediaProperties.FILE_PNG;
3203                break;
3204            case FileType.MP3:
3205                retValue = MediaProperties.FILE_MP3;
3206                break;
3207
3208            default:
3209                retValue = -1;
3210        }
3211        return retValue;
3212    }
3213
3214    /**
3215     * Maps the video codec type used in native layer
3216     * to video codec type used in JAVA layer
3217     *
3218     * @param codecType The video codec type in native layer
3219     *
3220     * @return The video codec type in JAVA layer
3221     */
3222    public int getVideoCodecType(int codecType) {
3223        int retValue = -1;
3224        switch (codecType) {
3225            case VideoFormat.H263:
3226                retValue = MediaProperties.VCODEC_H263;
3227                break;
3228            case VideoFormat.H264:
3229                retValue = MediaProperties.VCODEC_H264BP;
3230                break;
3231            case VideoFormat.MPEG4:
3232                retValue = MediaProperties.VCODEC_MPEG4;
3233                break;
3234            case VideoFormat.UNSUPPORTED:
3235
3236            default:
3237                retValue = -1;
3238        }
3239        return retValue;
3240    }
3241
3242    /**
3243     * Maps the audio codec type used in native layer
3244     * to audio codec type used in JAVA layer
3245     *
3246     * @param codecType The audio codec type in native layer
3247     *
3248     * @return The audio codec type in JAVA layer
3249     */
3250    public int getAudioCodecType(int codecType) {
3251        int retValue = -1;
3252        switch (codecType) {
3253            case AudioFormat.AMR_NB:
3254                retValue = MediaProperties.ACODEC_AMRNB;
3255                break;
3256            case AudioFormat.AAC:
3257                retValue = MediaProperties.ACODEC_AAC_LC;
3258                break;
3259            case AudioFormat.MP3:
3260                retValue = MediaProperties.ACODEC_MP3;
3261                break;
3262
3263            default:
3264                retValue = -1;
3265        }
3266        return retValue;
3267    }
3268
3269    /**
3270     * Returns the frame rate as integer
3271     *
3272     * @param fps The fps as enum
3273     *
3274     * @return The frame rate as integer
3275     */
3276    public int getFrameRate(int fps) {
3277        int retValue = -1;
3278        switch (fps) {
3279            case VideoFrameRate.FR_5_FPS:
3280                retValue = 5;
3281                break;
3282            case VideoFrameRate.FR_7_5_FPS:
3283                retValue = 8;
3284                break;
3285            case VideoFrameRate.FR_10_FPS:
3286                retValue = 10;
3287                break;
3288            case VideoFrameRate.FR_12_5_FPS:
3289                retValue = 13;
3290                break;
3291            case VideoFrameRate.FR_15_FPS:
3292                retValue = 15;
3293                break;
3294            case VideoFrameRate.FR_20_FPS:
3295                retValue = 20;
3296                break;
3297            case VideoFrameRate.FR_25_FPS:
3298                retValue = 25;
3299                break;
3300            case VideoFrameRate.FR_30_FPS:
3301                retValue = 30;
3302                break;
3303
3304            default:
3305                retValue = -1;
3306        }
3307        return retValue;
3308    }
3309
3310    /**
3311     * Maps the file type used in JAVA layer
3312     * to file type used in native layer
3313     *
3314     * @param fileType The file type in JAVA layer
3315     *
3316     * @return The File type in native layer
3317     */
3318    int getMediaItemFileType(int fileType) {
3319        int retValue = -1;
3320
3321        switch (fileType) {
3322            case MediaProperties.FILE_UNSUPPORTED:
3323                retValue = FileType.UNSUPPORTED;
3324                break;
3325            case MediaProperties.FILE_3GP:
3326                retValue = FileType.THREE_GPP;
3327                break;
3328            case MediaProperties.FILE_MP4:
3329                retValue = FileType.MP4;
3330                break;
3331            case MediaProperties.FILE_JPEG:
3332                retValue = FileType.JPG;
3333                break;
3334            case MediaProperties.FILE_PNG:
3335                retValue = FileType.PNG;
3336                break;
3337
3338            default:
3339                retValue = -1;
3340        }
3341        return retValue;
3342
3343    }
3344
3345    /**
3346     * Maps the rendering mode used in JAVA layer
3347     * to rendering mode used in native layer
3348     *
3349     * @param renderingMode The rendering mode in JAVA layer
3350     *
3351     * @return The rendering mode in native layer
3352     */
3353    int getMediaItemRenderingMode(int renderingMode) {
3354        int retValue = -1;
3355        switch (renderingMode) {
3356            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3357                retValue = MediaRendering.BLACK_BORDERS;
3358                break;
3359            case MediaItem.RENDERING_MODE_STRETCH:
3360                retValue = MediaRendering.RESIZING;
3361                break;
3362            case MediaItem.RENDERING_MODE_CROPPING:
3363                retValue = MediaRendering.CROPPING;
3364                break;
3365
3366            default:
3367                retValue = -1;
3368        }
3369        return retValue;
3370    }
3371
3372    /**
3373     * Maps the transition behavior used in JAVA layer
3374     * to transition behavior used in native layer
3375     *
3376     * @param transitionType The transition behavior in JAVA layer
3377     *
3378     * @return The transition behavior in native layer
3379     */
3380    int getVideoTransitionBehaviour(int transitionType) {
3381        int retValue = -1;
3382        switch (transitionType) {
3383            case Transition.BEHAVIOR_SPEED_UP:
3384                retValue = TransitionBehaviour.SPEED_UP;
3385                break;
3386            case Transition.BEHAVIOR_SPEED_DOWN:
3387                retValue = TransitionBehaviour.SPEED_DOWN;
3388                break;
3389            case Transition.BEHAVIOR_LINEAR:
3390                retValue = TransitionBehaviour.LINEAR;
3391                break;
3392            case Transition.BEHAVIOR_MIDDLE_SLOW:
3393                retValue = TransitionBehaviour.SLOW_MIDDLE;
3394                break;
3395            case Transition.BEHAVIOR_MIDDLE_FAST:
3396                retValue = TransitionBehaviour.FAST_MIDDLE;
3397                break;
3398
3399            default:
3400                retValue = -1;
3401        }
3402        return retValue;
3403    }
3404
3405    /**
3406     * Maps the transition slide direction used in JAVA layer
3407     * to transition slide direction used in native layer
3408     *
3409     * @param slideDirection The transition slide direction
3410     * in JAVA layer
3411     *
3412     * @return The transition slide direction in native layer
3413     */
3414    int getSlideSettingsDirection(int slideDirection) {
3415        int retValue = -1;
3416        switch (slideDirection) {
3417            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3418                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3419                break;
3420            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3421                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3422                break;
3423            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3424                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3425                break;
3426            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3427                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3428                break;
3429
3430            default:
3431                retValue = -1;
3432        }
3433        return retValue;
3434    }
3435
3436    /**
3437     * Maps the effect color type used in JAVA layer
3438     * to effect color type used in native layer
3439     *
3440     * @param effect The EffectColor reference
3441     *
3442     * @return The color effect value from native layer
3443     */
3444    private int getEffectColorType(EffectColor effect) {
3445        int retValue = -1;
3446        switch (effect.getType()) {
3447            case EffectColor.TYPE_COLOR:
3448                if (effect.getColor() == EffectColor.GREEN) {
3449                    retValue = VideoEffect.GREEN;
3450                } else if (effect.getColor() == EffectColor.PINK) {
3451                    retValue = VideoEffect.PINK;
3452                } else if (effect.getColor() == EffectColor.GRAY) {
3453                    retValue = VideoEffect.BLACK_AND_WHITE;
3454                } else {
3455                    retValue = VideoEffect.COLORRGB16;
3456                }
3457                break;
3458            case EffectColor.TYPE_GRADIENT:
3459                retValue = VideoEffect.GRADIENT;
3460                break;
3461            case EffectColor.TYPE_SEPIA:
3462                retValue = VideoEffect.SEPIA;
3463                break;
3464            case EffectColor.TYPE_NEGATIVE:
3465                retValue = VideoEffect.NEGATIVE;
3466                break;
3467            case EffectColor.TYPE_FIFTIES:
3468                retValue = VideoEffect.FIFTIES;
3469                break;
3470
3471            default:
3472                retValue = -1;
3473        }
3474        return retValue;
3475    }
3476
3477    /**
3478     * Calculates the video resolution for the output clip
3479     * based on the clip's height and the aspect ratio of the storyboard
3480     *
3481     * @param aspectRatio The aspect ratio of the story board
3482     * @param height The height of the clip
3483     *
3484     * @return The video resolution
3485     */
3486    private int findVideoResolution(int aspectRatio, int height) {
3487        final Pair<Integer, Integer>[] resolutions;
3488        final Pair<Integer, Integer> maxResolution;
3489        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3490        switch (aspectRatio) {
3491            case MediaProperties.ASPECT_RATIO_3_2:
3492                if (height == MediaProperties.HEIGHT_480)
3493                    retValue = VideoFrameSize.NTSC;
3494                else if (height == MediaProperties.HEIGHT_720)
3495                    retValue = VideoFrameSize.W720p;
3496                break;
3497            case MediaProperties.ASPECT_RATIO_16_9:
3498                if (height == MediaProperties.HEIGHT_480)
3499                    retValue = VideoFrameSize.WVGA16x9;
3500                else if (height == MediaProperties.HEIGHT_720)
3501                    retValue = VideoFrameSize.V720p;
3502                break;
3503            case MediaProperties.ASPECT_RATIO_4_3:
3504                if (height == MediaProperties.HEIGHT_480)
3505                    retValue = VideoFrameSize.VGA;
3506                if (height == MediaProperties.HEIGHT_720)
3507                    retValue = VideoFrameSize.S720p;
3508                break;
3509            case MediaProperties.ASPECT_RATIO_5_3:
3510                if (height == MediaProperties.HEIGHT_480)
3511                    retValue = VideoFrameSize.WVGA;
3512                break;
3513            case MediaProperties.ASPECT_RATIO_11_9:
3514                if (height == MediaProperties.HEIGHT_144)
3515                    retValue = VideoFrameSize.QCIF;
3516                break;
3517        }
3518        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3519            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3520            // Get the highest resolution
3521            maxResolution = resolutions[resolutions.length - 1];
3522            retValue = findVideoResolution(mVideoEditor.getAspectRatio(),
3523                                           maxResolution.second);
3524        }
3525
3526        return retValue;
3527    }
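    /*
     * Worked example (illustrative only): with a storyboard aspect ratio of
     * MediaProperties.ASPECT_RATIO_16_9 and a clip height of MediaProperties.HEIGHT_720,
     * the switch above returns VideoFrameSize.V720p. If no entry matches and retValue is
     * still SIZE_UNDEFINED, the method falls back to the highest resolution supported for
     * the editor's aspect ratio and resolves that instead.
     *
     *     int frameSize = findVideoResolution(MediaProperties.ASPECT_RATIO_16_9,
     *                                         MediaProperties.HEIGHT_720); // VideoFrameSize.V720p
     */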
3528
3529    /**
3530     * This method is responsible for exporting a movie
3531     *
3532     * @param filePath The output file path
3533     * @param projectDir The output project directory
3534     * @param height The height of clip
3535     * @param bitrate The bitrate at which the movie should be exported
3536     * @param mediaItemsList The media items list
3537     * @param mediaTransitionList The transitions list
3538     * @param mediaBGMList The background track list
3539     * @param listener The ExportProgressListener
3540     *
3541     */
3542    public void export(String filePath, String projectDir, int height, int bitrate,
3543            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3544            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3545
3546        int outBitrate = 0;
3547        mExportFilename = filePath;
3548        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3549        if (listener != null) {
3550            mExportProgressListener = listener;
3551        }
3552        mProgressToApp = 0;
3553
3554        switch (bitrate) {
3555            case MediaProperties.BITRATE_28K:
3556                outBitrate = Bitrate.BR_32_KBPS;
3557                break;
3558            case MediaProperties.BITRATE_40K:
3559                outBitrate = Bitrate.BR_48_KBPS;
3560                break;
3561            case MediaProperties.BITRATE_64K:
3562                outBitrate = Bitrate.BR_64_KBPS;
3563                break;
3564            case MediaProperties.BITRATE_96K:
3565                outBitrate = Bitrate.BR_96_KBPS;
3566                break;
3567            case MediaProperties.BITRATE_128K:
3568                outBitrate = Bitrate.BR_128_KBPS;
3569                break;
3570            case MediaProperties.BITRATE_192K:
3571                outBitrate = Bitrate.BR_192_KBPS;
3572                break;
3573            case MediaProperties.BITRATE_256K:
3574                outBitrate = Bitrate.BR_256_KBPS;
3575                break;
3576            case MediaProperties.BITRATE_384K:
3577                outBitrate = Bitrate.BR_384_KBPS;
3578                break;
3579            case MediaProperties.BITRATE_512K:
3580                outBitrate = Bitrate.BR_512_KBPS;
3581                break;
3582            case MediaProperties.BITRATE_800K:
3583                outBitrate = Bitrate.BR_800_KBPS;
3584                break;
3585            case MediaProperties.BITRATE_2M:
3586                outBitrate = Bitrate.BR_2_MBPS;
3587                break;
3588
3589            case MediaProperties.BITRATE_5M:
3590                outBitrate = Bitrate.BR_5_MBPS;
3591                break;
3592            case MediaProperties.BITRATE_8M:
3593                outBitrate = Bitrate.BR_8_MBPS;
3594                break;
3595
3596            default:
3597                throw new IllegalArgumentException("Argument Bitrate incorrect");
3598        }
3599        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3600        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3601
3602        int aspectRatio = mVideoEditor.getAspectRatio();
3603        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3604        mPreviewEditSettings.videoFormat = VideoFormat.H264;
3605        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3606        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3607        mPreviewEditSettings.maxFileSize = 0;
3608        mPreviewEditSettings.audioChannels = 2;
3609        mPreviewEditSettings.videoBitrate = outBitrate;
3610        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3611
3612        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3613        for (int index = 0; index < mTotalClips - 1; index++) {
3614            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3615            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType = VideoTransition.NONE;
3616            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType = AudioTransition.NONE;
3617        }
3618        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3619            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3620                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3621                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3622            }
3623        }
3624        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3625
3626        int err = 0;
3627        try {
3628            mProcessingState  = PROCESSING_EXPORT;
3629            mProcessingObject = null;
3630            err = generateClip(mPreviewEditSettings);
3631            mProcessingState  = PROCESSING_NONE;
3632        } catch (IllegalArgumentException ex) {
3633            Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
3634            throw ex;
3635        } catch (IllegalStateException ex) {
3636            Log.e("MediaArtistNativeHelper", "IllegalStateExceptiont for generateClip");
3637            throw ex;
3638        } catch (RuntimeException ex) {
3639            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3640            throw ex;
3641        }
3642
3643        if (err != 0) {
3644            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3645            throw new RuntimeException("generateClip failed with error="+err );
3646        }
3647
3648        mExportDone = true;
3649        setGeneratePreview(true);
3650        mExportProgressListener = null;
3651    }
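    /*
     * A hypothetical export call, sketched as the VideoEditor implementation might make
     * it. This overload always encodes H.264 video with AAC audio at 32 kHz; the second
     * overload below additionally lets the caller choose the audio and video codecs.
     * The output path and the items, transitions, bgmTracks and exportListener objects
     * are assumptions made only for illustration.
     *
     *     helper.export("/sdcard/output.mp4", projectPath, MediaProperties.HEIGHT_720,
     *                   MediaProperties.BITRATE_2M, items, transitions, bgmTracks,
     *                   exportListener);
     */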
3652
3653    /**
3654     * This method is responsible for exporting a movie
3655     *
3656     * @param filePath The output file path
3657     * @param projectDir The output project directory
3658     * @param height The height of clip
3659     * @param bitrate The bitrate at which the movie should be exported
3660     * @param audioCodec The audio codec to use
3661     * @param videoCodec The video codec to use
3662     * @param mediaItemsList The media items list
3663     * @param mediaTransitionList The transitions list
3664     * @param mediaBGMList The background track list
3665     * @param listener The ExportProgressListener
3666     *
3667     */
3668    public void export(String filePath, String projectDir,int height,int bitrate,
3669            int audioCodec,int videoCodec,List<MediaItem> mediaItemsList,
3670            List<Transition> mediaTransitionList,List<AudioTrack> mediaBGMList,
3671            ExportProgressListener listener) {
3672
3673        int outBitrate = 0;
3674        mExportFilename = filePath;
3675        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3676        if (listener != null) {
3677            mExportProgressListener = listener;
3678        }
3679        mProgressToApp = 0;
3680
3681        switch (bitrate) {
3682            case MediaProperties.BITRATE_28K:
3683                outBitrate = Bitrate.BR_32_KBPS;
3684                break;
3685            case MediaProperties.BITRATE_40K:
3686                outBitrate = Bitrate.BR_48_KBPS;
3687                break;
3688            case MediaProperties.BITRATE_64K:
3689                outBitrate = Bitrate.BR_64_KBPS;
3690                break;
3691            case MediaProperties.BITRATE_96K:
3692                outBitrate = Bitrate.BR_96_KBPS;
3693                break;
3694            case MediaProperties.BITRATE_128K:
3695                outBitrate = Bitrate.BR_128_KBPS;
3696                break;
3697            case MediaProperties.BITRATE_192K:
3698                outBitrate = Bitrate.BR_192_KBPS;
3699                break;
3700            case MediaProperties.BITRATE_256K:
3701                outBitrate = Bitrate.BR_256_KBPS;
3702                break;
3703            case MediaProperties.BITRATE_384K:
3704                outBitrate = Bitrate.BR_384_KBPS;
3705                break;
3706            case MediaProperties.BITRATE_512K:
3707                outBitrate = Bitrate.BR_512_KBPS;
3708                break;
3709            case MediaProperties.BITRATE_800K:
3710                outBitrate = Bitrate.BR_800_KBPS;
3711                break;
3712            case MediaProperties.BITRATE_2M:
3713                outBitrate = Bitrate.BR_2_MBPS;
3714                break;
3715            case MediaProperties.BITRATE_5M:
3716                outBitrate = Bitrate.BR_5_MBPS;
3717                break;
3718            case MediaProperties.BITRATE_8M:
3719                outBitrate = Bitrate.BR_8_MBPS;
3720                break;
3721
3722            default:
3723                throw new IllegalArgumentException("Argument Bitrate incorrect");
3724        }
3725        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3726        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3727
3728        int aspectRatio = mVideoEditor.getAspectRatio();
3729        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3730        switch (audioCodec) {
3731            case MediaProperties.ACODEC_AAC_LC:
3732                mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3733                break;
3734            case MediaProperties.ACODEC_AMRNB:
3735                mPreviewEditSettings.audioFormat = AudioFormat.AMR_NB;
3736                break;
3737        }
3738
3739        switch (videoCodec) {
3740            case MediaProperties.VCODEC_H263:
3741                mPreviewEditSettings.videoFormat = VideoFormat.H263;
3742                break;
3743            case MediaProperties.VCODEC_H264BP:
3744                mPreviewEditSettings.videoFormat = VideoFormat.H264;
3745                break;
3746            case MediaProperties.VCODEC_MPEG4:
3747                mPreviewEditSettings.videoFormat = VideoFormat.MPEG4;
3748                break;
3749        }
3750
3751        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3752        mPreviewEditSettings.maxFileSize = 0;
3753        mPreviewEditSettings.audioChannels = 2;
3754        mPreviewEditSettings.videoBitrate = outBitrate;
3755        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3756
3757        mPreviewEditSettings.transitionSettingsArray =
3758                                        new TransitionSettings[mTotalClips - 1];
3759        for (int index = 0; index < mTotalClips - 1; index++) {
3760            mPreviewEditSettings.transitionSettingsArray[index] =
3761                                                       new TransitionSettings();
3762            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3763                                                                      VideoTransition.NONE;
3764            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3765                                                                      AudioTransition.NONE;
3766        }
3767        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3768            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3769                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3770                  mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3771            }
3772        }
3773        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3774
3775        int err = 0;
3776        try {
3777            mProcessingState  = PROCESSING_EXPORT;
3778            mProcessingObject = null;
3779            err = generateClip(mPreviewEditSettings);
3780            mProcessingState  = PROCESSING_NONE;
3781        } catch (IllegalArgumentException ex) {
3782            Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
3783            throw ex;
3784        } catch (IllegalStateException ex) {
3785            Log.e("MediaArtistNativeHelper", "IllegalStateExceptiont for generateClip");
3786            throw ex;
3787        } catch (RuntimeException ex) {
3788            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3789            throw ex;
3790        }
3791
3792        if (err != 0) {
3793            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3794            throw new RuntimeException("generateClip failed with error="+err );
3795        }
3796
3797        mExportDone = true;
3798        setGeneratePreview(true);
3799        mExportProgressListener = null;
3800    }
3801
3802
3803    /**
3804     * This method takes care of stopping the export process
3805     *
3806     * @param filename The file name for which the export has to be stopped
3807     */
3808    public void stop(String filename) {
3809        if (!mExportDone) {
3810            try {
3811                stopEncoding();
3812            } catch (IllegalStateException ex) {
3813                Log.e("MediaArtistNativeHelper", "Illegal state exception in unload settings");
3814                throw ex;
3815            } catch (RuntimeException ex) {
3816                Log.e("MediaArtistNativeHelper", "Runtime exception in unload settings");
3817                throw ex;
3818            }
3819
3820            new File(mExportFilename).delete();
3821        }
3822    }
3823
3824    /**
3825     * This method extracts a frame from the input file
3826     * and returns the frame as a bitmap
3827     *
3828     * @param inputFile The inputFile
3829     * @param width The width of the output frame
3830     * @param height The height of the output frame
3831     * @param timeMS The time in ms at which the frame has to be extracted
3832     */
3833    public Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3834        if (inputFile == null) {
3835            throw new IllegalArgumentException();
3836        }
3837
3838        IntBuffer rgb888 = IntBuffer.allocate(width * height * 4);
3839        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3840        nativeGetPixels(inputFile, rgb888.array(), width, height, timeMS);
3841        bitmap.copyPixelsFromBuffer(rgb888);
3842
3843        return bitmap;
3844    }
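    /*
     * A hypothetical usage sketch: extracting a single thumbnail bitmap from a clip at
     * the 2-second mark. The input path and dimensions are assumptions for illustration.
     *
     *     Bitmap thumbnail = helper.getPixels("/sdcard/clip.3gp", 320, 240, 2000);
     */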
3845
3846    /**
3847     * This method extracts a list of frames from the
3848     * input file and returns the frames in a bitmap array
3849     *
3850     * @param filename The inputFile
3851     * @param width The width of the output frame
3852     * @param height The height of the output frame
3853     * @param startMs The starting time in ms
3854     * @param endMs The end time in ms
3855     * @param thumbnailCount The number of frames to be extracted
3856     * from startMs to endMs
3857     *
3858     * @return The frames as bitmaps in bitmap array
3859     **/
3860    public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
3861            int thumbnailCount) {
3862        int[] rgb888 = null;
3863        int thumbnailSize = width * height * 4;
3864
3865        int i = 0;
3866        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
3867        Bitmap[] bitmap = null;
3868        try {
3869            // This may result in an OutOfMemoryError
3870            rgb888 = new int[thumbnailSize * thumbnailCount];
3871            bitmap = new Bitmap[thumbnailCount];
3872        } catch (Throwable e) {
3873            // Retry the allocation with a fixed, smaller thumbnail count
3874            try {
3875                System.gc();
3876                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
3877                bitmap = new Bitmap[MAX_THUMBNAIL_PERMITTED];
3878                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
3879            } catch (Throwable ex) {
3880                throw new RuntimeException("Memory allocation fails,reduce nos of thumbanail count");
3881            }
3882        }
3883        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
3884        nativeGetPixelsList(filename, rgb888, width, height, deltaTime, thumbnailCount, startMs,
3885                endMs);
3886        for (; i < thumbnailCount; i++) {
3887            bitmap[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3888            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
3889            tmpBuffer.rewind();
3890            bitmap[i].copyPixelsFromBuffer(tmpBuffer);
3891        }
3892
3893        return bitmap;
3894    }
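    /*
     * A hypothetical usage sketch: extracting 8 evenly spaced thumbnails between 0 ms and
     * 8000 ms, i.e. one every (8000 - 0) / 8 = 1000 ms. If the pixel buffer for the
     * requested count cannot be allocated, the method silently clamps the count to
     * MAX_THUMBNAIL_PERMITTED. The input path and dimensions are assumptions.
     *
     *     Bitmap[] thumbnails = helper.getPixelsList("/sdcard/clip.3gp", 160, 120, 0, 8000, 8);
     */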
3895
3896    /**
3897     * This method generates the audio graph
3898     *
3899     * @param uniqueId The unique id
3900     * @param inFileName The input file
3901     * @param OutAudiGraphFileName The output audio graph file name
3902     * @param frameDuration The duration of each frame
3903     * @param audioChannels The number of audio channels
3904     * @param samplesCount The total number of samples
3905     * @param listener The ExtractAudioWaveformProgressListener reference
3906     * @param isVideo Flag indicating whether the input file is a video file
3907     *
3908     **/
3909    public void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
3910            int frameDuration, int audioChannels, int samplesCount,
3911            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
3912        String tempPCMFileName;
3913
3914        if (listener != null) {
3915            mExtractAudioWaveformProgressListener = listener;
3916        }
3917        /**
3918         * In the case of video, the first call generates the PCM file used to build
3919         * the audio graph
3920         */
3921        if (isVideo) {
3922            tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
3923        } else {
3924            tempPCMFileName = mAudioTrackPCMFilePath;
3925        }
3926        /**
3927         * For a video item, generate the PCM
3928         */
3929        if (isVideo) {
3930            nativeGenerateRawAudio(inFileName, tempPCMFileName);
3931        }
3932
3933        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
3934                audioChannels, samplesCount);
3935
3936        /* Once the audio graph file is generated, delete the PCM file */
3937        if (isVideo) {
3938            new File(tempPCMFileName).delete();
3939        }
3940    }
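    /*
     * A hypothetical usage sketch: building an audio graph for a video media item. For
     * video input the method first decodes the audio to a temporary PCM file under the
     * project path, generates the graph from it, and then deletes the PCM file; for an
     * audio track it reuses the already extracted PCM file. The id, paths and counts
     * below are assumptions for illustration.
     *
     *     helper.generateAudioGraph("item1", "/sdcard/clip.3gp",
     *             projectPath + "/item1.graph", 1500, 2, 1024, waveformListener, true);
     */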
3941
3942    /** Native Methods */
3943
3944    public native Properties getMediaProperties(String file) throws IllegalArgumentException,
3945    IllegalStateException, RuntimeException, Exception;
3946
3947    /**
3948     * Get the version of ManualEdit.
3949     *
3950     * @return version of ManualEdit
3951     * @throws RuntimeException if an error occurred
3952     * @see Version
3953     */
3954    public static native Version getVersion() throws RuntimeException;
3955
3956    /**
3957     * Returns the video thumbnail in an array of integers. Output format is
3958     * ARGB8888.
3959     *
3960     * @param pixelArray the array that receives the pixel values
3961     * @param width width of the video thumbnail
3962     * @param height height of the video thumbnail
3963     * @param timeMS desired time of the thumbnail in ms
3964     * @return actual time in ms of the thumbnail generated
3965     * @throws IllegalStateException if the class has not been initialized
3966     * @throws IllegalArgumentException if the pixelArray is not available or
3967     *             one of the dimensions is negative or zero or the time is
3968     *             negative
3969     * @throws RuntimeException on runtime errors in native code
3970     */
3971    public native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
3972            long timeMS);
3973
3974    public native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
3975            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
3976
3977    /**
3978     * Releases the JNI and cleans up the core native module. Should be called
3979     * only after init()
3980     *
3981     * @throws IllegalStateException if the method could not be called
3982     */
3983    public native void release() throws IllegalStateException, RuntimeException;
3984
3985
3986
3987
3988    /**
3989     * Stops the encoding. This method should only be called after encoding has
3990     * started using the method <code>startEncoding</code>
3991     *
3992     * @throws IllegalStateException if the method could not be called
3993     */
3994    public native void stopEncoding() throws IllegalStateException, RuntimeException;
3995
3996
3997
3998    private native void _init(String tempPath, String libraryPath)
3999            throws IllegalArgumentException, IllegalStateException, RuntimeException;
4000
4001    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
4002            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
4003            IllegalStateException, RuntimeException;
4004
4005    private native void nativePopulateSettings(EditSettings mEditSettings,
4006            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
4007    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4008
4009    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
4010                                                 int surfaceWidth, int surfaceHeight)
4011                                                 throws IllegalArgumentException,
4012                                                 IllegalStateException, RuntimeException;
4013
4014    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
4015            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
4016    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4017
4018    private native void nativeStopPreview();
4019
4020    public native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
4021            int frameDuration, int channels, int sampleCount);
4022
4023    public native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
4024
4025    public native int nativeGenerateClip(EditSettings editSettings)
4026    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4027
4028}
4029