MediaArtistNativeHelper.java revision a3f87f855b9abe2d086658eba8a92e04b69dd078
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.Iterator;
24import java.util.List;
25import java.util.concurrent.Semaphore;
26import java.util.concurrent.TimeUnit;
27
28import android.graphics.Bitmap;
29import android.graphics.BitmapFactory;
30import android.graphics.Canvas;
31import android.graphics.Paint;
32import android.graphics.Rect;
33import android.media.videoeditor.VideoEditor.ExportProgressListener;
34import android.media.videoeditor.VideoEditor.PreviewProgressListener;
35import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
36import android.util.Log;
37import android.util.Pair;
38import android.view.Surface;
39
40/**
41 *This class provide Native methods to be used by MediaArtist {@hide}
42 */
43class MediaArtistNativeHelper {
44    private static final String TAG = "MediaArtistNativeHelper";
45
46    static {
47        System.loadLibrary("videoeditor_jni");
48    }
49
50    private static final int MAX_THUMBNAIL_PERMITTED = 8;
51
52    public static final int TASK_LOADING_SETTINGS = 1;
53    public static final int TASK_ENCODING = 2;
54
55    /**
56     *  The resize paint
57     */
58    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
59
60    private final VideoEditor mVideoEditor;
61
62    private EditSettings mStoryBoardSettings;
63
64    private String mOutputFilename;
65
66    private PreviewClipProperties mClipProperties = null;
67
68    private EditSettings mPreviewEditSettings;
69
70    private AudioSettings mAudioSettings = null;
71
72    private AudioTrack mAudioTrack = null;
73
74    private boolean mInvalidatePreviewArray = true;
75
76    private boolean mRegenerateAudio = true;
77
78    private String mExportFilename = null;
79
80    private int mProgressToApp;
81
82    /*
83     *  Semaphore to control preview calls
84     */
85    private final Semaphore mLock = new Semaphore(1, true);
86
87    private String mRenderPreviewOverlayFile;
88    private int mRenderPreviewRenderingMode;
89
90    private boolean mIsFirstProgress;
91
92    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
93
94    // Processing indication
95    public static final int PROCESSING_NONE          = 0;
96    public static final int PROCESSING_AUDIO_PCM     = 1;
97    public static final int PROCESSING_TRANSITION    = 2;
98    public static final int PROCESSING_KENBURNS      = 3;
99    public static final int PROCESSING_INTERMEDIATE1 = 11;
100    public static final int PROCESSING_INTERMEDIATE2 = 12;
101    public static final int PROCESSING_INTERMEDIATE3 = 13;
102    public static final int PROCESSING_EXPORT        = 20;
103
104    private int mProcessingState;
105    private Object mProcessingObject;
106    private PreviewProgressListener mPreviewProgressListener;
107    private ExportProgressListener mExportProgressListener;
108    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
109    private MediaProcessingProgressListener mMediaProcessingProgressListener;
110    private final String mProjectPath;
111
112    private long mPreviewProgress;
113
114    private String mAudioTrackPCMFilePath;
115
116    private int mTotalClips = 0;
117
118    private boolean mErrorFlagSet = false;
119
120    @SuppressWarnings("unused")
121    private int mManualEditContext;
122
123    /* Listeners */
124
125    /**
126     * Interface definition for a listener to be invoked when there is an update
127     * in a running task.
128     */
129    public interface OnProgressUpdateListener {
130        /**
131         * Called when there is an update.
132         *
133         * @param taskId id of the task reporting an update.
134         * @param progress progress of the task [0..100].
135         * @see BasicEdit#TASK_ENCODING
136         */
137        public void OnProgressUpdate(int taskId, int progress);
138    }
139
140    /** Defines the version. */
141    public final class Version {
142
143        /** Major version number */
144        public int major;
145
146        /** Minor version number */
147        public int minor;
148
149        /** Revision number */
150        public int revision;
151
152        /** VIDEOEDITOR major version number */
153        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
154
155        /** VIDEOEDITOR minor version number */
156        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
157
158        /** VIDEOEDITOR revision number */
159        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
160
161        /** Method which returns the current VIDEOEDITOR version */
162        public Version getVersion() {
163            Version version = new Version();
164
165            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
166            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
167            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
168
169            return version;
170        }
171    }
172
173    /**
174     * Defines output audio formats.
175     */
176    public final class AudioFormat {
177        /** No audio present in output clip. Used to generate video only clip */
178        public static final int NO_AUDIO = 0;
179
180        /** AMR Narrow Band. */
181        public static final int AMR_NB = 1;
182
183        /** Advanced Audio Coding (AAC). */
184        public static final int AAC = 2;
185
186        /** Advanced Audio Codec Plus (HE-AAC v1). */
187        public static final int AAC_PLUS = 3;
188
189        /** Advanced Audio Codec Plus (HE-AAC v2). */
190        public static final int ENHANCED_AAC_PLUS = 4;
191
192        /** MPEG layer 3 (MP3). */
193        public static final int MP3 = 5;
194
195        /** Enhanced Variable RateCodec (EVRC). */
196        public static final int EVRC = 6;
197
198        /** PCM (PCM). */
199        public static final int PCM = 7;
200
201        /** No transcoding. Output audio format is same as input audio format */
202        public static final int NULL_AUDIO = 254;
203
204        /** Unsupported audio format. */
205        public static final int UNSUPPORTED_AUDIO = 255;
206    }
207
208    /**
209     * Defines audio sampling frequencies.
210     */
211    public final class AudioSamplingFrequency {
212        /**
213         * Default sampling frequency. Uses the default frequency for a specific
214         * audio format. For AAC the only supported (and thus default) sampling
215         * frequency is 16 kHz. For this audio format the sampling frequency in
216         * the OutputParams.
217         **/
218        public static final int FREQ_DEFAULT = 0;
219
220        /** Audio sampling frequency of 8000 Hz. */
221        public static final int FREQ_8000 = 8000;
222
223        /** Audio sampling frequency of 11025 Hz. */
224        public static final int FREQ_11025 = 11025;
225
226        /** Audio sampling frequency of 12000 Hz. */
227        public static final int FREQ_12000 = 12000;
228
229        /** Audio sampling frequency of 16000 Hz. */
230        public static final int FREQ_16000 = 16000;
231
232        /** Audio sampling frequency of 22050 Hz. */
233        public static final int FREQ_22050 = 22050;
234
235        /** Audio sampling frequency of 24000 Hz. */
236        public static final int FREQ_24000 = 24000;
237
238        /** Audio sampling frequency of 32000 Hz. */
239        public static final int FREQ_32000 = 32000;
240
241        /** Audio sampling frequency of 44100 Hz. */
242        public static final int FREQ_44100 = 44100;
243
244        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
245        public static final int FREQ_48000 = 48000;
246    }
247
248    /**
249     * Defines the supported fixed audio and video bitrates. These values are
250     * for output audio video only.
251     */
252    public final class Bitrate {
253        /** Variable bitrate. Means no bitrate regulation */
254        public static final int VARIABLE = -1;
255
256        /** An undefined bitrate. */
257        public static final int UNDEFINED = 0;
258
259        /** A bitrate of 9.2 kbits/s. */
260        public static final int BR_9_2_KBPS = 9200;
261
262        /** A bitrate of 12.2 kbits/s. */
263        public static final int BR_12_2_KBPS = 12200;
264
265        /** A bitrate of 16 kbits/s. */
266        public static final int BR_16_KBPS = 16000;
267
268        /** A bitrate of 24 kbits/s. */
269        public static final int BR_24_KBPS = 24000;
270
271        /** A bitrate of 32 kbits/s. */
272        public static final int BR_32_KBPS = 32000;
273
274        /** A bitrate of 48 kbits/s. */
275        public static final int BR_48_KBPS = 48000;
276
277        /** A bitrate of 64 kbits/s. */
278        public static final int BR_64_KBPS = 64000;
279
280        /** A bitrate of 96 kbits/s. */
281        public static final int BR_96_KBPS = 96000;
282
283        /** A bitrate of 128 kbits/s. */
284        public static final int BR_128_KBPS = 128000;
285
286        /** A bitrate of 192 kbits/s. */
287        public static final int BR_192_KBPS = 192000;
288
289        /** A bitrate of 256 kbits/s. */
290        public static final int BR_256_KBPS = 256000;
291
292        /** A bitrate of 288 kbits/s. */
293        public static final int BR_288_KBPS = 288000;
294
295        /** A bitrate of 384 kbits/s. */
296        public static final int BR_384_KBPS = 384000;
297
298        /** A bitrate of 512 kbits/s. */
299        public static final int BR_512_KBPS = 512000;
300
301        /** A bitrate of 800 kbits/s. */
302        public static final int BR_800_KBPS = 800000;
303
304        /** A bitrate of 2 Mbits/s. */
305        public static final int BR_2_MBPS = 2000000;
306
307        /** A bitrate of 5 Mbits/s. */
308        public static final int BR_5_MBPS = 5000000;
309
310        /** A bitrate of 8 Mbits/s. */
311        public static final int BR_8_MBPS = 8000000;
312    }
313
314    /**
315     * Defines all supported file types.
316     */
317    public final class FileType {
318        /** 3GPP file type. */
319        public static final int THREE_GPP = 0;
320
321        /** MP4 file type. */
322        public static final int MP4 = 1;
323
324        /** AMR file type. */
325        public static final int AMR = 2;
326
327        /** MP3 audio file type. */
328        public static final int MP3 = 3;
329
330        /** PCM audio file type. */
331        public static final int PCM = 4;
332
333        /** JPEG image file type. */
334        public static final int JPG = 5;
335
336        /** GIF image file type. */
337        public static final int GIF = 7;
338
339        /** PNG image file type. */
340        public static final int PNG = 8;
341
342        /** M4V file type. */
343        public static final int M4V = 10;
344
345        /** Unsupported file type. */
346        public static final int UNSUPPORTED = 255;
347    }
348
349    /**
350     * Defines rendering types. Rendering can only be applied to files
351     * containing video streams.
352     **/
353    public final class MediaRendering {
354        /**
355         * Resize to fit the output video with changing the aspect ratio if
356         * needed.
357         */
358        public static final int RESIZING = 0;
359
360        /**
361         * Crop the input video to fit it with the output video resolution.
362         **/
363        public static final int CROPPING = 1;
364
365        /**
366         * Resize to fit the output video resolution but maintain the aspect
367         * ratio. This framing type adds black borders if needed.
368         */
369        public static final int BLACK_BORDERS = 2;
370    }
371
372    /**
373     * Defines the results.
374     */
375    public final class Result {
376        /** No error. result OK */
377        public static final int NO_ERROR = 0;
378
379        /** File not found */
380        public static final int ERR_FILE_NOT_FOUND = 1;
381
382        /**
383         * In case of UTF8 conversion, the size of the converted path will be
384         * more than the corresponding allocated buffer.
385         */
386        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
387
388        /** Invalid file type. */
389        public static final int ERR_INVALID_FILE_TYPE = 3;
390
391        /** Invalid effect kind. */
392        public static final int ERR_INVALID_EFFECT_KIND = 4;
393
394        /** Invalid video effect. */
395        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
396
397        /** Invalid audio effect. */
398        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
399
400        /** Invalid video transition. */
401        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
402
403        /** Invalid audio transition. */
404        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
405
406        /** Invalid encoding frame rate. */
407        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
408
409        /** External effect is called but this function is not set. */
410        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
411
412        /** External transition is called but this function is not set. */
413        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
414
415        /** Begin time cut is larger than the video clip duration. */
416        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
417
418        /** Begin cut time is larger or equal than end cut. */
419        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
420
421        /** Two consecutive transitions are overlapping on one clip. */
422        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
423
424        /** Internal error, type size mismatch. */
425        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
426
427        /** An input 3GPP file is invalid/corrupted. */
428        public static final int ERR_INVALID_3GPP_FILE = 16;
429
430        /** A file contains an unsupported video format. */
431        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
432
433        /** A file contains an unsupported audio format. */
434        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
435
436        /** A file format is not supported. */
437        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
438
439        /** An input clip has an unexpectedly large Video AU. */
440        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
441
442        /** An input clip has an unexpectedly large Audio AU. */
443        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
444
445        /** An input clip has a corrupted Audio AU. */
446        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
447
448        /** The video encoder encountered an Access Unit error. */
449        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
450
451        /** Unsupported video format for Video Editing. */
452        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
453
454        /** Unsupported H263 profile for Video Editing. */
455        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
456
457        /** Unsupported MPEG-4 profile for Video Editing. */
458        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
459
460        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
461        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
462
463        /** Unsupported audio format for Video Editing. */
464        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
465
466        /** File contains no supported stream. */
467        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
468
469        /** File contains no video stream or an unsupported video stream. */
470        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
471
472        /** Internal error, clip analysis version mismatch. */
473        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
474
475        /**
476         * At least one of the clip analysis has been generated on another
477         * platform (WIN32, ARM, etc.).
478         */
479        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
480
481        /** Clips don't have the same video format (H263 or MPEG4). */
482        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
483
484        /** Clips don't have the same frame size. */
485        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
486
487        /** Clips don't have the same MPEG-4 time scale. */
488        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
489
490        /** Clips don't have the same use of MPEG-4 data partitioning. */
491        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
492
493        /** MP3 clips can't be assembled. */
494        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
495
496        /**
497         * The input 3GPP file does not contain any supported audio or video
498         * track.
499         */
500        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
501
502        /**
503         * The Volume of the added audio track (AddVolume) must be strictly
504         * superior than zero.
505         */
506        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
507
508        /**
509         * The time at which an audio track is added can't be higher than the
510         * input video track duration..
511         */
512        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
513
514        /** The audio track file format setting is undefined. */
515        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
516
517        /** The added audio track stream has an unsupported format. */
518        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
519
520        /** The audio mixing feature doesn't support the audio track type. */
521        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
522
523        /** The audio mixing feature doesn't support MP3 audio tracks. */
524        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
525
526        /**
527         * An added audio track limits the available features: uiAddCts must be
528         * 0 and bRemoveOriginal must be true.
529         */
530        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
531
532        /**
533         * An added audio track limits the available features: uiAddCts must be
534         * 0 and bRemoveOriginal must be true.
535         */
536        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
537
538        /** Input audio track is not of a type that can be mixed with output. */
539        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
540
541        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
542        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
543
544        /**
545         * An added EVRC audio track limit the available features: uiAddCts must
546         * be 0 and bRemoveOriginal must be true.
547         */
548        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
549
550        /** H263 profiles other than 0 are not supported. */
551        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
552
553        /** File contains no video stream or an unsupported video stream. */
554        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
555
556        /** Transcoding of the input file(s) is necessary. */
557        public static final int WAR_TRANSCODING_NECESSARY = 53;
558
559        /**
560         * The size of the output file will exceed the maximum configured value.
561         */
562        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
563
564        /** The time scale is too big. */
565        public static final int WAR_TIMESCALE_TOO_BIG = 55;
566
567        /** The year is out of range */
568        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
569
570        /** The directory could not be opened */
571        public static final int ERR_DIR_OPEN_FAILED = 57;
572
573        /** The directory could not be read */
574        public static final int ERR_DIR_READ_FAILED = 58;
575
576        /** There are no more entries in the current directory */
577        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
578
579        /** The input parameter/s has error */
580        public static final int ERR_PARAMETER = 60;
581
582        /** There is a state machine error */
583        public static final int ERR_STATE = 61;
584
585        /** Memory allocation failed */
586        public static final int ERR_ALLOC = 62;
587
588        /** Context is invalid */
589        public static final int ERR_BAD_CONTEXT = 63;
590
591        /** Context creation failed */
592        public static final int ERR_CONTEXT_FAILED = 64;
593
594        /** Invalid stream ID */
595        public static final int ERR_BAD_STREAM_ID = 65;
596
597        /** Invalid option ID */
598        public static final int ERR_BAD_OPTION_ID = 66;
599
600        /** The option is write only */
601        public static final int ERR_WRITE_ONLY = 67;
602
603        /** The option is read only */
604        public static final int ERR_READ_ONLY = 68;
605
606        /** The feature is not implemented in this version */
607        public static final int ERR_NOT_IMPLEMENTED = 69;
608
609        /** The media type is not supported */
610        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
611
612        /** No data to be encoded */
613        public static final int WAR_NO_DATA_YET = 71;
614
615        /** No data to be decoded */
616        public static final int WAR_NO_MORE_STREAM = 72;
617
618        /** Time stamp is invalid */
619        public static final int WAR_INVALID_TIME = 73;
620
621        /** No more data to be decoded */
622        public static final int WAR_NO_MORE_AU = 74;
623
624        /** Semaphore timed out */
625        public static final int WAR_TIME_OUT = 75;
626
627        /** Memory buffer is full */
628        public static final int WAR_BUFFER_FULL = 76;
629
630        /** Server has asked for redirection */
631        public static final int WAR_REDIRECT = 77;
632
633        /** Too many streams in input */
634        public static final int WAR_TOO_MUCH_STREAMS = 78;
635
636        /** The file cannot be opened/ written into as it is locked */
637        public static final int ERR_FILE_LOCKED = 79;
638
639        /** The file access mode is invalid */
640        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
641
642        /** The file pointer points to an invalid location */
643        public static final int ERR_FILE_INVALID_POSITION = 81;
644
645        /** Invalid string */
646        public static final int ERR_STR_BAD_STRING = 94;
647
648        /** The input string cannot be converted */
649        public static final int ERR_STR_CONV_FAILED = 95;
650
651        /** The string size is too large */
652        public static final int ERR_STR_OVERFLOW = 96;
653
654        /** Bad string arguments */
655        public static final int ERR_STR_BAD_ARGS = 97;
656
657        /** The string value is larger than maximum size allowed */
658        public static final int WAR_STR_OVERFLOW = 98;
659
660        /** The string value is not present in this comparison operation */
661        public static final int WAR_STR_NOT_FOUND = 99;
662
663        /** The thread is not started */
664        public static final int ERR_THREAD_NOT_STARTED = 100;
665
666        /** Trancoding done warning */
667        public static final int WAR_TRANSCODING_DONE = 101;
668
669        /** Unsupported mediatype */
670        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
671
672        /** Input file contains invalid/unsupported streams */
673        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
674
675        /** Invalid input file */
676        public static final int ERR_INVALID_INPUT_FILE = 104;
677
678        /** Invalid output video format */
679        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
680
681        /** Invalid output video frame size */
682        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
683
684        /** Invalid output video frame rate */
685        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
686
687        /** Invalid output audio format */
688        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
689
690        /** Invalid video frame size for H.263 */
691        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
692
693        /** Invalid video frame rate for H.263 */
694        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
695
696        /** invalid playback duration */
697        public static final int ERR_DURATION_IS_NULL = 111;
698
699        /** Invalid H.263 profile in file */
700        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
701
702        /** Invalid AAC sampling frequency */
703        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
704
705        /** Audio conversion failure */
706        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
707
708        /** Invalid trim start and end times */
709        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
710
711        /** End time smaller than start time for trim */
712        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
713
714        /** Output file size is small */
715        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
716
717        /** Output video bitrate is too low */
718        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
719
720        /** Output audio bitrate is too low */
721        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
722
723        /** Output video bitrate is too high */
724        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
725
726        /** Output audio bitrate is too high */
727        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
728
729        /** Output file size is too small */
730        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
731
732        /** Unknown stream type */
733        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
734
735        /** Invalid metadata in input stream */
736        public static final int WAR_READER_NO_METADATA = 124;
737
738        /** Invalid file reader info warning */
739        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
740
741        /** Warning to indicate the the writer is being stopped */
742        public static final int WAR_WRITER_STOP_REQ = 131;
743
744        /** Video decoder failed to provide frame for transcoding */
745        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
746
747        /** Video deblocking filter is not implemented */
748        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
749
750        /** H.263 decoder profile not supported */
751        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
752
753        /** The input file contains unsupported H.263 profile */
754        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
755
756        /** There is no more space to store the output file */
757        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
758
759        /** Internal error. */
760        public static final int ERR_INTERNAL = 255;
761    }
762
763    /**
764     * Defines output video formats.
765     */
766    public final class VideoFormat {
767        /** No video present in output clip. Used to generate audio only clip */
768        public static final int NO_VIDEO = 0;
769
770        /** H263 baseline format. */
771        public static final int H263 = 1;
772
773        /** MPEG4 video Simple Profile format. */
774        public static final int MPEG4 = 2;
775
776        /** MPEG4 video Simple Profile format with support for EMP. */
777        public static final int MPEG4_EMP = 3;
778
779        /** H264 video */
780        public static final int H264 = 4;
781
782        /** No transcoding. Output video format is same as input video format */
783        public static final int NULL_VIDEO = 254;
784
785        /** Unsupported video format. */
786        public static final int UNSUPPORTED = 255;
787    }
788
789    /** Defines video profiles and levels. */
790    public final class VideoProfile {
791        /** MPEG4, Simple Profile, Level 0. */
792        public static final int MPEG4_SP_LEVEL_0 = 0;
793
794        /** MPEG4, Simple Profile, Level 0B. */
795        public static final int MPEG4_SP_LEVEL_0B = 1;
796
797        /** MPEG4, Simple Profile, Level 1. */
798        public static final int MPEG4_SP_LEVEL_1 = 2;
799
800        /** MPEG4, Simple Profile, Level 2. */
801        public static final int MPEG4_SP_LEVEL_2 = 3;
802
803        /** MPEG4, Simple Profile, Level 3. */
804        public static final int MPEG4_SP_LEVEL_3 = 4;
805
806        /** H263, Profile 0, Level 10. */
807        public static final int H263_PROFILE_0_LEVEL_10 = 5;
808
809        /** H263, Profile 0, Level 20. */
810        public static final int H263_PROFILE_0_LEVEL_20 = 6;
811
812        /** H263, Profile 0, Level 30. */
813        public static final int H263_PROFILE_0_LEVEL_30 = 7;
814
815        /** H263, Profile 0, Level 40. */
816        public static final int H263_PROFILE_0_LEVEL_40 = 8;
817
818        /** H263, Profile 0, Level 45. */
819        public static final int H263_PROFILE_0_LEVEL_45 = 9;
820
821        /** MPEG4, Simple Profile, Level 4A. */
822        public static final int MPEG4_SP_LEVEL_4A = 10;
823
824        /** MPEG4, Simple Profile, Level 0. */
825        public static final int MPEG4_SP_LEVEL_5 = 11;
826
827        /** H264, Profile 0, Level 1. */
828        public static final int H264_PROFILE_0_LEVEL_1 = 12;
829
830        /** H264, Profile 0, Level 1b. */
831        public static final int H264_PROFILE_0_LEVEL_1b = 13;
832
833        /** H264, Profile 0, Level 1.1 */
834        public static final int H264_PROFILE_0_LEVEL_1_1 = 14;
835
836        /** H264, Profile 0, Level 1.2 */
837        public static final int H264_PROFILE_0_LEVEL_1_2 = 15;
838
839        /** H264, Profile 0, Level 1.3 */
840        public static final int H264_PROFILE_0_LEVEL_1_3 = 16;
841
842        /** H264, Profile 0, Level 2. */
843        public static final int H264_PROFILE_0_LEVEL_2 = 17;
844
845        /** H264, Profile 0, Level 2.1 */
846        public static final int H264_PROFILE_0_LEVEL_2_1 = 18;
847
848        /** H264, Profile 0, Level 2.2 */
849        public static final int H264_PROFILE_0_LEVEL_2_2 = 19;
850
851        /** H264, Profile 0, Level 3. */
852        public static final int H264_PROFILE_0_LEVEL_3 = 20;
853
854        /** H264, Profile 0, Level 3.1 */
855        public static final int H264_PROFILE_0_LEVEL_3_1 = 21;
856
857        /** H264, Profile 0, Level 3.2 */
858        public static final int H264_PROFILE_0_LEVEL_3_2 = 22;
859
860        /** H264, Profile 0, Level 4. */
861        public static final int H264_PROFILE_0_LEVEL_4 = 23;
862
863        /** H264, Profile 0, Level 4.1 */
864        public static final int H264_PROFILE_0_LEVEL_4_1 = 24;
865
866        /** H264, Profile 0, Level 4.2 */
867        public static final int H264_PROFILE_0_LEVEL_4_2 = 25;
868
869        /** H264, Profile 0, Level 5. */
870        public static final int H264_PROFILE_0_LEVEL_5 = 26;
871
872        /** H264, Profile 0, Level 5.1 */
873        public static final int H264_PROFILE_0_LEVEL_5_1 = 27;
874
875        /** Profile out of range. */
876        public static final int OUT_OF_RANGE = 255;
877    }
878
879    /** Defines video frame sizes. */
880    public final class VideoFrameSize {
881
882        public static final int SIZE_UNDEFINED = -1;
883
884        /** SQCIF 128 x 96 pixels. */
885        public static final int SQCIF = 0;
886
887        /** QQVGA 160 x 120 pixels. */
888        public static final int QQVGA = 1;
889
890        /** QCIF 176 x 144 pixels. */
891        public static final int QCIF = 2;
892
893        /** QVGA 320 x 240 pixels. */
894        public static final int QVGA = 3;
895
896        /** CIF 352 x 288 pixels. */
897        public static final int CIF = 4;
898
899        /** VGA 640 x 480 pixels. */
900        public static final int VGA = 5;
901
902        /** WVGA 800 X 480 pixels */
903        public static final int WVGA = 6;
904
905        /** NTSC 720 X 480 pixels */
906        public static final int NTSC = 7;
907
908        /** 640 x 360 */
909        public static final int nHD = 8;
910
911        /** 854 x 480 */
912        public static final int WVGA16x9 = 9;
913
914        /** 720p 1280 X 720 */
915        public static final int V720p = 10;
916
917        /** 1080 x 720 */
918        public static final int W720p = 11;
919
920        /** 1080 960 x 720 */
921        public static final int S720p = 12;
922    }
923
924    /**
925     * Defines output video frame rates.
926     */
927    public final class VideoFrameRate {
928        /** Frame rate of 5 frames per second. */
929        public static final int FR_5_FPS = 0;
930
931        /** Frame rate of 7.5 frames per second. */
932        public static final int FR_7_5_FPS = 1;
933
934        /** Frame rate of 10 frames per second. */
935        public static final int FR_10_FPS = 2;
936
937        /** Frame rate of 12.5 frames per second. */
938        public static final int FR_12_5_FPS = 3;
939
940        /** Frame rate of 15 frames per second. */
941        public static final int FR_15_FPS = 4;
942
943        /** Frame rate of 20 frames per second. */
944        public static final int FR_20_FPS = 5;
945
946        /** Frame rate of 25 frames per second. */
947        public static final int FR_25_FPS = 6;
948
949        /** Frame rate of 30 frames per second. */
950        public static final int FR_30_FPS = 7;
951    }
952
953    /**
954     * Defines Video Effect Types.
955     */
956    public static class VideoEffect {
957
958        public static final int NONE = 0;
959
960        public static final int FADE_FROM_BLACK = 8;
961
962        public static final int CURTAIN_OPENING = 9;
963
964        public static final int FADE_TO_BLACK = 16;
965
966        public static final int CURTAIN_CLOSING = 17;
967
968        public static final int EXTERNAL = 256;
969
970        public static final int BLACK_AND_WHITE = 257;
971
972        public static final int PINK = 258;
973
974        public static final int GREEN = 259;
975
976        public static final int SEPIA = 260;
977
978        public static final int NEGATIVE = 261;
979
980        public static final int FRAMING = 262;
981
982        public static final int TEXT = 263;
983
984        public static final int ZOOM_IN = 264;
985
986        public static final int ZOOM_OUT = 265;
987
988        public static final int FIFTIES = 266;
989
990        public static final int COLORRGB16 = 267;
991
992        public static final int GRADIENT = 268;
993    }
994
995    /**
996     * Defines the video transitions.
997     */
998    public static class VideoTransition {
999        /** No transition */
1000        public static final int NONE = 0;
1001
1002        /** Cross fade transition */
1003        public static final int CROSS_FADE = 1;
1004
1005        /** External transition. Currently not available. */
1006        public static final int EXTERNAL = 256;
1007
1008        /** AlphaMagic transition. */
1009        public static final int ALPHA_MAGIC = 257;
1010
1011        /** Slide transition. */
1012        public static final int SLIDE_TRANSITION = 258;
1013
1014        /** Fade to black transition. */
1015        public static final int FADE_BLACK = 259;
1016    }
1017
1018    /**
1019     * Defines settings for the AlphaMagic transition
1020     */
1021    public static class AlphaMagicSettings {
1022        /** Name of the alpha file (JPEG file). */
1023        public String file;
1024
1025        /** Blending percentage [0..100] 0 = no blending. */
1026        public int blendingPercent;
1027
1028        /** Invert the default rotation direction of the AlphaMagic effect. */
1029        public boolean invertRotation;
1030
1031        public int rgbWidth;
1032        public int rgbHeight;
1033    }
1034
1035    /** Defines the direction of the Slide transition. */
1036    public static final class SlideDirection {
1037
1038        /** Right out left in. */
1039        public static final int RIGHT_OUT_LEFT_IN = 0;
1040
1041        /** Left out right in. */
1042        public static final int LEFT_OUT_RIGTH_IN = 1;
1043
1044        /** Top out bottom in. */
1045        public static final int TOP_OUT_BOTTOM_IN = 2;
1046
1047        /** Bottom out top in */
1048        public static final int BOTTOM_OUT_TOP_IN = 3;
1049    }
1050
1051    /** Defines the Slide transition settings. */
1052    public static class SlideTransitionSettings {
1053        /**
1054         * Direction of the slide transition. See {@link SlideDirection
1055         * SlideDirection} for valid values.
1056         */
1057        public int direction;
1058    }
1059
1060    /**
1061     * Defines the settings of a single clip.
1062     */
1063    public static class ClipSettings {
1064
1065        /**
1066         * The path to the clip file.
1067         * <p>
1068         * File format of the clip, it can be:
1069         * <ul>
1070         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
1071         * <li>JPG file
1072         * </ul>
1073         */
1074
1075        public String clipPath;
1076
1077        /**
1078         * The path of the decoded file. This is used only for image files.
1079         */
1080        public String clipDecodedPath;
1081
1082        /**
1083         * The path of the Original file. This is used only for image files.
1084         */
1085        public String clipOriginalPath;
1086
1087        /**
1088         * File type of the clip. See {@link FileType FileType} for valid
1089         * values.
1090         */
1091        public int fileType;
1092
1093        /** Begin of the cut in the clip in milliseconds. */
1094        public int beginCutTime;
1095
1096        /**
1097         * End of the cut in the clip in milliseconds. Set both
1098         * <code>beginCutTime</code> and <code>endCutTime</code> to
1099         * <code>0</code> to get the full length of the clip without a cut. In
1100         * case of JPG clip, this is the duration of the JPEG file.
1101         */
1102        public int endCutTime;
1103
1104        /**
1105         * Begin of the cut in the clip in percentage of the file duration.
1106         */
1107        public int beginCutPercent;
1108
1109        /**
1110         * End of the cut in the clip in percentage of the file duration. Set
1111         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1112         * <code>0</code> to get the full length of the clip without a cut.
1113         */
1114        public int endCutPercent;
1115
1116        /** Enable panning and zooming. */
1117        public boolean panZoomEnabled;
1118
1119        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1120        public int panZoomPercentStart;
1121
1122        /** Top left X coordinate at start of clip. */
1123        public int panZoomTopLeftXStart;
1124
1125        /** Top left Y coordinate at start of clip. */
1126        public int panZoomTopLeftYStart;
1127
1128        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1129        public int panZoomPercentEnd;
1130
1131        /** Top left X coordinate at end of clip. */
1132        public int panZoomTopLeftXEnd;
1133
1134        /** Top left Y coordinate at end of clip. */
1135        public int panZoomTopLeftYEnd;
1136
1137        /**
1138         * Set The media rendering. See {@link MediaRendering MediaRendering}
1139         * for valid values.
1140         */
1141        public int mediaRendering;
1142
1143        /**
1144         * RGB width and Height
1145         */
1146         public int rgbWidth;
1147         public int rgbHeight;
1148    }
1149
1150    /**
1151     * Defines settings for a transition.
1152     */
1153    public static class TransitionSettings {
1154
1155        /** Duration of the transition in msec. */
1156        public int duration;
1157
1158        /**
1159         * Transition type for video. See {@link VideoTransition
1160         * VideoTransition} for valid values.
1161         */
1162        public int videoTransitionType;
1163
1164        /**
1165         * Transition type for audio. See {@link AudioTransition
1166         * AudioTransition} for valid values.
1167         */
1168        public int audioTransitionType;
1169
1170        /**
1171         * Transition behaviour. See {@link TransitionBehaviour
1172         * TransitionBehaviour} for valid values.
1173         */
1174        public int transitionBehaviour;
1175
1176        /**
1177         * Settings for AlphaMagic transition. Only needs to be set if
1178         * <code>videoTransitionType</code> is set to
1179         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1180         * {@link AlphaMagicSettings AlphaMagicSettings}.
1181         */
1182        public AlphaMagicSettings alphaSettings;
1183
1184        /**
1185         * Settings for the Slide transition. See
1186         * {@link SlideTransitionSettings SlideTransitionSettings}.
1187         */
1188        public SlideTransitionSettings slideSettings;
1189    }
1190
1191    public static final class AudioTransition {
1192        /** No audio transition. */
1193        public static final int NONE = 0;
1194
1195        /** Cross-fade audio transition. */
1196        public static final int CROSS_FADE = 1;
1197    }
1198
1199    /**
1200     * Defines transition behaviors.
1201     */
1202    public static final class TransitionBehaviour {
1203
1204        /** The transition uses an increasing speed. */
1205        public static final int SPEED_UP = 0;
1206
1207        /** The transition uses a linear (constant) speed. */
1208        public static final int LINEAR = 1;
1209
1210        /** The transition uses a decreasing speed. */
1211        public static final int SPEED_DOWN = 2;
1212
1213        /**
1214         * The transition uses a constant speed, but slows down in the middle
1215         * section.
1216         */
1217        public static final int SLOW_MIDDLE = 3;
1218
1219        /**
1220         * The transition uses a constant speed, but increases speed in the
1221         * middle section.
1222         */
1223        public static final int FAST_MIDDLE = 4;
1224    }
1225
1226    /**
1227     * Defines settings for the background music.
1228     */
1229    public static class BackgroundMusicSettings {
1230
1231        /** Background music file. */
1232        public String file;
1233
1234        /** File type. See {@link FileType FileType} for valid values. */
1235        public int fileType;
1236
1237        /**
1238         * Insertion time in milliseconds, in the output video where the
1239         * background music must be inserted.
1240         */
1241        public long insertionTime;
1242
1243        /**
1244         * Volume, as a percentage of the background music track, to use. If
1245         * this field is set to 100, the background music will replace the audio
1246         * from the video input file(s).
1247         */
1248        public int volumePercent;
1249
1250        /**
1251         * Start time in milliseconds in the background muisc file from where
1252         * the background music should loop. Set both <code>beginLoop</code> and
1253         * <code>endLoop</code> to <code>0</code> to disable looping.
1254         */
1255        public long beginLoop;
1256
1257        /**
1258         * End time in milliseconds in the background music file to where the
1259         * background music should loop. Set both <code>beginLoop</code> and
1260         * <code>endLoop</code> to <code>0</code> to disable looping.
1261         */
1262        public long endLoop;
1263
1264        public boolean enableDucking;
1265
1266        public int duckingThreshold;
1267
1268        public int lowVolume;
1269
1270        public boolean isLooping;
1271    }
1272
1273    /** Defines settings for an effect. */
1274    public static class AudioEffect {
1275        /** No audio effect. */
1276        public static final int NONE = 0;
1277
1278        /** Fade-in effect. */
1279        public static final int FADE_IN = 8;
1280
1281        /** Fade-out effect. */
1282        public static final int FADE_OUT = 16;
1283    }
1284
1285    /** Defines the effect settings. */
1286    public static class EffectSettings {
1287
1288        /** Start time of the effect in milliseconds. */
1289        public int startTime;
1290
1291        /** Duration of the effect in milliseconds. */
1292        public int duration;
1293
1294        /**
1295         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1296         * values.
1297         */
1298        public int videoEffectType;
1299
1300        /**
1301         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1302         * values.
1303         */
1304        public int audioEffectType;
1305
1306        /**
1307         * Start time of the effect in percents of the duration of the clip. A
1308         * value of 0 percent means start time is from the beginning of the
1309         * clip.
1310         */
1311        public int startPercent;
1312
1313        /**
1314         * Duration of the effect in percents of the duration of the clip.
1315         */
1316        public int durationPercent;
1317
1318        /**
1319         * Framing file.
1320         * <p>
1321         * This field is only used when the field <code>videoEffectType</code>
1322         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1323         * this field is ignored.
1324         */
1325        public String framingFile;
1326
1327        /**
1328         * Framing buffer.
1329         * <p>
1330         * This field is only used when the field <code>videoEffectType</code>
1331         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1332         * this field is ignored.
1333         */
1334        public int[] framingBuffer;
1335
1336        /**
1337         * Bitmap type Can be from RGB_565 (4), ARGB_4444 (5), ARGB_8888 (6);
1338         **/
1339
1340        public int bitmapType;
1341
1342        public int width;
1343
1344        public int height;
1345
1346        /**
1347         * Top left x coordinate. This coordinate is used to set the x
1348         * coordinate of the picture in the framing file when the framing file
1349         * is selected. The x coordinate is also used to set the location of the
1350         * text in the text effect.
1351         * <p>
1352         * This field is only used when the field <code>videoEffectType</code>
1353         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1354         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1355         * ignored.
1356         */
1357        public int topLeftX;
1358
1359        /**
1360         * Top left y coordinate. This coordinate is used to set the y
1361         * coordinate of the picture in the framing file when the framing file
1362         * is selected. The y coordinate is also used to set the location of the
1363         * text in the text effect.
1364         * <p>
1365         * This field is only used when the field <code>videoEffectType</code>
1366         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1367         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1368         * ignored.
1369         */
1370        public int topLeftY;
1371
1372        /**
1373         * Should the frame be resized or not. If this field is set to
1374         * <link>true</code> then the frame size is matched with the output
1375         * video size.
1376         * <p>
1377         * This field is only used when the field <code>videoEffectType</code>
1378         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1379         * this field is ignored.
1380         */
1381        public boolean framingResize;
1382
1383        /**
1384         * Size to which the framing buffer needs to be resized to
1385         * This is valid only if framingResize is true
1386         */
1387        public int framingScaledSize;
1388        /**
1389         * Text to insert in the video.
1390         * <p>
1391         * This field is only used when the field <code>videoEffectType</code>
1392         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1393         * field is ignored.
1394         */
1395        public String text;
1396
1397        /**
1398         * Text attributes for the text to insert in the video.
1399         * <p>
1400         * This field is only used when the field <code>videoEffectType</code>
1401         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1402         * field is ignored. For more details about this field see the
1403         * integration guide.
1404         */
1405        public String textRenderingData;
1406
1407        /** Width of the text buffer in pixels. */
1408        public int textBufferWidth;
1409
1410        /** Height of the text buffer in pixels. */
1411        public int textBufferHeight;
1412
1413        /**
1414         * Processing rate for the fifties effect. A high value (e.g. 30)
1415         * results in high effect strength.
1416         * <p>
1417         * This field is only used when the field <code>videoEffectType</code>
1418         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1419         * this field is ignored.
1420         */
1421        public int fiftiesFrameRate;
1422
1423        /**
1424         * RGB 16 color of the RGB16 and gradient color effect.
1425         * <p>
1426         * This field is only used when the field <code>videoEffectType</code>
1427         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1428         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1429         * field is ignored.
1430         */
1431        public int rgb16InputColor;
1432
1433        /**
1434         * Start alpha blending percentage.
1435         * <p>
1436         * This field is only used when the field <code>videoEffectType</code>
1437         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1438         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1439         * is ignored.
1440         */
1441        public int alphaBlendingStartPercent;
1442
1443        /**
1444         * Middle alpha blending percentage.
1445         * <p>
1446         * This field is only used when the field <code>videoEffectType</code>
1447         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1448         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1449         * is ignored.
1450         */
1451        public int alphaBlendingMiddlePercent;
1452
1453        /**
1454         * End alpha blending percentage.
1455         * <p>
1456         * This field is only used when the field <code>videoEffectType</code>
1457         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1458         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1459         * is ignored.
1460         */
1461        public int alphaBlendingEndPercent;
1462
1463        /**
1464         * Duration, in percentage of effect duration of the fade-in phase.
1465         * <p>
1466         * This field is only used when the field <code>videoEffectType</code>
1467         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1468         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1469         * is ignored.
1470         */
1471        public int alphaBlendingFadeInTimePercent;
1472
1473        /**
1474         * Duration, in percentage of effect duration of the fade-out phase.
1475         * <p>
1476         * This field is only used when the field <code>videoEffectType</code>
1477         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1478         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1479         * is ignored.
1480         */
1481        public int alphaBlendingFadeOutTimePercent;
1482    }
1483
1484    /** Defines the clip properties for preview */
1485    public static class PreviewClips {
1486
1487        /**
1488         * The path to the clip file.
1489         * <p>
1490         * File format of the clip, it can be:
1491         * <ul>
1492         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
1493         * <li>JPG file
1494         * </ul>
1495         */
1496
1497        public String clipPath;
1498
1499        /**
1500         * File type of the clip. See {@link FileType FileType} for valid
1501         * values.
1502         */
1503        public int fileType;
1504
1505        /** Begin of the cut in the clip in milliseconds. */
1506        public long beginPlayTime;
1507
1508        public long endPlayTime;
1509
1510        /**
1511         * Set The media rendering. See {@link MediaRendering MediaRendering}
1512         * for valid values.
1513         */
1514        public int mediaRendering;
1515
1516    }
1517
1518    /** Defines the audio settings. */
1519    public static class AudioSettings {
1520
1521        String pFile;
1522
1523        /** < PCM file path */
1524        String Id;
1525
1526        boolean bRemoveOriginal;
1527
1528        /** < If true, the original audio track is not taken into account */
1529        int channels;
1530
1531        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1532        int Fs;
1533
1534        /**
1535         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1536         * BGM clip
1537         */
1538        int ExtendedFs;
1539
1540        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1541        long startMs;
1542
1543        /** < Time, in milliseconds, at which the added audio track is inserted */
1544        long beginCutTime;
1545
1546        long endCutTime;
1547
1548        int fileType;
1549
1550        int volume;
1551
1552        /** < Volume, in percentage, of the added audio track */
1553        boolean loop;
1554
1555        /** < Looping on/off > **/
1556
1557        /** Audio mix and Duck **/
1558        int ducking_threshold;
1559
1560        int ducking_lowVolume;
1561
1562        boolean bInDucking_enable;
1563
1564        String pcmFilePath;
1565    }
1566
1567    /** Encapsulates preview clips and effect settings */
1568    public static class PreviewSettings {
1569
1570        public PreviewClips[] previewClipsArray;
1571
1572        /** The effect settings. */
1573        public EffectSettings[] effectSettingsArray;
1574
1575    }
1576
1577    /** Encapsulates clip properties */
1578    public static class PreviewClipProperties {
1579
1580        public Properties[] clipProperties;
1581
1582    }
1583
1584    /** Defines the editing settings. */
1585    public static class EditSettings {
1586
1587        /**
1588         * Array of clip settings. There is one <code>clipSetting</code> for
1589         * each clip.
1590         */
1591        public ClipSettings[] clipSettingsArray;
1592
1593        /**
1594         * Array of transition settings. If there are n clips (and thus n
1595         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1596         * <code>transitionSettings</code> in
1597         * <code>transitionSettingsArray</code>.
1598         */
1599        public TransitionSettings[] transitionSettingsArray;
1600
1601        /** The effect settings. */
1602        public EffectSettings[] effectSettingsArray;
1603
1604        /**
1605         * Video frame rate of the output clip. See {@link VideoFrameRate
1606         * VideoFrameRate} for valid values.
1607         */
1608        public int videoFrameRate;
1609
1610        /** Output file name. Must be an absolute path. */
1611        public String outputFile;
1612
1613        /**
1614         * Size of the video frames in the output clip. See
1615         * {@link VideoFrameSize VideoFrameSize} for valid values.
1616         */
1617        public int videoFrameSize;
1618
1619        /**
1620         * Format of the video stream in the output clip. See
1621         * {@link VideoFormat VideoFormat} for valid values.
1622         */
1623        public int videoFormat;
1624
1625        /**
1626         * Format of the audio stream in the output clip. See
1627         * {@link AudioFormat AudioFormat} for valid values.
1628         */
1629        public int audioFormat;
1630
1631        /**
1632         * Sampling frequency of the audio stream in the output clip. See
1633         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1634         * values.
1635         */
1636        public int audioSamplingFreq;
1637
1638        /**
1639         * Maximum file size. Use this to limit the size of the output clip.
1640         * Set it to <code>0</code> to make the class ignore this field.
1642         */
1643        public int maxFileSize;
1644
1645        /**
1646         * Number of audio channels in output clip. Use <code>0</code> for none,
1647         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1648         * allowed when the <code>audioFormat</code> field is set to
1649         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1650         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
1651         * allowed when the <code>audioFormat</code> field is set to
1652         * {@link AudioFormat#AAC AudioFormat.AAC}.
1653         */
1654        public int audioChannels;
1655
1656        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1657        public int videoBitrate;
1658
1659        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1660        public int audioBitrate;
1661
1662        /**
1663         * Background music settings. See {@link BackgroundMusicSettings
1664         * BackgroundMusicSettings} for valid values.
1665         */
1666        public BackgroundMusicSettings backgroundMusicSettings;
1667
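        /** Volume of the primary (storyboard) audio track, as a percentage. */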
1668        public int primaryTrackVolume;
1669
1670    }
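
    /*
     * Usage sketch: populating an EditSettings instance for a two-clip
     * storyboard with a single transition. The concrete values below are
     * illustrative assumptions, not requirements.
     *
     *     EditSettings settings = new EditSettings();
     *     settings.clipSettingsArray = new ClipSettings[2];              // n clips
     *     settings.transitionSettingsArray = new TransitionSettings[1];  // n-1 transitions
     *     settings.effectSettingsArray = new EffectSettings[0];
     *     settings.videoFormat = VideoFormat.H264;
     *     settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
     *     settings.videoBitrate = Bitrate.BR_5_MBPS;
     *     // videoFrameSize is typically derived via findVideoResolution(aspectRatio, height)
     *     settings.audioFormat = AudioFormat.AAC;
     *     settings.audioChannels = 2;
     *     settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
     *     settings.audioBitrate = Bitrate.BR_64_KBPS;
     *     settings.maxFileSize = 0;                                      // no size limit
     *     settings.primaryTrackVolume = 100;
     *     settings.outputFile = "/path/to/output.3gp";                   // absolute path
     */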
1671
1672    /**
1673     * Defines the media properties.
1674     **/
1675
1676    public static class Properties {
1677
1678        /**
1679         * Duration of the media in milliseconds.
1680         */
1681
1682        public int duration;
1683
1684        /**
1685         * File type.
1686         */
1687
1688        public int fileType;
1689
1690        /**
1691         * Video format.
1692         */
1693
1694        public int videoFormat;
1695
1696        /**
1697         * Duration of the video stream of the media in milliseconds.
1698         */
1699
1700        public int videoDuration;
1701
1702        /**
1703         * Bitrate of the video stream of the media.
1704         */
1705
1706        public int videoBitrate;
1707
1708        /**
1709         * Width of the video frames or the width of the still picture in
1710         * pixels.
1711         */
1712
1713        public int width;
1714
1715        /**
1716         * Height of the video frames or the height of the still picture in
1717         * pixels.
1718         */
1719
1720        public int height;
1721
1722        /**
1723         * Average frame rate of video in the media in frames per second.
1724         */
1725
1726        public float averageFrameRate;
1727
1728        /**
1729         * Profile and level of the video in the media.
1730         */
1731
1732        public int profileAndLevel;
1733
1734        /**
1735         * Audio format.
1736         */
1737
1738        public int audioFormat;
1739
1740        /**
1741         * Duration of the audio stream of the media in milliseconds.
1742         */
1743
1744        public int audioDuration;
1745
1746        /**
1747         * Bitrate of the audio stream of the media.
1748         */
1749
1750        public int audioBitrate;
1751
1752        /**
1753         * Number of audio channels in the media.
1754         */
1755
1756        public int audioChannels;
1757
1758        /**
1759         * Sampling frequency of the audio stream in the media in samples per
1760         * second.
1761         */
1762
1763        public int audioSamplingFrequency;
1764
1765        /**
1766         * Volume value of the audio track as percentage.
1767         */
1768        public int audioVolumeValue;
1769
1770        public String Id;
1771    }
1772
1773    /**
1774     * Constructor
1775     *
1776     * @param projectPath The path where the VideoEditor stores all files
1777     *        related to the project
1778     * @param veObj The video editor reference
1779     */
1780    public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
1781        mProjectPath = projectPath;
1782        if (veObj != null) {
1783            mVideoEditor = veObj;
1784        } else {
1785            mVideoEditor = null;
1786            throw new IllegalArgumentException("video editor object is null");
1787        }
1788        if (mStoryBoardSettings == null)
1789            mStoryBoardSettings = new EditSettings();
1790
1791        _init(mProjectPath, "null");
1792        mAudioTrackPCMFilePath = null;
1793    }
1794
1795    /**
1796     * @return The project path
1797     */
1798    String getProjectPath() {
1799        return mProjectPath;
1800    }
1801
1802    /**
1803     * @return The Audio Track PCM file path
1804     */
1805    String getProjectAudioTrackPCMFilePath() {
1806        return mAudioTrackPCMFilePath;
1807    }
1808
1809    /**
1810     * Invalidates the PCM file
1811     */
1812    void invalidatePcmFile() {
1813        if (mAudioTrackPCMFilePath != null) {
1814            new File(mAudioTrackPCMFilePath).delete();
1815            mAudioTrackPCMFilePath = null;
1816        }
1817    }
1818
1819    @SuppressWarnings("unused")
1820    private void onProgressUpdate(int taskId, int progress) {
1821        if (mProcessingState == PROCESSING_EXPORT) {
1822            if (mExportProgressListener != null) {
1823                if (mProgressToApp < progress) {
1824                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
1825                    /* record previous progress */
1826                    mProgressToApp = progress;
1827                }
1828            }
1829        }
1830        else {
1831            // Adapt progress depending on current state
1832            int actualProgress = 0;
1833            int action = 0;
1834
1835            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1836                action = MediaProcessingProgressListener.ACTION_DECODE;
1837            } else {
1838                action = MediaProcessingProgressListener.ACTION_ENCODE;
1839            }
1840
1841            switch (mProcessingState) {
1842                case PROCESSING_AUDIO_PCM:
1843                    actualProgress = progress;
1844                    break;
1845                case PROCESSING_TRANSITION:
1846                    actualProgress = progress;
1847                    break;
1848                case PROCESSING_KENBURNS:
1849                    actualProgress = progress;
1850                    break;
1851                case PROCESSING_INTERMEDIATE1:
1852                    if ((progress == 0) && (mProgressToApp != 0)) {
1853                        mProgressToApp = 0;
1854                    }
1855                    if ((progress != 0) || (mProgressToApp != 0)) {
1856                        actualProgress = progress/4;
1857                    }
1858                    break;
1859                case PROCESSING_INTERMEDIATE2:
1860                    if ((progress != 0) || (mProgressToApp != 0)) {
1861                        actualProgress = 25 + progress/4;
1862                    }
1863                    break;
1864                case PROCESSING_INTERMEDIATE3:
1865                    if ((progress != 0) || (mProgressToApp != 0)) {
1866                        actualProgress = 50 + progress/2;
1867                    }
1868                    break;
1869                case PROCESSING_NONE:
1870
1871                default:
1872                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
1873                    return;
1874            }
1875            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1876
1877                mProgressToApp = actualProgress;
1878
1879                if (mMediaProcessingProgressListener != null) {
1880                    // Send the progress indication
1881                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1882                                                                actualProgress);
1883                }
1884            }
1885            /* avoid 0 in next intermediate call */
1886            if (mProgressToApp == 0) {
1887                if (mMediaProcessingProgressListener != null) {
1888                    /*
1889                     *  Send the progress indication
1890                     */
1891                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1892                                                                actualProgress);
1893                }
1894                mProgressToApp = 1;
1895            }
1896        }
1897    }
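
    /*
     * Worked example of the progress adaptation above: while in
     * PROCESSING_INTERMEDIATE2, a native progress of 60 is reported to the
     * listener as 25 + 60/4 = 40. The three intermediate states therefore map
     * to the 0-25, 25-50 and 50-100 ranges of the overall progress.
     */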
1898
1899    @SuppressWarnings("unused")
1900    private void onPreviewProgressUpdate(int progress, boolean isFinished,
1901                  boolean updateOverlay, String filename, int renderingMode) {
1902        if (mPreviewProgressListener != null) {
1903            if (mIsFirstProgress) {
1904                mPreviewProgressListener.onStart(mVideoEditor);
1905                mIsFirstProgress = false;
1906            }
1907
1908            final VideoEditor.OverlayData overlayData;
1909            if (updateOverlay) {
1910                overlayData = new VideoEditor.OverlayData();
1911                if (filename != null) {
1912                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
1913                } else {
1914                    overlayData.setClear();
1915                }
1916            } else {
1917                overlayData = null;
1918            }
1919
1920            mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
1921
1922            if (progress != 0) {
1923                mPreviewProgress = progress;
1924            }
1925
1926            if (isFinished) {
1927                mPreviewProgressListener.onStop(mVideoEditor);
1928            }
1929        }
1930    }
1931
1932    /**
1933     * Release the native helper object
1934     */
1935    void releaseNativeHelper() {
1936        try {
1937            release();
1938        } catch (IllegalStateException ex) {
1939            Log.e(TAG, "Illegal State exeption caught in releaseNativeHelper");
1940            throw ex;
1941        } catch (RuntimeException ex) {
1942            Log.e(TAG, "Runtime exeption caught in releaseNativeHelper");
1943            throw ex;
1944        }
1945    }
1946
1947    /**
1948     * Called to report progress of the audio graph (waveform) extraction process
1949     */
1950    @SuppressWarnings("unused")
1951    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1952        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
1953            mExtractAudioWaveformProgressListener.onProgress(progress);
1954        }
1955    }
1956
1957    /**
1958     * Populates the Effect Settings in EffectSettings
1959     *
1960     * @param effects The reference of EffectColor
1961     *
1962     * @return The populated effect settings in EffectSettings reference
1963     */
1964    EffectSettings getEffectSettings(EffectColor effects) {
1965        EffectSettings effectSettings = new EffectSettings();
1966        effectSettings.startTime = (int)effects.getStartTime();
1967        effectSettings.duration = (int)effects.getDuration();
1968        effectSettings.videoEffectType = getEffectColorType(effects);
1969        effectSettings.audioEffectType = 0;
1970        effectSettings.startPercent = 0;
1971        effectSettings.durationPercent = 0;
1972        effectSettings.framingFile = null;
1973        effectSettings.topLeftX = 0;
1974        effectSettings.topLeftY = 0;
1975        effectSettings.framingResize = false;
1976        effectSettings.text = null;
1977        effectSettings.textRenderingData = null;
1978        effectSettings.textBufferWidth = 0;
1979        effectSettings.textBufferHeight = 0;
1980        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1981            effectSettings.fiftiesFrameRate = 15;
1982        } else {
1983            effectSettings.fiftiesFrameRate = 0;
1984        }
1985
1986        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
1987                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
1988            effectSettings.rgb16InputColor = effects.getColor();
1989        }
1990
1991        effectSettings.alphaBlendingStartPercent = 0;
1992        effectSettings.alphaBlendingMiddlePercent = 0;
1993        effectSettings.alphaBlendingEndPercent = 0;
1994        effectSettings.alphaBlendingFadeInTimePercent = 0;
1995        effectSettings.alphaBlendingFadeOutTimePercent = 0;
1996        return effectSettings;
1997    }
1998
1999    /**
2000     * Populates the Overlay Settings in EffectSettings
2001     *
2002     * @param overlay The reference of OverlayFrame
2003     *
2004     * @return The populated overlay settings in EffectSettings reference
2005     */
2006    EffectSettings getOverlaySettings(OverlayFrame overlay) {
2007        EffectSettings effectSettings = new EffectSettings();
2008        Bitmap bitmap = null;
2009
2010        effectSettings.startTime = (int)overlay.getStartTime();
2011        effectSettings.duration = (int)overlay.getDuration();
2012        effectSettings.videoEffectType = VideoEffect.FRAMING;
2013        effectSettings.audioEffectType = 0;
2014        effectSettings.startPercent = 0;
2015        effectSettings.durationPercent = 0;
2016        effectSettings.framingFile = null;
2017
2018        if ((bitmap = overlay.getBitmap()) != null) {
2019            effectSettings.framingFile = overlay.getFilename();
2020
2021            if (effectSettings.framingFile == null) {
2022                try {
2023                    (overlay).save(mProjectPath);
2024                } catch (IOException e) {
2025                    Log.e(TAG, "getOverlaySettings : File not found");
2026                }
2027                effectSettings.framingFile = overlay.getFilename();
2028            }
2029            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
2030                effectSettings.bitmapType = 6;
2031            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
2032                effectSettings.bitmapType = 5;
2033            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
2034                effectSettings.bitmapType = 4;
2035            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
2036                throw new RuntimeException("Bitmap config not supported");
2037
2038            effectSettings.width = bitmap.getWidth();
2039            effectSettings.height = bitmap.getHeight();
2040            effectSettings.framingBuffer = new int[effectSettings.width];
2041            int tmp = 0;
2042            short maxAlpha = 0;
2043            short minAlpha = (short)0xFF;
2044            short alpha = 0;
2045            while (tmp < effectSettings.height) {
2046                bitmap.getPixels(effectSettings.framingBuffer, 0,
2047                                 effectSettings.width, 0, tmp,
2048                                 effectSettings.width, 1);
2049                for (int i = 0; i < effectSettings.width; i++) {
2050                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
2051                    if (alpha > maxAlpha) {
2052                        maxAlpha = alpha;
2053                    }
2054                    if (alpha < minAlpha) {
2055                        minAlpha = alpha;
2056                    }
2057                }
2058                tmp += 1;
2059            }
2060            alpha = (short)((maxAlpha + minAlpha) / 2);
2061            alpha = (short)((alpha * 100) / 256);
2062            effectSettings.alphaBlendingEndPercent = alpha;
2063            effectSettings.alphaBlendingMiddlePercent = alpha;
2064            effectSettings.alphaBlendingStartPercent = alpha;
2065            effectSettings.alphaBlendingFadeInTimePercent = 100;
2066            effectSettings.alphaBlendingFadeOutTimePercent = 100;
2067            effectSettings.framingBuffer = null;
2068
2069            /*
2070             * Set the resized RGB file dimensions
2071             */
2072            effectSettings.width = overlay.getResizedRGBSizeWidth();
2073            if(effectSettings.width == 0) {
2074                effectSettings.width = bitmap.getWidth();
2075            }
2076
2077            effectSettings.height = overlay.getResizedRGBSizeHeight();
2078            if(effectSettings.height == 0) {
2079                effectSettings.height = bitmap.getHeight();
2080            }
2081
2082        }
2083
2084        effectSettings.topLeftX = 0;
2085        effectSettings.topLeftY = 0;
2086
2087        effectSettings.framingResize = true;
2088        effectSettings.text = null;
2089        effectSettings.textRenderingData = null;
2090        effectSettings.textBufferWidth = 0;
2091        effectSettings.textBufferHeight = 0;
2092        effectSettings.fiftiesFrameRate = 0;
2093        effectSettings.rgb16InputColor = 0;
2094        int mediaItemHeight;
2095        int aspectRatio;
2096        if (overlay.getMediaItem() instanceof MediaImageItem) {
2097            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2098                // Ken Burns was applied
2099                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2100                aspectRatio = getAspectRatio(
2101                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2102                    , mediaItemHeight);
2103            } else {
2104                //For image get the scaled height. Aspect ratio would remain the same
2105                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2106                aspectRatio = overlay.getMediaItem().getAspectRatio();
2107            }
2108        } else {
2109            aspectRatio = overlay.getMediaItem().getAspectRatio();
2110            mediaItemHeight = overlay.getMediaItem().getHeight();
2111        }
2112        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2113        return effectSettings;
2114    }
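
    /*
     * Worked example for the alpha blending computation above: if the framing
     * bitmap's per-pixel alpha ranges from 0 to 255, the average alpha is
     * (255 + 0) / 2 = 127, which maps to (127 * 100) / 256 = 49 percent; that
     * value is applied to the start, middle and end blending percentages.
     */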
2115
2116    /* Get the video editor aspect ratio */
2117    int nativeHelperGetAspectRatio() {
2118        return mVideoEditor.getAspectRatio();
2119    }
2120
2121    /**
2122     * Sets the audio regenerate flag
2123     *
2124     * @param flag The boolean to set the audio regenerate flag
2125     *
2126     */
2127    void setAudioflag(boolean flag) {
2128        //check if the file exists.
2129        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2130            flag = true;
2131        }
2132        mRegenerateAudio = flag;
2133    }
2134
2135    /**
2136     * Gets the audio regenerate flag
2137     *
2138     * @return The audio regenerate flag
2139     *
2140     */
2141    boolean getAudioflag() {
2142        return mRegenerateAudio;
2143    }
2144
2145    /**
2146     * Maps the average frame rate to one of the defined enum values
2147     *
2148     * @param averageFrameRate The average frame rate of video item
2149     *
2150     * @return The frame rate from one of the defined enum values
2151     */
2152    int GetClosestVideoFrameRate(int averageFrameRate) {
2153        if (averageFrameRate >= 25) {
2154            return VideoFrameRate.FR_30_FPS;
2155        } else if (averageFrameRate >= 20) {
2156            return VideoFrameRate.FR_25_FPS;
2157        } else if (averageFrameRate >= 15) {
2158            return VideoFrameRate.FR_20_FPS;
2159        } else if (averageFrameRate >= 12) {
2160            return VideoFrameRate.FR_15_FPS;
2161        } else if (averageFrameRate >= 10) {
2162            return VideoFrameRate.FR_12_5_FPS;
2163        } else if (averageFrameRate >= 7) {
2164            return VideoFrameRate.FR_10_FPS;
2165        } else if (averageFrameRate >= 5) {
2166            return VideoFrameRate.FR_7_5_FPS;
2167        } else {
2168            return -1;
2169        }
2170    }
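
    /*
     * Examples of the mapping above: an average frame rate of 29 maps to
     * FR_30_FPS, 24 maps to FR_25_FPS, 14 maps to FR_15_FPS, and anything
     * below 5 yields -1 (no matching frame rate).
     */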
2171
2172    /**
2173     * Helper function to adjust the effect or overlay start time and duration
2174     * depending on the begin and end boundary time of the media item
2175     */
2176    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
2177                                                  int endCutTime) {
2178
2179        int effectStartTime = 0;
2180        int effectDuration = 0;
2181
2182        /**
2183         * cbct -> clip begin cut time
2184         * cect -> clip end cut time
2185         ****************************************
2186         *  |                                 |
2187         *  |         cbct        cect        |
2188         *  | <-1-->   |           |          |
2189         *  |       <--|-2->       |          |
2190         *  |          | <---3---> |          |
2191         *  |          |        <--|-4--->    |
2192         *  |          |           | <--5-->  |
2193         *  |      <---|------6----|---->     |
2194         *  |                                 |
2195         *  < : effectStart
2196         *  > : effectStart + effectDuration
2197         ****************************************
2198         **/
2199
2200        /** 1 & 5 */
2201        /**
2202         * The effect falls outside the trim duration. In such a case the
2203         * effect shall not be applied.
2204         */
2205        if ((lEffect.startTime > endCutTime)
2206                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2207
2208            effectStartTime = 0;
2209            effectDuration = 0;
2210
2211            lEffect.startTime = effectStartTime;
2212            lEffect.duration = effectDuration;
2213            return;
2214        }
2215
2216        /** 2 */
2217        if ((lEffect.startTime < beginCutTime)
2218                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2219                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2220            effectStartTime = 0;
2221            effectDuration = lEffect.duration;
2222
2223            effectDuration -= (beginCutTime - lEffect.startTime);
2224            lEffect.startTime = effectStartTime;
2225            lEffect.duration = effectDuration;
2226            return;
2227        }
2228
2229        /** 3 */
2230        if ((lEffect.startTime >= beginCutTime)
2231                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2232            effectStartTime = lEffect.startTime - beginCutTime;
2233            lEffect.startTime = effectStartTime;
2234            lEffect.duration = lEffect.duration;
2235            return;
2236        }
2237
2238        /** 4 */
2239        if ((lEffect.startTime >= beginCutTime)
2240                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2241            effectStartTime = lEffect.startTime - beginCutTime;
2242            effectDuration = endCutTime - lEffect.startTime;
2243            lEffect.startTime = effectStartTime;
2244            lEffect.duration = effectDuration;
2245            return;
2246        }
2247
2248        /** 6 */
2249        if ((lEffect.startTime < beginCutTime)
2250                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2251            effectStartTime = 0;
2252            effectDuration = endCutTime - beginCutTime;
2253            lEffect.startTime = effectStartTime;
2254            lEffect.duration = effectDuration;
2255            return;
2256        }
2257
2258    }
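
    /*
     * Worked example for case 2 above: an effect starting at 2000 ms with a
     * duration of 5000 ms, applied to a clip trimmed to [3000, 9000] ms, is
     * adjusted to start at 0 ms with a duration of 5000 - (3000 - 2000) =
     * 4000 ms relative to the trimmed clip.
     */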
2259
2260    /**
2261     * Generates the clip for preview or export
2262     *
2263     * @param editSettings The EditSettings reference for generating
2264     * a clip for preview or export
2265     *
2266     * @return <code>0</code> on success, a non-zero error value otherwise
2267     */
2268    public int generateClip(EditSettings editSettings) {
2269        int err = 0;
2270
2271        try {
2272            err = nativeGenerateClip(editSettings);
2273        } catch (IllegalArgumentException ex) {
2274            Log.e(TAG, "Illegal Argument exception in load settings");
2275            return -1;
2276        } catch (IllegalStateException ex) {
2277            Log.e(TAG, "Illegal state exception in load settings");
2278            return -1;
2279        } catch (RuntimeException ex) {
2280            Log.e(TAG, "Runtime exception in load settings");
2281            return -1;
2282        }
2283        return err;
2284    }
2285
2286    /**
2287     * Init function to initialize the ClipSettings reference to
2288     * default values
2289     *
2290     * @param lclipSettings The ClipSettings reference
2291     */
2292    void initClipSettings(ClipSettings lclipSettings) {
2293        lclipSettings.clipPath = null;
2294        lclipSettings.clipDecodedPath = null;
2295        lclipSettings.clipOriginalPath = null;
2296        lclipSettings.fileType = 0;
2297        lclipSettings.endCutTime = 0;
2298        lclipSettings.beginCutTime = 0;
2299        lclipSettings.beginCutPercent = 0;
2300        lclipSettings.endCutPercent = 0;
2301        lclipSettings.panZoomEnabled = false;
2302        lclipSettings.panZoomPercentStart = 0;
2303        lclipSettings.panZoomTopLeftXStart = 0;
2304        lclipSettings.panZoomTopLeftYStart = 0;
2305        lclipSettings.panZoomPercentEnd = 0;
2306        lclipSettings.panZoomTopLeftXEnd = 0;
2307        lclipSettings.panZoomTopLeftYEnd = 0;
2308        lclipSettings.mediaRendering = 0;
2309    }
2310
2311
2312    /**
2313     * Populates the settings for generating an effect clip
2314     *
2315     * @param lMediaItem The media item for which the effect clip
2316     * needs to be generated
2317     * @param lclipSettings The ClipSettings reference containing
2318     * clips data
2319     * @param e The EditSettings reference containing effect specific data
2320     * @param uniqueId The unique id used in the name of the output clip
2321     * @param clipNo Used for internal purpose
2322     *
2323     * @return The name and path of generated clip
2324     */
2325    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2326            EditSettings e,String uniqueId,int clipNo) {
2327        int err = 0;
2328        EditSettings editSettings = null;
2329        String EffectClipPath = null;
2330
2331        editSettings = new EditSettings();
2332
2333        editSettings.clipSettingsArray = new ClipSettings[1];
2334        editSettings.clipSettingsArray[0] = lclipSettings;
2335
2336        editSettings.backgroundMusicSettings = null;
2337        editSettings.transitionSettingsArray = null;
2338        editSettings.effectSettingsArray = e.effectSettingsArray;
2339
2340        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2341                + lMediaItem.getId() + uniqueId + ".3gp");
2342
2343        File tmpFile = new File(EffectClipPath);
2344        if (tmpFile.exists()) {
2345            tmpFile.delete();
2346        }
2347
2348        if (lMediaItem instanceof MediaVideoItem) {
2349            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2350
2351            editSettings.audioFormat = AudioFormat.AAC;
2352            editSettings.audioChannels = 2;
2353            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2354            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2355
2356            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2357            //editSettings.videoFormat = VideoFormat.MPEG4;
2358            editSettings.videoFormat = VideoFormat.H264;
2359            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2360            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2361                    m.getHeight());
2362        } else {
2363            MediaImageItem m = (MediaImageItem)lMediaItem;
2364            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2365            editSettings.audioChannels = 2;
2366            editSettings.audioFormat = AudioFormat.AAC;
2367            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2368
2369            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2370            editSettings.videoFormat = VideoFormat.H264;
2371            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2372            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2373                    m.getScaledHeight());
2374        }
2375
2376        editSettings.outputFile = EffectClipPath;
2377
2378        if (clipNo == 1) {
2379            mProcessingState  = PROCESSING_INTERMEDIATE1;
2380        } else if (clipNo == 2) {
2381            mProcessingState  = PROCESSING_INTERMEDIATE2;
2382        }
2383        mProcessingObject = lMediaItem;
2384        err = generateClip(editSettings);
2385        mProcessingState  = PROCESSING_NONE;
2386
2387        if (err == 0) {
2388            lclipSettings.clipPath = EffectClipPath;
2389            lclipSettings.fileType = FileType.THREE_GPP;
2390            return EffectClipPath;
2391        } else {
2392            throw new RuntimeException("preview generation cannot be completed");
2393        }
2394    }
2395
2396
2397    /**
2398     * Populates the settings for generating a Ken Burn effect clip
2399     *
2400     * @param m The media image item for which the Ken Burn effect clip
2401     * needs to be generated
2402     * @param e The EditSettings reference clip specific data
2403     *
2404     * @return The name and path of generated clip
2405     */
2406    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2407        String output = null;
2408        int err = 0;
2409
2410        e.backgroundMusicSettings = null;
2411        e.transitionSettingsArray = null;
2412        e.effectSettingsArray = null;
2413        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2414
2415        File tmpFile = new File(output);
2416        if (tmpFile.exists()) {
2417            tmpFile.delete();
2418        }
2419
2420        e.outputFile = output;
2421        e.audioBitrate = Bitrate.BR_64_KBPS;
2422        e.audioChannels = 2;
2423        e.audioFormat = AudioFormat.AAC;
2424        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2425
2426        e.videoBitrate = Bitrate.BR_5_MBPS;
2427        e.videoFormat = VideoFormat.H264;
2428        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2429        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2430                                                           m.getScaledHeight());
2431        mProcessingState  = PROCESSING_KENBURNS;
2432        mProcessingObject = m;
2433        err = generateClip(e);
2434        // Reset the processing state and check for errors
2435        mProcessingState  = PROCESSING_NONE;
2436        if (err != 0) {
2437            throw new RuntimeException("preview generation cannot be completed");
2438        }
2439        return output;
2440    }
2441
2442
2443    /**
2444     * Calculates the output resolution for transition clip
2445     *
2446     * @param m1 First media item associated with transition
2447     * @param m2 Second media item associated with transition
2448     *
2449     * @return The transition resolution
2450     */
2451    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2452        int clip1Height = 0;
2453        int clip2Height = 0;
2454        int videoSize = 0;
2455
2456        if (m1 != null && m2 != null) {
2457            if (m1 instanceof MediaVideoItem) {
2458                clip1Height = m1.getHeight();
2459            } else if (m1 instanceof MediaImageItem) {
2460                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2461            }
2462            if (m2 instanceof MediaVideoItem) {
2463                clip2Height = m2.getHeight();
2464            } else if (m2 instanceof MediaImageItem) {
2465                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2466            }
2467            if (clip1Height > clip2Height) {
2468                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2469            } else {
2470                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2471            }
2472        } else if (m1 == null && m2 != null) {
2473            if (m2 instanceof MediaVideoItem) {
2474                clip2Height = m2.getHeight();
2475            } else if (m2 instanceof MediaImageItem) {
2476                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2477            }
2478            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2479        } else if (m1 != null && m2 == null) {
2480            if (m1 instanceof MediaVideoItem) {
2481                clip1Height = m1.getHeight();
2482            } else if (m1 instanceof MediaImageItem) {
2483                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2484            }
2485            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2486        }
2487        return videoSize;
2488    }
2489
2490    /**
2491     * Populates the settings for generating an transition clip
2492     *
2493     * @param m1 First media item associated with transition
2494     * @param m2 Second media item associated with transition
2495     * @param e The EditSettings reference containing
2496     * clip specific data
2497     * @param uniqueId The unique id used in the name of the output clip
2498     * @param t The Transition specific data
2499     *
2500     * @return The name and path of generated clip
2501     */
2502    String generateTransitionClip(EditSettings e, String uniqueId,
2503            MediaItem m1, MediaItem m2,Transition t) {
2504        String outputFilename = null;
2505        int err = 0;
2506
2507        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2508        e.outputFile = outputFilename;
2509        e.audioBitrate = Bitrate.BR_64_KBPS;
2510        e.audioChannels = 2;
2511        e.audioFormat = AudioFormat.AAC;
2512        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2513
2514        e.videoBitrate = Bitrate.BR_5_MBPS;
2515        e.videoFormat = VideoFormat.H264;
2516        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2517        e.videoFrameSize = getTransitionResolution(m1, m2);
2518
2519        if (new File(outputFilename).exists()) {
2520            new File(outputFilename).delete();
2521        }
2522        mProcessingState  = PROCESSING_INTERMEDIATE3;
2523        mProcessingObject = t;
2524        err = generateClip(e);
2525        // Reset the processing state and check for errors
2526        mProcessingState  = PROCESSING_NONE;
2527        if (err != 0) {
2528            throw new RuntimeException("preview generation cannot be completed");
2529        }
2530        return outputFilename;
2531    }
2532
2533    /**
2534     * Populates effects and overlays in EffectSettings structure
2535     * and also adjusts the start time and duration of effects and overlays
2536     * with respect to the total story board time
2537     *
2538     * @param m The media item associated with the effect
2539     * @param effectSettings The EffectSettings array containing
2540     *      effect specific data
2541     * @param i The current index in the effect settings array
2541     * @param beginCutTime The begin cut time of the clip associated with effect
2542     * @param endCutTime The end cut time of the clip associated with effect
2543     * @param storyBoardTime The current story board time
2544     *
2545     * @return The updated index
2546     */
2547    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2548            int beginCutTime, int endCutTime, int storyBoardTime) {
2549
2550        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2551                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2552            beginCutTime += m.getBeginTransition().getDuration();
2553            endCutTime -= m.getEndTransition().getDuration();
2554        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2555                && m.getEndTransition().getDuration() > 0) {
2556            endCutTime -= m.getEndTransition().getDuration();
2557        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2558                && m.getBeginTransition().getDuration() > 0) {
2559            beginCutTime += m.getBeginTransition().getDuration();
2560        }
2561
2562        final List<Effect> effects = m.getAllEffects();
2563        final List<Overlay> overlays = m.getAllOverlays();
2564        for (Effect effect : effects) {
2565            if (effect instanceof EffectColor) {
2566                effectSettings[i] = getEffectSettings((EffectColor)effect);
2567                adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2568                effectSettings[i].startTime += storyBoardTime;
2569                i++;
2570            }
2571        }
2572
2573        for (Overlay overlay : overlays) {
2574            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2575            adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2576            effectSettings[i].startTime += storyBoardTime;
2577            i++;
2578        }
2579        return i;
2580    }
2581
2582    /**
2583     * Adjusts the media item boundaries for use in export or preview
2584     *
2585     * @param clipSettings The ClipSettings reference
2586     * @param clipProperties The Properties reference
2587     * @param m The media item
2588     */
2589    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2590                                         Properties clipProperties, MediaItem m) {
2591        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2592                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2593            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2594            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2595        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2596                && m.getEndTransition().getDuration() > 0) {
2597            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2598        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2599                && m.getBeginTransition().getDuration() > 0) {
2600            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2601        }
2602
2603        clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
2604
2605        if (clipProperties.videoDuration != 0) {
2606            clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2607        }
2608
2609        if (clipProperties.audioDuration != 0) {
2610            clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2611        }
2612    }
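
    /*
     * Worked example: a clip cut to [0, 10000] ms with a 1000 ms begin
     * transition and a 500 ms end transition is adjusted to [1000, 9500] ms,
     * giving a duration of 8500 ms (the same value is applied to the video
     * and audio durations when they are non-zero).
     */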
2613
2614    /**
2615     * Generates the transition if the transition is present
2616     * and is in an invalidated state
2617     *
2618     * @param transition The Transition reference
2619     * @param editSettings The EditSettings reference
2620     * @param clipPropertiesArray The clip Properties array
2621     * @param index The index in the clip properties array for the current clip
2622     */
2623    private void generateTransition(Transition transition, EditSettings editSettings,
2624            PreviewClipProperties clipPropertiesArray, int index) {
2625        if (!(transition.isGenerated())) {
2626            transition.generate();
2627        }
2628        editSettings.clipSettingsArray[index] = new ClipSettings();
2629        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2630        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2631        editSettings.clipSettingsArray[index].beginCutTime = 0;
2632        editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
2633        editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
2634
2635        try {
2636            clipPropertiesArray.clipProperties[index] =
2637                getMediaProperties(transition.getFilename());
2638        } catch (Exception e) {
2639            throw new IllegalArgumentException("Unsupported file or file not found");
2640        }
2641
2642        clipPropertiesArray.clipProperties[index].Id = null;
2643        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2644        clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
2645        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2646            clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
2647        }
2648
2649        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2650            clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
2651        }
2652    }
2653
2654    /**
2655     * Sets the volume for current media item in clip properties array
2656     *
2657     * @param m The media item
2658     * @param clipProperties The clip properties array reference
2659     * @param index The index in the clip properties array for the current clip
2660     */
2661    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2662                              int index) {
2663        if (m instanceof MediaVideoItem) {
2664            final boolean videoMuted = ((MediaVideoItem)m).isMuted();
2665            if (videoMuted == false) {
2666                mClipProperties.clipProperties[index].audioVolumeValue =
2667                    ((MediaVideoItem)m).getVolume();
2668            } else {
2669                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2670            }
2671        } else if (m instanceof MediaImageItem) {
2672            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2673        }
2674    }
2675
2676    /**
2677     * Checks for odd size image width and height
2678     *
2679     * @param m The media item
2680     * @param clipProperties The clip properties array reference
2681     * @param index The index in the clip properties array for the current clip
2682     */
2683    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2684        if (m instanceof MediaImageItem) {
2685            int width = mClipProperties.clipProperties[index].width;
2686            int height = mClipProperties.clipProperties[index].height;
2687
2688            if ((width % 2) != 0) {
2689                width -= 1;
2690            }
2691            if ((height % 2) != 0) {
2692                height -= 1;
2693            }
2694            mClipProperties.clipProperties[index].width = width;
2695            mClipProperties.clipProperties[index].height = height;
2696        }
2697    }
2698
2699    /**
2700     * Populates the media item properties and calculates the maximum
2701     * height among all the clips
2702     *
2703     * @param m The media item
2704     * @param index The index in the clip settings array for the current clip
2705     * @param maxHeight The maximum height found so far among the clips
2706     *
2707     * @return The updated maximum height: the current clip's height if it is
2708     * greater than the height of all previous clips
2709     */
2710    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2711        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2712        if (m instanceof MediaVideoItem) {
2713            mPreviewEditSettings.clipSettingsArray[index] =
2714                ((MediaVideoItem)m).getVideoClipProperties();
2715            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2716                maxHeight = ((MediaVideoItem)m).getHeight();
2717            }
2718        } else if (m instanceof MediaImageItem) {
2719            mPreviewEditSettings.clipSettingsArray[index] =
2720                ((MediaImageItem)m).getImageClipProperties();
2721            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2722                maxHeight = ((MediaImageItem)m).getScaledHeight();
2723            }
2724        }
2725        /* Handle the image files here */
2726        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2727            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
2728                ((MediaImageItem)m).getDecodedImageFileName();
2729
2730            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2731                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2732        }
2733        return maxHeight;
2734    }
2735
2736    /**
2737     * Populates the background music track properties
2738     *
2739     * @param mediaBGMList The background music list
2740     *
2741     */
2742    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2743
2744        if (mediaBGMList.size() == 1) {
2745            mAudioTrack = mediaBGMList.get(0);
2746        } else {
2747            mAudioTrack = null;
2748        }
2749
2750        if (mAudioTrack != null) {
2751            mAudioSettings = new AudioSettings();
2752            Properties mAudioProperties = new Properties();
2753            mAudioSettings.pFile = null;
2754            mAudioSettings.Id = mAudioTrack.getId();
2755            try {
2756                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2757            } catch (Exception e) {
2758               throw new IllegalArgumentException("Unsupported file or file not found");
2759            }
2760            mAudioSettings.bRemoveOriginal = false;
2761            mAudioSettings.channels = mAudioProperties.audioChannels;
2762            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2763            mAudioSettings.loop = mAudioTrack.isLooping();
2764            mAudioSettings.ExtendedFs = 0;
2765            mAudioSettings.pFile = mAudioTrack.getFilename();
2766            mAudioSettings.startMs = mAudioTrack.getStartTime();
2767            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2768            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2769            if (mAudioTrack.isMuted()) {
2770                mAudioSettings.volume = 0;
2771            } else {
2772                mAudioSettings.volume = mAudioTrack.getVolume();
2773            }
2774            mAudioSettings.fileType = mAudioProperties.fileType;
2775            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2776            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2777            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2778            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2779            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2780
2781            mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
2782            mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
2783            mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
2784            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2785                mAudioTrack.getStartTime();
2786            mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
2787            mPreviewEditSettings.backgroundMusicSettings.beginLoop =
2788                mAudioTrack.getBoundaryBeginTime();
2789            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2790                                               mAudioTrack.getBoundaryEndTime();
2791            mPreviewEditSettings.backgroundMusicSettings.enableDucking =
2792                mAudioTrack.isDuckingEnabled();
2793            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
2794                mAudioTrack.getDuckingThreshhold();
2795            mPreviewEditSettings.backgroundMusicSettings.lowVolume =
2796                mAudioTrack.getDuckedTrackVolume();
2797            mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
2798            mPreviewEditSettings.primaryTrackVolume = 100;
2799            mProcessingState  = PROCESSING_AUDIO_PCM;
2800            mProcessingObject = mAudioTrack;
2801        } else {
2802            mAudioSettings = null;
2803            mPreviewEditSettings.backgroundMusicSettings = null;
2804            mAudioTrackPCMFilePath = null;
2805        }
2806    }
2807
2808    /**
2809     * Counts the effects and overlays in all the media items
2810     * in the media items list
2811     *
2812     * @param mediaItemsList The media item list
2813     *
2814     * @return The total number of effects and overlays, excluding Ken Burns effects
2815     *
2816     */
2817    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2818        int totalEffects = 0;
2819        final Iterator<MediaItem> it = mediaItemsList.iterator();
2820        while (it.hasNext()) {
2821            final MediaItem t = it.next();
2822            totalEffects += t.getAllEffects().size();
2823            totalEffects += t.getAllOverlays().size();
2824            final Iterator<Effect> ef = t.getAllEffects().iterator();
2825            while (ef.hasNext()) {
2826                final Effect e = ef.next();
2827                if (e instanceof EffectKenBurns) {
2828                    totalEffects--;
2829                }
2830            }
2831        }
2832        return totalEffects;
2833    }
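
    /*
     * Example: a media item with two color effects, one Ken Burns effect and
     * one overlay contributes 3 + 1 - 1 = 3 to the total, because Ken Burns
     * effects are handled as generated clips (see generateKenBurnsClip) rather
     * than as effect settings.
     */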
2834
2835    /**
2836     * This function is responsible for forming clip settings
2837     * array and clip properties array including transition clips
2838     * and effect settings for preview or export purposes.
2839     *
2840     *
2841     * @param mediaItemsList The media item list
2842     * @param mediaTransitionList The transitions list
2843     * @param mediaBGMList The background music list
2844     * @param listener The MediaProcessingProgressListener
2845     *
2846     */
2847    void previewStoryBoard(List<MediaItem> mediaItemsList,
2848            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2849            MediaProcessingProgressListener listener) {
2850        if (mInvalidatePreviewArray) {
2851            int previewIndex = 0;
2852            int totalEffects = 0;
2853            int storyBoardTime = 0;
2854            int maxHeight = 0;
2855            int beginCutTime = 0;
2856            int endCutTime = 0;
2857            int effectIndex = 0;
2858            Transition lTransition = null;
2859            MediaItem lMediaItem = null;
2860            mPreviewEditSettings = new EditSettings();
2861            mClipProperties = new PreviewClipProperties();
2862            mTotalClips = 0;
2863
2864            mTotalClips = mediaItemsList.size();
2865            for (Transition transition : mediaTransitionList) {
2866                if (transition.getDuration() > 0) {
2867                    mTotalClips++;
2868                }
2869            }
2870
2871            totalEffects = getTotalEffects(mediaItemsList);
2872
2873            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2874            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2875            mClipProperties.clipProperties = new Properties[mTotalClips];
2876
2877            /** record the call back progress listener */
2878            mMediaProcessingProgressListener = listener;
2879            mProgressToApp = 0;
2880
2881            if (mediaItemsList.size() > 0) {
2882                for (int i = 0; i < mediaItemsList.size(); i++) {
2883                    /* Get the Media Item from the list */
2884                    lMediaItem = mediaItemsList.get(i);
2885                    if (lMediaItem instanceof MediaVideoItem) {
2886                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2887                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2888                    } else if (lMediaItem instanceof MediaImageItem) {
2889                        beginCutTime = 0;
2890                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2891                    }
2892                    /* Get the transition associated with Media Item */
2893                    lTransition = lMediaItem.getBeginTransition();
2894                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2895                        /* generate transition clip */
2896                        generateTransition(lTransition, mPreviewEditSettings,
2897                                           mClipProperties, previewIndex);
2898                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2899                        previewIndex++;
2900                    }
2901                    /* Populate media item properties */
2902                    maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
2903                    /* Get the clip properties of the media item. */
2904                    if (lMediaItem instanceof MediaImageItem) {
2905                        int tmpCnt = 0;
2906                        boolean bEffectKbPresent = false;
2907                        final List<Effect> effectList = lMediaItem.getAllEffects();
2908                        /**
2909                         * Check if Ken Burns effect is present
2910                         */
2911                        while (tmpCnt < effectList.size()) {
2912                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2913                                bEffectKbPresent = true;
2914                                break;
2915                            }
2916                            tmpCnt++;
2917                        }
2918
2919                        if (bEffectKbPresent) {
2920                            try {
2921                                  if(((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
2922                                     mClipProperties.clipProperties[previewIndex]
2923                                        = getMediaProperties(((MediaImageItem)lMediaItem).
2924                                                             getGeneratedImageClip());
2925                                  }
2926                                  else {
2927                                   mClipProperties.clipProperties[previewIndex]
2928                                      = getMediaProperties(((MediaImageItem)lMediaItem).
2929                                                             getScaledImageFileName());
2930                                   mClipProperties.clipProperties[previewIndex].width =
2931                                             ((MediaImageItem)lMediaItem).getScaledWidth();
2932                                   mClipProperties.clipProperties[previewIndex].height =
2933                                             ((MediaImageItem)lMediaItem).getScaledHeight();
2934                                  }
2935                                } catch (Exception e) {
2936                                   throw new IllegalArgumentException("Unsupported file or file not found");
2937                                }
2938                         } else {
2939                              try {
2940                                  mClipProperties.clipProperties[previewIndex]
2941                                      = getMediaProperties(((MediaImageItem)lMediaItem).
2942                                                               getScaledImageFileName());
2943                              } catch (Exception e) {
2944                                throw new IllegalArgumentException("Unsupported file or file not found");
2945                              }
2946                            mClipProperties.clipProperties[previewIndex].width =
2947                                        ((MediaImageItem)lMediaItem).getScaledWidth();
2948                            mClipProperties.clipProperties[previewIndex].height =
2949                                        ((MediaImageItem)lMediaItem).getScaledHeight();
2950                        }
2951                    } else {
2952                        try {
2953                            mClipProperties.clipProperties[previewIndex] =
2954                                getMediaProperties(lMediaItem.getFilename());
2955                        } catch (Exception e) {
2956                            throw new IllegalArgumentException("Unsupported file or file not found");
2957                        }
2958                    }
2959                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2960                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2961                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2962
2963                    /*
2964                     * Adjust media item start time and end time w.r.t. the begin
2965                     * and end transitions associated with media item
2966                     */
2967
2968                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2969                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2970
2971                    /*
2972                     * Get all the effects and overlays for that media item and
2973                     * adjust start time and duration of effects
2974                     */
2975
2976                    effectIndex = populateEffects(lMediaItem,
2977                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2978                            endCutTime, storyBoardTime);
2979                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2980                    previewIndex++;
2981
2982                    /* Check if there is any end transition at last media item */
2983
2984                    if (i == (mediaItemsList.size() - 1)) {
2985                        lTransition = lMediaItem.getEndTransition();
2986                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2987                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
2988                                    previewIndex);
2989                            break;
2990                        }
2991                    }
2992                }
2993            }
2994            if (!mErrorFlagSet) {
2995                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
2996                        .getAspectRatio(), maxHeight);
2997                populateBackgroundMusicProperties(mediaBGMList);
2998
2999                /** call to native populate settings */
3000                try {
3001                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3002                } catch (IllegalArgumentException ex) {
3003                    Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
3004                    throw ex;
3005                } catch (IllegalStateException ex) {
3006                    Log.e(TAG, "Illegal state exception in nativePopulateSettings");
3007                    throw ex;
3008                } catch (RuntimeException ex) {
3009                    Log.e(TAG, "Runtime exception in nativePopulateSettings");
3010                    throw ex;
3011                }
3012                mInvalidatePreviewArray = false;
3013                mProcessingState  = PROCESSING_NONE;
3014            }
3015            if (mErrorFlagSet) {
3016                mErrorFlagSet = false;
3017                throw new RuntimeException("preview generation cannot be completed");
3018            }
3019        }
3020    } /* END of previewStoryBoard */
3021
3022    /**
3023     * This function is responsible for starting the preview
3024     *
3025     *
3026     * @param surface The surface on which preview has to be displayed
3027     * @param fromMs The time in ms from which preview has to be started
3028     * @param toMs The time in ms until which the preview has to be played
3029     * @param loop Whether to loop the preview or not
3030     * @param callbackAfterFrameCount Indicates after how many frames
3031     * the callback is needed
3032     * @param listener The PreviewProgressListener
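     *
     * <p>A minimal call sketch; the helper instance, surface, time window and
     * listener below are illustrative assumptions only:</p>
     * <pre>
     *   // Preview the first 5 seconds once, with a progress callback every 4 frames.
     *   helper.doPreview(previewSurface, 0, 5000, false, 4, progressListener);
     * </pre>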
3033     */
3034    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3035            int callbackAfterFrameCount, PreviewProgressListener listener) {
3036        mPreviewProgress = fromMs;
3037        mIsFirstProgress = true;
3038        mPreviewProgressListener = listener;
3039
3040        if (!mInvalidatePreviewArray) {
3041            try {
3042                /** Point image (JPG) clips at their decoded RGB files for preview. */
3043                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3044                    clipCnt++) {
3045                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3046                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3047                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3048                    }
3049                }
3050                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3051                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3052            } catch (IllegalArgumentException ex) {
3053                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
3054                throw ex;
3055            } catch (IllegalStateException ex) {
3056                Log.e(TAG, "Illegal state exception in nativeStartPreview");
3057                throw ex;
3058            } catch (RuntimeException ex) {
3059                Log.e(TAG, "Runtime exception in nativeStartPreview");
3060                throw ex;
3061            }
3062        } else {
3063            throw new IllegalStateException("generatePreview is in progress");
3064        }
3065    }
3066
3067    /**
3068     * This function is responsible for stopping the preview
3069     */
3070    long stopPreview() {
3071        nativeStopPreview();
3072        return mPreviewProgress;
3073    }
3074
3075    /**
3076     * This function is responsible for rendering a single frame
3077     * from the complete story board on the surface
3078     *
3079     * @param surface The surface on which frame has to be rendered
3080     * @param time The time in ms at which the frame has to be rendered
3081     * @param surfaceWidth The surface width
3082     * @param surfaceHeight The surface height
3083     * @param overlayData The overlay data
3084     *
3085     * @return The actual time from the story board at which the frame was extracted
3086     * and rendered
3087     */
3088    long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3089            int surfaceHeight, VideoEditor.OverlayData overlayData) {
3090        if (mInvalidatePreviewArray) {
3091            if (Log.isLoggable(TAG, Log.DEBUG)) {
3092                Log.d(TAG, "Call generate preview first");
3093            }
3094            throw new IllegalStateException("Call generate preview first");
3095        }
3096
3097        long timeMs = 0;
3098        try {
3099            for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3100                  clipCnt++) {
3101                if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3102                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3103                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3104                }
3105            }
3106
3107            // Reset the render preview frame params that shall be set by native.
3108            mRenderPreviewOverlayFile = null;
3109            mRenderPreviewRenderingMode = MediaRendering.RESIZING;
3110
3111            nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3112
3113            timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3114
3115            if (mRenderPreviewOverlayFile != null) {
3116                overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
3117                        mRenderPreviewRenderingMode);
3118            } else {
3119                overlayData.setClear();
3120            }
3121        } catch (IllegalArgumentException ex) {
3122            Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
3123            throw ex;
3124        } catch (IllegalStateException ex) {
3125            Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
3126            throw ex;
3127        } catch (RuntimeException ex) {
3128            Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
3129            throw ex;
3130        }
3131
3132        return timeMs;
3133    }
3134
3135    private void previewFrameEditInfo(String filename, int renderingMode) {
3136        mRenderPreviewOverlayFile = filename;
3137        mRenderPreviewRenderingMode = renderingMode;
3138    }
3139
3140
3141    /**
3142     * This function is responsible for rendering a single frame
3143     * from a single media item on the surface
3144     *
3145     * @param surface The surface on which frame has to be rendered
3146     * @param filepath The file path for which the frame needs to be displayed
3147     * @param time The time in ms at which the frame has to be rendered
3148     * @param framewidth The frame width
3149     * @param frameheight The frame height
3150     *
3151     * @return The actual time from the media item at which the frame was extracted
3152     * and rendered
3153     */
3154    long renderMediaItemPreviewFrame(Surface surface, String filepath,
3155                                            long time, int framewidth, int frameheight) {
3156        long timeMs = 0;
3157        try {
3158            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3159                    frameheight, 0, 0, time);
3160        } catch (IllegalArgumentException ex) {
3161            Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
3162            throw ex;
3163        } catch (IllegalStateException ex) {
3164            Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
3165            throw ex;
3166        } catch (RuntimeException ex) {
3167            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
3168            throw ex;
3169        }
3170
3171        return timeMs;
3172    }
3173
3174    /**
3175     * This function sets the flag to invalidate the preview array
3176     * so that the preview is generated again
3177     */
3178    void setGeneratePreview(boolean isRequired) {
3179        boolean semAcquiredDone = false;
3180        try {
3181            lock();
3182            semAcquiredDone = true;
3183            mInvalidatePreviewArray = isRequired;
3184        } catch (InterruptedException ex) {
3185            Log.e(TAG, "Interrupted exception in setGeneratePreview");
3186        } finally {
3187            if (semAcquiredDone) {
3188                unlock();
3189            }
3190        }
3191    }
3192
3193    /**
3194     * @return The current status of the preview invalidation
3195     * flag
3196     */
3197    boolean getGeneratePreview() {
3198        return mInvalidatePreviewArray;
3199    }
3200
3201    /**
3202     * Calculates the aspect ratio from width and height
3203     *
3204     * @param w The width of media item
3205     * @param h The height of media item
3206     *
3207     * @return The calculated aspect ratio
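     *
     * <p>For example, 1280x720 gives 1.778 and maps to
     * MediaProperties.ASPECT_RATIO_16_9, while 720x480 gives 1.5 and maps to
     * MediaProperties.ASPECT_RATIO_3_2.</p>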
3208     */
3209    int getAspectRatio(int w, int h) {
3210        double apRatio = (double)(w) / (double)(h);
3211        BigDecimal bd = new BigDecimal(apRatio);
3212        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3213        apRatio = bd.doubleValue();
3214        int var = MediaProperties.ASPECT_RATIO_16_9;
3215        if (apRatio >= 1.7) {
3216            var = MediaProperties.ASPECT_RATIO_16_9;
3217        } else if (apRatio >= 1.6) {
3218            var = MediaProperties.ASPECT_RATIO_5_3;
3219        } else if (apRatio >= 1.5) {
3220            var = MediaProperties.ASPECT_RATIO_3_2;
3221        } else if (apRatio > 1.3) {
3222            var = MediaProperties.ASPECT_RATIO_4_3;
3223        } else if (apRatio >= 1.2) {
3224            var = MediaProperties.ASPECT_RATIO_11_9;
3225        }
3226        return var;
3227    }
3228
3229    /**
3230     * Maps the file type used in native layer
3231     * to file type used in JAVA layer
3232     *
3233     * @param fileType The file type in native layer
3234     *
3235     * @return The File type in JAVA layer
3236     */
3237    int getFileType(int fileType) {
3238        int retValue = -1;
3239        switch (fileType) {
3240            case FileType.UNSUPPORTED:
3241                retValue = MediaProperties.FILE_UNSUPPORTED;
3242                break;
3243            case FileType.THREE_GPP:
3244                retValue = MediaProperties.FILE_3GP;
3245                break;
3246            case FileType.MP4:
3247                retValue = MediaProperties.FILE_MP4;
3248                break;
3249            case FileType.JPG:
3250                retValue = MediaProperties.FILE_JPEG;
3251                break;
3252            case FileType.PNG:
3253                retValue = MediaProperties.FILE_PNG;
3254                break;
3255            case FileType.MP3:
3256                retValue = MediaProperties.FILE_MP3;
3257                break;
3258            case FileType.M4V:
3259                retValue = MediaProperties.FILE_M4V;
3260                break;
3261
3262            default:
3263                retValue = -1;
3264        }
3265        return retValue;
3266    }
3267
3268    /**
3269     * Maps the video codec type used in native layer
3270     * to video codec type used in JAVA layer
3271     *
3272     * @param codecType The video codec type in native layer
3273     *
3274     * @return The video codec type in JAVA layer
3275     */
3276    int getVideoCodecType(int codecType) {
3277        int retValue = -1;
3278        switch (codecType) {
3279            case VideoFormat.H263:
3280                retValue = MediaProperties.VCODEC_H263;
3281                break;
3282            case VideoFormat.H264:
3283                retValue = MediaProperties.VCODEC_H264BP;
3284                break;
3285            case VideoFormat.MPEG4:
3286                retValue = MediaProperties.VCODEC_MPEG4;
3287                break;
3288            case VideoFormat.UNSUPPORTED:
3289
3290            default:
3291                retValue = -1;
3292        }
3293        return retValue;
3294    }
3295
3296    /**
3297     * Maps the audio codec type used in native layer
3298     * to audio codec type used in JAVA layer
3299     *
3300     * @param codecType The audio codec type in native layer
3301     *
3302     * @return The audio codec type in JAVA layer
3303     */
3304    int getAudioCodecType(int codecType) {
3305        int retValue = -1;
3306        switch (codecType) {
3307            case AudioFormat.AMR_NB:
3308                retValue = MediaProperties.ACODEC_AMRNB;
3309                break;
3310            case AudioFormat.AAC:
3311                retValue = MediaProperties.ACODEC_AAC_LC;
3312                break;
3313            case AudioFormat.MP3:
3314                retValue = MediaProperties.ACODEC_MP3;
3315                break;
3316
3317            default:
3318                retValue = -1;
3319        }
3320        return retValue;
3321    }
3322
3323    /**
3324     * Returns the frame rate as integer
3325     *
3326     * @param fps The fps as enum
3327     *
3328     * @return The frame rate as integer
3329     */
3330    int getFrameRate(int fps) {
3331        int retValue = -1;
3332        switch (fps) {
3333            case VideoFrameRate.FR_5_FPS:
3334                retValue = 5;
3335                break;
3336            case VideoFrameRate.FR_7_5_FPS:
3337                retValue = 8;
3338                break;
3339            case VideoFrameRate.FR_10_FPS:
3340                retValue = 10;
3341                break;
3342            case VideoFrameRate.FR_12_5_FPS:
3343                retValue = 13;
3344                break;
3345            case VideoFrameRate.FR_15_FPS:
3346                retValue = 15;
3347                break;
3348            case VideoFrameRate.FR_20_FPS:
3349                retValue = 20;
3350                break;
3351            case VideoFrameRate.FR_25_FPS:
3352                retValue = 25;
3353                break;
3354            case VideoFrameRate.FR_30_FPS:
3355                retValue = 30;
3356                break;
3357
3358            default:
3359                retValue = -1;
3360        }
3361        return retValue;
3362    }
3363
3364    /**
3365     * Maps the file type used in JAVA layer
3366     * to file type used in native layer
3367     *
3368     * @param fileType The file type in JAVA layer
3369     *
3370     * @return The File type in native layer
3371     */
3372    int getMediaItemFileType(int fileType) {
3373        int retValue = -1;
3374
3375        switch (fileType) {
3376            case MediaProperties.FILE_UNSUPPORTED:
3377                retValue = FileType.UNSUPPORTED;
3378                break;
3379            case MediaProperties.FILE_3GP:
3380                retValue = FileType.THREE_GPP;
3381                break;
3382            case MediaProperties.FILE_MP4:
3383                retValue = FileType.MP4;
3384                break;
3385            case MediaProperties.FILE_JPEG:
3386                retValue = FileType.JPG;
3387                break;
3388            case MediaProperties.FILE_PNG:
3389                retValue = FileType.PNG;
3390                break;
3391            case MediaProperties.FILE_M4V:
3392                retValue = FileType.M4V;
3393                break;
3394
3395            default:
3396                retValue = -1;
3397        }
3398        return retValue;
3399
3400    }
3401
3402    /**
3403     * Maps the rendering mode used in JAVA layer
3404     * to rendering mode used in native layer
3405     *
3406     * @param renderingMode The rendering mode in JAVA layer
3407     *
3408     * @return The rendering mode in native layer
3409     */
3410    int getMediaItemRenderingMode(int renderingMode) {
3411        int retValue = -1;
3412        switch (renderingMode) {
3413            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3414                retValue = MediaRendering.BLACK_BORDERS;
3415                break;
3416            case MediaItem.RENDERING_MODE_STRETCH:
3417                retValue = MediaRendering.RESIZING;
3418                break;
3419            case MediaItem.RENDERING_MODE_CROPPING:
3420                retValue = MediaRendering.CROPPING;
3421                break;
3422
3423            default:
3424                retValue = -1;
3425        }
3426        return retValue;
3427    }
3428
3429    /**
3430     * Maps the transition behavior used in JAVA layer
3431     * to transition behavior used in native layer
3432     *
3433     * @param transitionType The transition behavior in JAVA layer
3434     *
3435     * @return The transition behavior in native layer
3436     */
3437    int getVideoTransitionBehaviour(int transitionType) {
3438        int retValue = -1;
3439        switch (transitionType) {
3440            case Transition.BEHAVIOR_SPEED_UP:
3441                retValue = TransitionBehaviour.SPEED_UP;
3442                break;
3443            case Transition.BEHAVIOR_SPEED_DOWN:
3444                retValue = TransitionBehaviour.SPEED_DOWN;
3445                break;
3446            case Transition.BEHAVIOR_LINEAR:
3447                retValue = TransitionBehaviour.LINEAR;
3448                break;
3449            case Transition.BEHAVIOR_MIDDLE_SLOW:
3450                retValue = TransitionBehaviour.SLOW_MIDDLE;
3451                break;
3452            case Transition.BEHAVIOR_MIDDLE_FAST:
3453                retValue = TransitionBehaviour.FAST_MIDDLE;
3454                break;
3455
3456            default:
3457                retValue = -1;
3458        }
3459        return retValue;
3460    }
3461
3462    /**
3463     * Maps the transition slide direction used in JAVA layer
3464     * to transition slide direction used in native layer
3465     *
3466     * @param slideDirection The transition slide direction
3467     * in JAVA layer
3468     *
3469     * @return The transition slide direction in native layer
3470     */
3471    int getSlideSettingsDirection(int slideDirection) {
3472        int retValue = -1;
3473        switch (slideDirection) {
3474            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3475                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3476                break;
3477            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3478                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3479                break;
3480            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3481                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3482                break;
3483            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3484                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3485                break;
3486
3487            default:
3488                retValue = -1;
3489        }
3490        return retValue;
3491    }
3492
3493    /**
3494     * Maps the effect color type used in JAVA layer
3495     * to effect color type used in native layer
3496     *
3497     * @param effect The EffectColor reference
3498     *
3499     * @return The color effect value from native layer
3500     */
3501    private int getEffectColorType(EffectColor effect) {
3502        int retValue = -1;
3503        switch (effect.getType()) {
3504            case EffectColor.TYPE_COLOR:
3505                if (effect.getColor() == EffectColor.GREEN) {
3506                    retValue = VideoEffect.GREEN;
3507                } else if (effect.getColor() == EffectColor.PINK) {
3508                    retValue = VideoEffect.PINK;
3509                } else if (effect.getColor() == EffectColor.GRAY) {
3510                    retValue = VideoEffect.BLACK_AND_WHITE;
3511                } else {
3512                    retValue = VideoEffect.COLORRGB16;
3513                }
3514                break;
3515            case EffectColor.TYPE_GRADIENT:
3516                retValue = VideoEffect.GRADIENT;
3517                break;
3518            case EffectColor.TYPE_SEPIA:
3519                retValue = VideoEffect.SEPIA;
3520                break;
3521            case EffectColor.TYPE_NEGATIVE:
3522                retValue = VideoEffect.NEGATIVE;
3523                break;
3524            case EffectColor.TYPE_FIFTIES:
3525                retValue = VideoEffect.FIFTIES;
3526                break;
3527
3528            default:
3529                retValue = -1;
3530        }
3531        return retValue;
3532    }
3533
3534    /**
3535     * Calculates video resolution for output clip
3536     * based on clip's height and aspect ratio of storyboard
3537     *
3538     * @param aspectRatio The aspect ratio of story board
3539     * @param height The height of clip
3540     *
3541     * @return The video resolution
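     *
     * <p>For example, a 16:9 storyboard with 720-pixel-high clips maps to
     * VideoFrameSize.V720p; combinations with no direct mapping fall back to the
     * highest resolution supported for the storyboard aspect ratio.</p>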
3542     */
3543    private int findVideoResolution(int aspectRatio, int height) {
3544        final Pair<Integer, Integer>[] resolutions;
3545        final Pair<Integer, Integer> maxResolution;
3546        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3547        switch (aspectRatio) {
3548            case MediaProperties.ASPECT_RATIO_3_2:
3549                if (height == MediaProperties.HEIGHT_480)
3550                    retValue = VideoFrameSize.NTSC;
3551                else if (height == MediaProperties.HEIGHT_720)
3552                    retValue = VideoFrameSize.W720p;
3553                break;
3554            case MediaProperties.ASPECT_RATIO_16_9:
3555                if (height == MediaProperties.HEIGHT_480)
3556                    retValue = VideoFrameSize.WVGA16x9;
3557                else if (height == MediaProperties.HEIGHT_720)
3558                    retValue = VideoFrameSize.V720p;
3559                break;
3560            case MediaProperties.ASPECT_RATIO_4_3:
3561                if (height == MediaProperties.HEIGHT_480)
3562                    retValue = VideoFrameSize.VGA;
3563                if (height == MediaProperties.HEIGHT_720)
3564                    retValue = VideoFrameSize.S720p;
3565                break;
3566            case MediaProperties.ASPECT_RATIO_5_3:
3567                if (height == MediaProperties.HEIGHT_480)
3568                    retValue = VideoFrameSize.WVGA;
3569                break;
3570            case MediaProperties.ASPECT_RATIO_11_9:
3571                if (height == MediaProperties.HEIGHT_144)
3572                    retValue = VideoFrameSize.QCIF;
3573                break;
3574        }
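        // Fall back to the highest resolution supported for the storyboard
        // aspect ratio when no direct mapping was found above.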
3575        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3576            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3577            // Get the highest resolution
3578            maxResolution = resolutions[resolutions.length - 1];
3579            retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
3580        }
3581
3582        return retValue;
3583    }
3584
3585    /**
3586     * This method is responsible for exporting a movie
3587     *
3588     * @param filePath The output file path
3589     * @param projectDir The output project directory
3590     * @param height The height of clip
3591     * @param bitrate The bitrate at which the movie should be exported
3592     * @param mediaItemsList The media items list
3593     * @param mediaTransitionList The transitions list
3594     * @param mediaBGMList The background track list
3595     * @param listener The ExportProgressListener
3596     *
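     * <p>A minimal call sketch; the output path, height and bitrate below are
     * illustrative assumptions only:</p>
     * <pre>
     *   helper.export("/sdcard/output.mp4", projectDir, MediaProperties.HEIGHT_480,
     *           MediaProperties.BITRATE_2M, mediaItems, transitions, audioTracks,
     *           exportListener);
     * </pre>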
3597     */
3598    void export(String filePath, String projectDir, int height, int bitrate,
3599            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3600            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3601
3602        int outBitrate = 0;
3603        mExportFilename = filePath;
3604        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3605        mExportProgressListener = listener;
3606
3607        mProgressToApp = 0;
3608
3609        switch (bitrate) {
3610            case MediaProperties.BITRATE_28K:
3611                outBitrate = Bitrate.BR_32_KBPS;
3612                break;
3613            case MediaProperties.BITRATE_40K:
3614                outBitrate = Bitrate.BR_48_KBPS;
3615                break;
3616            case MediaProperties.BITRATE_64K:
3617                outBitrate = Bitrate.BR_64_KBPS;
3618                break;
3619            case MediaProperties.BITRATE_96K:
3620                outBitrate = Bitrate.BR_96_KBPS;
3621                break;
3622            case MediaProperties.BITRATE_128K:
3623                outBitrate = Bitrate.BR_128_KBPS;
3624                break;
3625            case MediaProperties.BITRATE_192K:
3626                outBitrate = Bitrate.BR_192_KBPS;
3627                break;
3628            case MediaProperties.BITRATE_256K:
3629                outBitrate = Bitrate.BR_256_KBPS;
3630                break;
3631            case MediaProperties.BITRATE_384K:
3632                outBitrate = Bitrate.BR_384_KBPS;
3633                break;
3634            case MediaProperties.BITRATE_512K:
3635                outBitrate = Bitrate.BR_512_KBPS;
3636                break;
3637            case MediaProperties.BITRATE_800K:
3638                outBitrate = Bitrate.BR_800_KBPS;
3639                break;
3640            case MediaProperties.BITRATE_2M:
3641                outBitrate = Bitrate.BR_2_MBPS;
3642                break;
3643
3644            case MediaProperties.BITRATE_5M:
3645                outBitrate = Bitrate.BR_5_MBPS;
3646                break;
3647            case MediaProperties.BITRATE_8M:
3648                outBitrate = Bitrate.BR_8_MBPS;
3649                break;
3650
3651            default:
3652                throw new IllegalArgumentException("Argument Bitrate incorrect");
3653        }
3654        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3655        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3656
3657        int aspectRatio = mVideoEditor.getAspectRatio();
3658        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3659        mPreviewEditSettings.videoFormat = VideoFormat.H264;
3660        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3661        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3662        mPreviewEditSettings.maxFileSize = 0;
3663        mPreviewEditSettings.audioChannels = 2;
3664        mPreviewEditSettings.videoBitrate = outBitrate;
3665        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3666
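        // Insert pass-through (NONE) transition entries between consecutive clips.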
3667        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3668        for (int index = 0; index < mTotalClips - 1; index++) {
3669            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3670            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3671                VideoTransition.NONE;
3672            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3673                AudioTransition.NONE;
3674        }
3675
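        // For export, point image (JPG) clips back at their original files rather
        // than the decoded RGB versions used for preview.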
3676        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3677            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3678                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3679                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3680            }
3681        }
3682        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3683
3684        int err = 0;
3685        try {
3686            mProcessingState  = PROCESSING_EXPORT;
3687            mProcessingObject = null;
3688            err = generateClip(mPreviewEditSettings);
3689            mProcessingState  = PROCESSING_NONE;
3690        } catch (IllegalArgumentException ex) {
3691            Log.e(TAG, "IllegalArgumentException for generateClip");
3692            throw ex;
3693        } catch (IllegalStateException ex) {
3694            Log.e(TAG, "IllegalStateException for generateClip");
3695            throw ex;
3696        } catch (RuntimeException ex) {
3697            Log.e(TAG, "RuntimeException for generateClip");
3698            throw ex;
3699        }
3700
3701        if (err != 0) {
3702            Log.e(TAG, "generateClip returned error " + err);
3703            throw new RuntimeException("generateClip failed with error=" + err);
3704        }
3705
3706        mExportProgressListener = null;
3707    }
3708
3709    /**
3710     * This method takes care of stopping the export process
3711     *
3712     * @param filename The input file name for which export has to be stopped
3713     */
3714    void stop(String filename) {
3715        try {
3716            stopEncoding();
3717            new File(mExportFilename).delete();
3718        } catch (IllegalStateException ex) {
3719            Log.e(TAG, "Illegal state exception in stopEncoding");
3720            throw ex;
3721        } catch (RuntimeException ex) {
3722            Log.e(TAG, "Runtime exception in stopEncoding");
3723            throw ex;
3724        }
3725    }
3726
3727    /**
3728     * This method extracts a frame from the input file
3729     * and returns the frame as a bitmap
3730     *
3731     * @param inputFile The input file name
3732     * @param width The width of the output frame
3733     * @param height The height of the output frame
3734     * @param timeMS The time in ms at which the frame has to be extracted
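     *
     * <p>For example (the file path here is an illustrative assumption),
     * {@code getPixels("/sdcard/clip.mp4", 320, 240, 1000)} extracts the frame at
     * the 1 second mark, scaled to 320x240.</p>
     *
     * @return The extracted frame as a bitmap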
3735     */
3736    Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3737        if (inputFile == null) {
3738            throw new IllegalArgumentException("Input file name is null");
3739        }
3740
3741        int newWidth = 0;
3742        int newHeight = 0;
3743        Bitmap tempBitmap = null;
3744
3745        /* Round width and height up to the nearest even values */
3746        newWidth = (width + 1) & 0xFFFFFFFE;
3747        newHeight = (height + 1) & 0xFFFFFFFE;
3748
3749        /* Create a temp bitmap for resized thumbnails */
3750        if ((newWidth != width) || (newHeight != height)) {
3751             tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3752        }
3753
3754        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4);
3755        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3756        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);
3757
3758        if ((newWidth == width) && (newHeight == height)) {
3759            bitmap.copyPixelsFromBuffer(rgb888);
3760        } else {
3761            /* Create a temp bitmap to be used for resize */
3762            tempBitmap.copyPixelsFromBuffer(rgb888);
3763
3764            /* Create a canvas to resize */
3765            final Canvas canvas = new Canvas(bitmap);
3766            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3767                                          new Rect(0, 0, width, height), sResizePaint);
3768        }
3769
3770        if (tempBitmap != null) {
3771            tempBitmap.recycle();
3772        }
3773        return bitmap;
3774    }
3775
3776    /**
3777     * This method extracts a list of frames from the
3778     * input file and returns the frames in a bitmap array
3779     *
3780     * @param filename The input file name
3781     * @param width The width of the output frame
3782     * @param height The height of the output frame
3783     * @param startMs The starting time in ms
3784     * @param endMs The end time in ms
3785     * @param thumbnailCount The number of frames to be extracted
3786     * from startMs to endMs
3787     *
3788     * @return The frames as bitmaps in bitmap array
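     *
     * <p>For example, requesting 8 thumbnails between 0 ms and 8000 ms extracts
     * one frame every 1000 ms.</p>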
3789     **/
3790    public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
3791            int thumbnailCount) {
3792        int[] rgb888 = null;
3793        int thumbnailSize = 0;
3794        int newWidth = 0;
3795        int newHeight = 0;
3796        Bitmap tempBitmap = null;
3797
3798        /* Round width and height up to the nearest even values */
3799        newWidth = (width + 1) & 0xFFFFFFFE;
3800        newHeight = (height + 1) & 0xFFFFFFFE;
3801        thumbnailSize = newWidth * newHeight * 4;
3802
3803        /* Create a temp bitmap for resized thumbnails */
3804        if ((newWidth != width) || (newHeight != height)) {
3805            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3806        }
3807        int i = 0;
3808        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
3809        Bitmap[] bitmaps = null;
3810
3811        try {
3812            // This allocation may result in an OutOfMemoryError
3813            rgb888 = new int[thumbnailSize * thumbnailCount];
3814            bitmaps = new Bitmap[thumbnailCount];
3815        } catch (Throwable e) {
3816            // Retry the allocation with a fixed, smaller thumbnail count
3817            try {
3818                System.gc();
3819                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
3820                bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED];
3821                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
3822            } catch (Throwable ex) {
3823                throw new RuntimeException("Memory allocation failed, thumbnail count too large: " + thumbnailCount);
3824            }
3825        }
3826        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
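        /*
         * The native call fills rgb888 with thumbnailCount frames laid out back to
         * back; each frame is then copied into tmpBuffer and converted to a Bitmap.
         */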
3827        nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount,
3828                startMs, endMs);
3829
3830        for (; i < thumbnailCount; i++) {
3831            bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3832            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
3833            tmpBuffer.rewind();
3834
3835            if ((newWidth == width) && (newHeight == height)) {
3836                bitmaps[i].copyPixelsFromBuffer(tmpBuffer);
3837            } else {
3838                /* Copy the out rgb buffer to temp bitmap */
3839                tempBitmap.copyPixelsFromBuffer(tmpBuffer);
3840
3841                /* Create a canvas to resize */
3842                final Canvas canvas = new Canvas(bitmaps[i]);
3843                canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3844                                              new Rect(0, 0, width, height), sResizePaint);
3845            }
3846        }
3847
3848        if (tempBitmap != null) {
3849            tempBitmap.recycle();
3850        }
3851        return bitmaps;
3852    }
3853
3854    /**
3855     * This method generates the audio graph
3856     *
3857     * @param uniqueId The unique id
3858     * @param inFileName The input file name
3859     * @param OutAudiGraphFileName The output audio graph file name
3860     * @param frameDuration The duration of each frame
3861     * @param audioChannels The number of audio channels
3862     * @param samplesCount The total number of samples
3863     * @param listener The ExtractAudioWaveformProgressListener reference
3864     * @param isVideo The flag to indicate whether the file is a video file or not
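     *
     * <p>For video items a temporary PCM file named {@code <uniqueId>.pcm} is first
     * generated in the project directory and deleted once the graph file has been
     * written.</p>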
3865     *
3866     **/
3867    void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
3868            int frameDuration, int audioChannels, int samplesCount,
3869            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
3870        String tempPCMFileName;
3871
3872        mExtractAudioWaveformProgressListener = listener;
3873
3874        /**
3875         * In the case of video, the PCM file is generated first and the audio
3876         * graph is then built from it
3877         */
3878        if (isVideo) {
3879            tempPCMFileName = mProjectPath + "/" + uniqueId + ".pcm";
3880        } else {
3881            tempPCMFileName = mAudioTrackPCMFilePath;
3882        }
3883
3884        /**
3885         * For Video item, generate the PCM
3886         */
3887        if (isVideo) {
3888            nativeGenerateRawAudio(inFileName, tempPCMFileName);
3889        }
3890
3891        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
3892                audioChannels, samplesCount);
3893
3894        /**
3895         * Once the audio graph file is generated, delete the pcm file
3896         */
3897        if (isVideo) {
3898            new File(tempPCMFileName).delete();
3899        }
3900    }
3901
3902    void clearPreviewSurface(Surface surface) {
3903        nativeClearSurface(surface);
3904    }
3905
3906    /**
3907     * Grab the semaphore which arbitrates access to the editor
3908     *
3909     * @throws InterruptedException
3910     */
3911    void lock() throws InterruptedException {
3912        if (Log.isLoggable(TAG, Log.DEBUG)) {
3913            Log.d(TAG, "lock: grabbing semaphore", new Throwable());
3914        }
3915        mLock.acquire();
3916        if (Log.isLoggable(TAG, Log.DEBUG)) {
3917            Log.d(TAG, "lock: grabbed semaphore");
3918        }
3919    }
3920
3921    /**
3922     * Tries to grab, with a specified timeout, the semaphore which arbitrates access to the editor
3923     *
3924     * @param timeoutMs time out in ms.
3925     *
3926     * @return true if the semaphore is acquired, false otherwise
3927     * @throws InterruptedException
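     *
     * <p>Sketch of the intended call pattern; the timeout value is an
     * illustrative assumption:</p>
     * <pre>
     *   if (lock(500)) {
     *       try {
     *           // touch editor state
     *       } finally {
     *           unlock();
     *       }
     *   }
     * </pre>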
3928     */
3929    boolean lock(long timeoutMs) throws InterruptedException {
3930        if (Log.isLoggable(TAG, Log.DEBUG)) {
3931            Log.d(TAG, "lock: grabbing semaphore with timeout " + timeoutMs, new Throwable());
3932        }
3933
3934        boolean acquireSem = mLock.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS);
3935        if (Log.isLoggable(TAG, Log.DEBUG)) {
3936            Log.d(TAG, "lock: grabbed semaphore status " + acquireSem);
3937        }
3938
3939        return acquireSem;
3940    }
3941
3942    /**
3943     * Release the semaphore which arbitrates access to the editor
3944     */
3945    void unlock() {
3946        if (Log.isLoggable(TAG, Log.DEBUG)) {
3947            Log.d(TAG, "unlock: releasing semaphore");
3948        }
3949        mLock.release();
3950    }
3951
3952    /**     Native Methods        */
3953    native Properties getMediaProperties(String file) throws IllegalArgumentException,
3954            IllegalStateException, RuntimeException, Exception;
3955
3956    /**
3957     * Get the version of ManualEdit.
3958     *
3959     * @return version of ManualEdit
3960     * @throws RuntimeException if an error occurred
3961     * @see Version
3962     */
3963    private static native Version getVersion() throws RuntimeException;
3964
3965    /**
3966     * Returns the video thumbnail in an array of integers. Output format is
3967     * ARGB8888.
3968     *
3969     * @param pixelArray the array that receives the pixel values
3970     * @param width width of the video thumbnail
3971     * @param height height of the video thumbnail
3972     * @param timeMS desired time of the thumbnail in ms
3973     * @return actual time in ms of the thumbnail generated
3974     * @throws IllegalStateException if the class has not been initialized
3975     * @throws IllegalArgumentException if the pixelArray is not available or
3976     *             one of the dimensions is negative or zero or the time is
3977     *             negative
3978     * @throws RuntimeException on runtime errors in native code
3979     */
3980    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
3981            long timeMS);
3982
3983    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
3984            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
3985
3986    /**
3987     * Releases the JNI and cleans up the core native module. Should be called
3988     * only after init()
3989     *
3990     * @throws IllegalStateException if the method could not be called
3991     */
3992    private native void release() throws IllegalStateException, RuntimeException;
3993
3994    /*
3995     * Clear the preview surface
3996     */
3997    private native void nativeClearSurface(Surface surface);
3998
3999    /**
4000     * Stops the encoding. This method should only be called after encoding has
4001     * started using method <code> startEncoding</code>
4002     *
4003     * @throws IllegalStateException if the method could not be called
4004     */
4005    private native void stopEncoding() throws IllegalStateException, RuntimeException;
4006
4007
4008    private native void _init(String tempPath, String libraryPath)
4009            throws IllegalArgumentException, IllegalStateException, RuntimeException;
4010
4011    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
4012            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
4013            IllegalStateException, RuntimeException;
4014
4015    private native void nativePopulateSettings(EditSettings editSettings,
4016            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
4017    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4018
4019    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
4020                                                 int surfaceWidth, int surfaceHeight)
4021                                                 throws IllegalArgumentException,
4022                                                 IllegalStateException, RuntimeException;
4023
4024    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
4025            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
4026    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4027
4028    private native void nativeStopPreview();
4029
4030    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
4031            int frameDuration, int channels, int sampleCount);
4032
4033    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
4034
4035    private native int nativeGenerateClip(EditSettings editSettings)
4036    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4037
4038}
4039