MediaArtistNativeHelper.java revision 3ced044154945f9d60983032278e00fe28f4ab1b
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.Iterator;
24import java.util.List;
25import java.util.concurrent.Semaphore;
26
27import android.graphics.Bitmap;
28import android.graphics.BitmapFactory;
29import android.graphics.Canvas;
30import android.graphics.Paint;
31import android.graphics.Rect;
32import android.media.videoeditor.VideoEditor.ExportProgressListener;
33import android.media.videoeditor.VideoEditor.PreviewProgressListener;
34import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
35import android.util.Log;
36import android.util.Pair;
37import android.view.Surface;
38
39/**
40 * This class provides native methods to be used by MediaArtist {@hide}
41 */
42class MediaArtistNativeHelper {
43    private static final String TAG = "MediaArtistNativeHelper";
44
45    static {
46        System.loadLibrary("videoeditor_jni");
47    }
48
49    private static final int MAX_THUMBNAIL_PERMITTED = 8;
50
51    public static final int TASK_LOADING_SETTINGS = 1;
52    public static final int TASK_ENCODING = 2;
53
54    /**
55     *  The resize paint
56     */
57    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
58
59    private final VideoEditor mVideoEditor;
60    /*
61     *  Semaphore to control preview calls
62     */
63    private final Semaphore mLock;
64
65    private EditSettings mStoryBoardSettings;
66
67    private String mOutputFilename;
68
69    private PreviewClipProperties mClipProperties = null;
70
71    private EditSettings mPreviewEditSettings;
72
73    private AudioSettings mAudioSettings = null;
74
75    private AudioTrack mAudioTrack = null;
76
77    private boolean mInvalidatePreviewArray = true;
78
79    private boolean mRegenerateAudio = true;
80
81    private String mExportFilename = null;
82    private int mExportVideoCodec = 0;
83    private int mExportAudioCodec = 0;
84    private int mProgressToApp;
85
86    private String mRenderPreviewOverlayFile;
87    private int mRenderPreviewRenderingMode;
88
89    private boolean mIsFirstProgress;
90
91    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
92
93    // Processing indication
94    public static final int PROCESSING_NONE          = 0;
95    public static final int PROCESSING_AUDIO_PCM     = 1;
96    public static final int PROCESSING_TRANSITION    = 2;
97    public static final int PROCESSING_KENBURNS      = 3;
98    public static final int PROCESSING_INTERMEDIATE1 = 11;
99    public static final int PROCESSING_INTERMEDIATE2 = 12;
100    public static final int PROCESSING_INTERMEDIATE3 = 13;
101    public static final int PROCESSING_EXPORT        = 20;
102
103    private int mProcessingState;
104    private Object mProcessingObject;
105    private PreviewProgressListener mPreviewProgressListener;
106    private ExportProgressListener mExportProgressListener;
107    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
108    private MediaProcessingProgressListener mMediaProcessingProgressListener;
109    private final String mProjectPath;
110
111    private long mPreviewProgress;
112
113    private String mAudioTrackPCMFilePath;
114
115    private int mTotalClips = 0;
116
117    private boolean mErrorFlagSet = false;
118
119    @SuppressWarnings("unused")
120    private int mManualEditContext;
121
122    /* Listeners */
123
124    /**
125     * Interface definition for a listener to be invoked when there is an update
126     * in a running task.
127     */
128    public interface OnProgressUpdateListener {
129        /**
130         * Called when there is an update.
131         *
132         * @param taskId id of the task reporting an update.
133         * @param progress progress of the task [0..100].
134         * @see BasicEdit#TASK_ENCODING
135         */
136        public void OnProgressUpdate(int taskId, int progress);
137    }
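    /*
     * Illustrative sketch only (not part of the original file): a minimal
     * OnProgressUpdateListener that logs encoding progress. The use of
     * TASK_ENCODING and TAG here is an assumption based on the constants
     * declared above.
     *
     *     OnProgressUpdateListener listener = new OnProgressUpdateListener() {
     *         public void OnProgressUpdate(int taskId, int progress) {
     *             if (taskId == TASK_ENCODING) {
     *                 Log.d(TAG, "Encoding progress: " + progress + "%");
     *             }
     *         }
     *     };
     */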
138
139    /** Defines the version. */
140    public final class Version {
141
142        /** Major version number */
143        public int major;
144
145        /** Minor version number */
146        public int minor;
147
148        /** Revision number */
149        public int revision;
150
151        /** VIDEOEDITOR major version number */
152        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
153
154        /** VIDEOEDITOR minor version number */
155        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
156
157        /** VIDEOEDITOR revision number */
158        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
159
160        /** Method which returns the current VIDEOEDITOR version */
161        public Version getVersion() {
162            Version version = new Version();
163
164            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
165            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
166            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
167
168            return version;
169        }
170    }
171
172    /**
173     * Defines output audio formats.
174     */
175    public final class AudioFormat {
176        /** No audio present in output clip. Used to generate video only clip */
177        public static final int NO_AUDIO = 0;
178
179        /** AMR Narrow Band. */
180        public static final int AMR_NB = 1;
181
182        /** Advanced Audio Coding (AAC). */
183        public static final int AAC = 2;
184
185        /** Advanced Audio Codec Plus (HE-AAC v1). */
186        public static final int AAC_PLUS = 3;
187
188        /** Advanced Audio Codec Plus (HE-AAC v2). */
189        public static final int ENHANCED_AAC_PLUS = 4;
190
191        /** MPEG layer 3 (MP3). */
192        public static final int MP3 = 5;
193
194        /** Enhanced Variable Rate Codec (EVRC). */
195        public static final int EVRC = 6;
196
197        /** PCM (PCM). */
198        public static final int PCM = 7;
199
200        /** No transcoding. Output audio format is same as input audio format */
201        public static final int NULL_AUDIO = 254;
202
203        /** Unsupported audio format. */
204        public static final int UNSUPPORTED_AUDIO = 255;
205    }
206
207    /**
208     * Defines audio sampling frequencies.
209     */
210    public final class AudioSamplingFrequency {
211        /**
212         * Default sampling frequency. Uses the default frequency for a specific
213         * audio format. For AAC the only supported (and thus default) sampling
214         * frequency is 16 kHz. For this audio format, the sampling frequency set in
215         * the OutputParams is ignored.
216         **/
217        public static final int FREQ_DEFAULT = 0;
218
219        /** Audio sampling frequency of 8000 Hz. */
220        public static final int FREQ_8000 = 8000;
221
222        /** Audio sampling frequency of 11025 Hz. */
223        public static final int FREQ_11025 = 11025;
224
225        /** Audio sampling frequency of 12000 Hz. */
226        public static final int FREQ_12000 = 12000;
227
228        /** Audio sampling frequency of 16000 Hz. */
229        public static final int FREQ_16000 = 16000;
230
231        /** Audio sampling frequency of 22050 Hz. */
232        public static final int FREQ_22050 = 22050;
233
234        /** Audio sampling frequency of 24000 Hz. */
235        public static final int FREQ_24000 = 24000;
236
237        /** Audio sampling frequency of 32000 Hz. */
238        public static final int FREQ_32000 = 32000;
239
240        /** Audio sampling frequency of 44100 Hz. */
241        public static final int FREQ_44100 = 44100;
242
243        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
244        public static final int FREQ_48000 = 48000;
245    }
246
247    /**
248     * Defines the supported fixed audio and video bitrates. These values are
249     * for output audio and video only.
250     */
251    public final class Bitrate {
252        /** Variable bitrate. Means no bitrate regulation */
253        public static final int VARIABLE = -1;
254
255        /** An undefined bitrate. */
256        public static final int UNDEFINED = 0;
257
258        /** A bitrate of 9.2 kbits/s. */
259        public static final int BR_9_2_KBPS = 9200;
260
261        /** A bitrate of 12.2 kbits/s. */
262        public static final int BR_12_2_KBPS = 12200;
263
264        /** A bitrate of 16 kbits/s. */
265        public static final int BR_16_KBPS = 16000;
266
267        /** A bitrate of 24 kbits/s. */
268        public static final int BR_24_KBPS = 24000;
269
270        /** A bitrate of 32 kbits/s. */
271        public static final int BR_32_KBPS = 32000;
272
273        /** A bitrate of 48 kbits/s. */
274        public static final int BR_48_KBPS = 48000;
275
276        /** A bitrate of 64 kbits/s. */
277        public static final int BR_64_KBPS = 64000;
278
279        /** A bitrate of 96 kbits/s. */
280        public static final int BR_96_KBPS = 96000;
281
282        /** A bitrate of 128 kbits/s. */
283        public static final int BR_128_KBPS = 128000;
284
285        /** A bitrate of 192 kbits/s. */
286        public static final int BR_192_KBPS = 192000;
287
288        /** A bitrate of 256 kbits/s. */
289        public static final int BR_256_KBPS = 256000;
290
291        /** A bitrate of 288 kbits/s. */
292        public static final int BR_288_KBPS = 288000;
293
294        /** A bitrate of 384 kbits/s. */
295        public static final int BR_384_KBPS = 384000;
296
297        /** A bitrate of 512 kbits/s. */
298        public static final int BR_512_KBPS = 512000;
299
300        /** A bitrate of 800 kbits/s. */
301        public static final int BR_800_KBPS = 800000;
302
303        /** A bitrate of 2 Mbits/s. */
304        public static final int BR_2_MBPS = 2000000;
305
306        /** A bitrate of 5 Mbits/s. */
307        public static final int BR_5_MBPS = 5000000;
308
309        /** A bitrate of 8 Mbits/s. */
310        public static final int BR_8_MBPS = 8000000;
311    }
312
313    /**
314     * Defines all supported file types.
315     */
316    public final class FileType {
317        /** 3GPP file type. */
318        public static final int THREE_GPP = 0;
319
320        /** MP4 file type. */
321        public static final int MP4 = 1;
322
323        /** AMR file type. */
324        public static final int AMR = 2;
325
326        /** MP3 audio file type. */
327        public static final int MP3 = 3;
328
329        /** PCM audio file type. */
330        public static final int PCM = 4;
331
332        /** JPEG image file type. */
333        public static final int JPG = 5;
334
335        /** GIF image file type. */
336        public static final int GIF = 7;
337
338        /** PNG image file type. */
339        public static final int PNG = 8;
340
341        /** M4V file type. */
342        public static final int M4V = 10;
343
344        /** Unsupported file type. */
345        public static final int UNSUPPORTED = 255;
346    }
347
348    /**
349     * Defines rendering types. Rendering can only be applied to files
350     * containing video streams.
351     **/
352    public final class MediaRendering {
353        /**
354         * Resize to fit the output video, changing the aspect ratio if
355         * needed.
356         */
357        public static final int RESIZING = 0;
358
359        /**
360         * Crop the input video to fit the output video resolution.
361         **/
362        public static final int CROPPING = 1;
363
364        /**
365         * Resize to fit the output video resolution but maintain the aspect
366         * ratio. This framing type adds black borders if needed.
367         */
368        public static final int BLACK_BORDERS = 2;
369    }
370
371    /**
372     * Defines the results.
373     */
374    public final class Result {
375        /** No error. result OK */
376        public static final int NO_ERROR = 0;
377
378        /** File not found */
379        public static final int ERR_FILE_NOT_FOUND = 1;
380
381        /**
382         * In case of UTF8 conversion, the size of the converted path will be
383         * more than the corresponding allocated buffer.
384         */
385        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
386
387        /** Invalid file type. */
388        public static final int ERR_INVALID_FILE_TYPE = 3;
389
390        /** Invalid effect kind. */
391        public static final int ERR_INVALID_EFFECT_KIND = 4;
392
393        /** Invalid video effect. */
394        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
395
396        /** Invalid audio effect. */
397        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
398
399        /** Invalid video transition. */
400        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
401
402        /** Invalid audio transition. */
403        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
404
405        /** Invalid encoding frame rate. */
406        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
407
408        /** External effect is called but this function is not set. */
409        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
410
411        /** External transition is called but this function is not set. */
412        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
413
414        /** Begin cut time is larger than the video clip duration. */
415        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
416
417        /** Begin cut time is greater than or equal to the end cut time. */
418        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
419
420        /** Two consecutive transitions are overlapping on one clip. */
421        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
422
423        /** Internal error, type size mismatch. */
424        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
425
426        /** An input 3GPP file is invalid/corrupted. */
427        public static final int ERR_INVALID_3GPP_FILE = 16;
428
429        /** A file contains an unsupported video format. */
430        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
431
432        /** A file contains an unsupported audio format. */
433        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
434
435        /** A file format is not supported. */
436        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
437
438        /** An input clip has an unexpectedly large Video AU. */
439        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
440
441        /** An input clip has an unexpectedly large Audio AU. */
442        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
443
444        /** An input clip has a corrupted Audio AU. */
445        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
446
447        /** The video encoder encountered an Access Unit error. */
448        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
449
450        /** Unsupported video format for Video Editing. */
451        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
452
453        /** Unsupported H263 profile for Video Editing. */
454        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
455
456        /** Unsupported MPEG-4 profile for Video Editing. */
457        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
458
459        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
460        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
461
462        /** Unsupported audio format for Video Editing. */
463        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
464
465        /** File contains no supported stream. */
466        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
467
468        /** File contains no video stream or an unsupported video stream. */
469        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
470
471        /** Internal error, clip analysis version mismatch. */
472        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
473
474        /**
475         * At least one of the clip analyses was generated on another
476         * platform (WIN32, ARM, etc.).
477         */
478        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
479
480        /** Clips don't have the same video format (H263 or MPEG4). */
481        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
482
483        /** Clips don't have the same frame size. */
484        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
485
486        /** Clips don't have the same MPEG-4 time scale. */
487        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
488
489        /** Clips don't have the same use of MPEG-4 data partitioning. */
490        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
491
492        /** MP3 clips can't be assembled. */
493        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
494
495        /**
496         * The input 3GPP file does not contain any supported audio or video
497         * track.
498         */
499        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
500
501        /**
502         * The volume of the added audio track (AddVolume) must be strictly
503         * greater than zero.
504         */
505        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
506
507        /**
508         * The time at which an audio track is added can't be higher than the
509         * input video track duration.
510         */
511        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
512
513        /** The audio track file format setting is undefined. */
514        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
515
516        /** The added audio track stream has an unsupported format. */
517        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
518
519        /** The audio mixing feature doesn't support the audio track type. */
520        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
521
522        /** The audio mixing feature doesn't support MP3 audio tracks. */
523        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
524
525        /**
526         * An added audio track limits the available features: uiAddCts must be
527         * 0 and bRemoveOriginal must be true.
528         */
529        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
530
531        /**
532         * An added audio track limits the available features: uiAddCts must be
533         * 0 and bRemoveOriginal must be true.
534         */
535        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
536
537        /** Input audio track is not of a type that can be mixed with output. */
538        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
539
540        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
541        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
542
543        /**
544         * An added EVRC audio track limits the available features: uiAddCts must
545         * be 0 and bRemoveOriginal must be true.
546         */
547        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
548
549        /** H263 profiles other than 0 are not supported. */
550        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
551
552        /** File contains no video stream or an unsupported video stream. */
553        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
554
555        /** Transcoding of the input file(s) is necessary. */
556        public static final int WAR_TRANSCODING_NECESSARY = 53;
557
558        /**
559         * The size of the output file will exceed the maximum configured value.
560         */
561        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
562
563        /** The time scale is too big. */
564        public static final int WAR_TIMESCALE_TOO_BIG = 55;
565
566        /** The year is out of range */
567        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
568
569        /** The directory could not be opened */
570        public static final int ERR_DIR_OPEN_FAILED = 57;
571
572        /** The directory could not be read */
573        public static final int ERR_DIR_READ_FAILED = 58;
574
575        /** There are no more entries in the current directory */
576        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
577
578        /** One or more input parameters are invalid */
579        public static final int ERR_PARAMETER = 60;
580
581        /** There is a state machine error */
582        public static final int ERR_STATE = 61;
583
584        /** Memory allocation failed */
585        public static final int ERR_ALLOC = 62;
586
587        /** Context is invalid */
588        public static final int ERR_BAD_CONTEXT = 63;
589
590        /** Context creation failed */
591        public static final int ERR_CONTEXT_FAILED = 64;
592
593        /** Invalid stream ID */
594        public static final int ERR_BAD_STREAM_ID = 65;
595
596        /** Invalid option ID */
597        public static final int ERR_BAD_OPTION_ID = 66;
598
599        /** The option is write only */
600        public static final int ERR_WRITE_ONLY = 67;
601
602        /** The option is read only */
603        public static final int ERR_READ_ONLY = 68;
604
605        /** The feature is not implemented in this version */
606        public static final int ERR_NOT_IMPLEMENTED = 69;
607
608        /** The media type is not supported */
609        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
610
611        /** No data to be encoded */
612        public static final int WAR_NO_DATA_YET = 71;
613
614        /** No data to be decoded */
615        public static final int WAR_NO_MORE_STREAM = 72;
616
617        /** Time stamp is invalid */
618        public static final int WAR_INVALID_TIME = 73;
619
620        /** No more data to be decoded */
621        public static final int WAR_NO_MORE_AU = 74;
622
623        /** Semaphore timed out */
624        public static final int WAR_TIME_OUT = 75;
625
626        /** Memory buffer is full */
627        public static final int WAR_BUFFER_FULL = 76;
628
629        /** Server has asked for redirection */
630        public static final int WAR_REDIRECT = 77;
631
632        /** Too many streams in input */
633        public static final int WAR_TOO_MUCH_STREAMS = 78;
634
635        /** The file cannot be opened or written to because it is locked */
636        public static final int ERR_FILE_LOCKED = 79;
637
638        /** The file access mode is invalid */
639        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
640
641        /** The file pointer points to an invalid location */
642        public static final int ERR_FILE_INVALID_POSITION = 81;
643
644        /** Invalid string */
645        public static final int ERR_STR_BAD_STRING = 94;
646
647        /** The input string cannot be converted */
648        public static final int ERR_STR_CONV_FAILED = 95;
649
650        /** The string size is too large */
651        public static final int ERR_STR_OVERFLOW = 96;
652
653        /** Bad string arguments */
654        public static final int ERR_STR_BAD_ARGS = 97;
655
656        /** The string value is larger than maximum size allowed */
657        public static final int WAR_STR_OVERFLOW = 98;
658
659        /** The string value is not present in this comparison operation */
660        public static final int WAR_STR_NOT_FOUND = 99;
661
662        /** The thread is not started */
663        public static final int ERR_THREAD_NOT_STARTED = 100;
664
665        /** Transcoding done warning */
666        public static final int WAR_TRANSCODING_DONE = 101;
667
668        /** Unsupported media type */
669        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
670
671        /** Input file contains invalid/unsupported streams */
672        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
673
674        /** Invalid input file */
675        public static final int ERR_INVALID_INPUT_FILE = 104;
676
677        /** Invalid output video format */
678        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
679
680        /** Invalid output video frame size */
681        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
682
683        /** Invalid output video frame rate */
684        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
685
686        /** Invalid output audio format */
687        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
688
689        /** Invalid video frame size for H.263 */
690        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
691
692        /** Invalid video frame rate for H.263 */
693        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
694
695        /** invalid playback duration */
696        public static final int ERR_DURATION_IS_NULL = 111;
697
698        /** Invalid H.263 profile in file */
699        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
700
701        /** Invalid AAC sampling frequency */
702        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
703
704        /** Audio conversion failure */
705        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
706
707        /** Invalid trim start and end times */
708        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
709
710        /** End time smaller than start time for trim */
711        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
712
713        /** The maximum output file size setting is too small */
714        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
715
716        /** Output video bitrate is too low */
717        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
718
719        /** Output audio bitrate is too low */
720        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
721
722        /** Output video bitrate is too high */
723        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
724
725        /** Output audio bitrate is too high */
726        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
727
728        /** Output file size is too small */
729        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
730
731        /** Unknown stream type */
732        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
733
734        /** Invalid metadata in input stream */
735        public static final int WAR_READER_NO_METADATA = 124;
736
737        /** Invalid file reader info warning */
738        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
739
740        /** Warning to indicate that the writer is being stopped */
741        public static final int WAR_WRITER_STOP_REQ = 131;
742
743        /** Video decoder failed to provide frame for transcoding */
744        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
745
746        /** Video deblocking filter is not implemented */
747        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
748
749        /** H.263 decoder profile not supported */
750        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
751
752        /** The input file contains an unsupported H.263 profile */
753        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
754
755        /** There is no more space to store the output file */
756        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
757
758        /** Internal error. */
759        public static final int ERR_INTERNAL = 255;
760    }
761
762    /**
763     * Defines output video formats.
764     */
765    public final class VideoFormat {
766        /** No video present in output clip. Used to generate audio only clip */
767        public static final int NO_VIDEO = 0;
768
769        /** H263 video format. */
770        public static final int H263 = 1;
771
772        /** H264 video */
773        public static final int H264 = 2;
774
775        /** MPEG4 video format. */
776        public static final int MPEG4 = 3;
777
778        /** No transcoding. Output video format is same as input video format */
779        public static final int NULL_VIDEO = 254;
780
781        /** Unsupported video format. */
782        public static final int UNSUPPORTED = 255;
783    }
784
785    /** Defines video frame sizes. */
786    public final class VideoFrameSize {
787
788        public static final int SIZE_UNDEFINED = -1;
789
790        /** SQCIF 128 x 96 pixels. */
791        public static final int SQCIF = 0;
792
793        /** QQVGA 160 x 120 pixels. */
794        public static final int QQVGA = 1;
795
796        /** QCIF 176 x 144 pixels. */
797        public static final int QCIF = 2;
798
799        /** QVGA 320 x 240 pixels. */
800        public static final int QVGA = 3;
801
802        /** CIF 352 x 288 pixels. */
803        public static final int CIF = 4;
804
805        /** VGA 640 x 480 pixels. */
806        public static final int VGA = 5;
807
808        /** WVGA 800 x 480 pixels. */
809        public static final int WVGA = 6;
810
811        /** NTSC 720 x 480 pixels. */
812        public static final int NTSC = 7;
813
814        /** 640 x 360 */
815        public static final int nHD = 8;
816
817        /** 854 x 480 */
818        public static final int WVGA16x9 = 9;
819
820        /** 720p 1280 X 720 */
821        public static final int V720p = 10;
822
823        /** W720p 1080 x 720 */
824        public static final int W720p = 11;
825
826        /** S720p 960 x 720 */
827        public static final int S720p = 12;
828
829        /** 1080p 1920 x 1080 */
830        public static final int V1080p = 13;
831    }
832
833    /**
834     * Defines output video frame rates.
835     */
836    public final class VideoFrameRate {
837        /** Frame rate of 5 frames per second. */
838        public static final int FR_5_FPS = 0;
839
840        /** Frame rate of 7.5 frames per second. */
841        public static final int FR_7_5_FPS = 1;
842
843        /** Frame rate of 10 frames per second. */
844        public static final int FR_10_FPS = 2;
845
846        /** Frame rate of 12.5 frames per second. */
847        public static final int FR_12_5_FPS = 3;
848
849        /** Frame rate of 15 frames per second. */
850        public static final int FR_15_FPS = 4;
851
852        /** Frame rate of 20 frames per second. */
853        public static final int FR_20_FPS = 5;
854
855        /** Frame rate of 25 frames per second. */
856        public static final int FR_25_FPS = 6;
857
858        /** Frame rate of 30 frames per second. */
859        public static final int FR_30_FPS = 7;
860    }
861
862    /**
863     * Defines Video Effect Types.
864     */
865    public static class VideoEffect {
866
867        public static final int NONE = 0;
868
869        public static final int FADE_FROM_BLACK = 8;
870
871        public static final int FADE_TO_BLACK = 16;
872
873        public static final int EXTERNAL = 256;
874
875        public static final int BLACK_AND_WHITE = 257;
876
877        public static final int PINK = 258;
878
879        public static final int GREEN = 259;
880
881        public static final int SEPIA = 260;
882
883        public static final int NEGATIVE = 261;
884
885        public static final int FRAMING = 262;
886
887        public static final int TEXT = 263;
888
889        public static final int ZOOM_IN = 264;
890
891        public static final int ZOOM_OUT = 265;
892
893        public static final int FIFTIES = 266;
894
895        public static final int COLORRGB16 = 267;
896
897        public static final int GRADIENT = 268;
898    }
899
900    /**
901     * Defines the video transitions.
902     */
903    public static class VideoTransition {
904        /** No transition */
905        public static final int NONE = 0;
906
907        /** Cross fade transition */
908        public static final int CROSS_FADE = 1;
909
910        /** External transition. Currently not available. */
911        public static final int EXTERNAL = 256;
912
913        /** AlphaMagic transition. */
914        public static final int ALPHA_MAGIC = 257;
915
916        /** Slide transition. */
917        public static final int SLIDE_TRANSITION = 258;
918
919        /** Fade to black transition. */
920        public static final int FADE_BLACK = 259;
921    }
922
923    /**
924     * Defines settings for the AlphaMagic transition
925     */
926    public static class AlphaMagicSettings {
927        /** Name of the alpha file (JPEG file). */
928        public String file;
929
930        /** Blending percentage [0..100] 0 = no blending. */
931        public int blendingPercent;
932
933        /** Invert the default rotation direction of the AlphaMagic effect. */
934        public boolean invertRotation;
935
936        public int rgbWidth;
937        public int rgbHeight;
938    }
939
940    /** Defines the direction of the Slide transition. */
941    public static final class SlideDirection {
942
943        /** Right out left in. */
944        public static final int RIGHT_OUT_LEFT_IN = 0;
945
946        /** Left out right in. */
947        public static final int LEFT_OUT_RIGTH_IN = 1;
948
949        /** Top out bottom in. */
950        public static final int TOP_OUT_BOTTOM_IN = 2;
951
952        /** Bottom out top in */
953        public static final int BOTTOM_OUT_TOP_IN = 3;
954    }
955
956    /** Defines the Slide transition settings. */
957    public static class SlideTransitionSettings {
958        /**
959         * Direction of the slide transition. See {@link SlideDirection
960         * SlideDirection} for valid values.
961         */
962        public int direction;
963    }
964
965    /**
966     * Defines the settings of a single clip.
967     */
968    public static class ClipSettings {
969
970        /**
971         * The path to the clip file.
972         * <p>
973         * File format of the clip, it can be:
974         * The file format of the clip can be one of:
975         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
976         * <li>JPG file
977         * </ul>
978         */
979
980        public String clipPath;
981
982        /**
983         * The path of the decoded file. This is used only for image files.
984         */
985        public String clipDecodedPath;
986
987        /**
988         * The path of the Original file. This is used only for image files.
989         */
990        public String clipOriginalPath;
991
992        /**
993         * File type of the clip. See {@link FileType FileType} for valid
994         * values.
995         */
996        public int fileType;
997
998        /** Begin of the cut in the clip in milliseconds. */
999        public int beginCutTime;
1000
1001        /**
1002         * End of the cut in the clip in milliseconds. Set both
1003         * <code>beginCutTime</code> and <code>endCutTime</code> to
1004         * <code>0</code> to get the full length of the clip without a cut. In the
1005         * case of a JPEG clip, this is the display duration of the image.
1006         */
1007        public int endCutTime;
1008
1009        /**
1010         * Begin of the cut in the clip in percentage of the file duration.
1011         */
1012        public int beginCutPercent;
1013
1014        /**
1015         * End of the cut in the clip in percentage of the file duration. Set
1016         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1017         * <code>0</code> to get the full length of the clip without a cut.
1018         */
1019        public int endCutPercent;
1020
1021        /** Enable panning and zooming. */
1022        public boolean panZoomEnabled;
1023
1024        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1025        public int panZoomPercentStart;
1026
1027        /** Top left X coordinate at start of clip. */
1028        public int panZoomTopLeftXStart;
1029
1030        /** Top left Y coordinate at start of clip. */
1031        public int panZoomTopLeftYStart;
1032
1033        /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom */
1034        public int panZoomPercentEnd;
1035
1036        /** Top left X coordinate at end of clip. */
1037        public int panZoomTopLeftXEnd;
1038
1039        /** Top left Y coordinate at end of clip. */
1040        public int panZoomTopLeftYEnd;
1041
1042        /**
1043         * The media rendering mode. See {@link MediaRendering MediaRendering}
1044         * for valid values.
1045         */
1046        public int mediaRendering;
1047
1048        /**
1049         * RGB width and Height
1050         */
1051         public int rgbWidth;
1052         public int rgbHeight;
1053    }
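    /*
     * Illustrative sketch only (not part of the original file): populating a
     * ClipSettings for a 3GP clip trimmed to its first ten seconds. The file
     * path is hypothetical; setting both cut times to 0 would instead use the
     * full clip length, as described above.
     *
     *     ClipSettings clip = new ClipSettings();
     *     clip.clipPath = "/sdcard/example.3gp";   // hypothetical path
     *     clip.fileType = FileType.THREE_GPP;
     *     clip.beginCutTime = 0;                   // start of the cut, in ms
     *     clip.endCutTime = 10000;                 // end of the cut, in ms
     *     clip.mediaRendering = MediaRendering.BLACK_BORDERS;
     */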
1054
1055    /**
1056     * Defines settings for a transition.
1057     */
1058    public static class TransitionSettings {
1059
1060        /** Duration of the transition in msec. */
1061        public int duration;
1062
1063        /**
1064         * Transition type for video. See {@link VideoTransition
1065         * VideoTransition} for valid values.
1066         */
1067        public int videoTransitionType;
1068
1069        /**
1070         * Transition type for audio. See {@link AudioTransition
1071         * AudioTransition} for valid values.
1072         */
1073        public int audioTransitionType;
1074
1075        /**
1076         * Transition behaviour. See {@link TransitionBehaviour
1077         * TransitionBehaviour} for valid values.
1078         */
1079        public int transitionBehaviour;
1080
1081        /**
1082         * Settings for AlphaMagic transition. Only needs to be set if
1083         * <code>videoTransitionType</code> is set to
1084         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1085         * {@link AlphaMagicSettings AlphaMagicSettings}.
1086         */
1087        public AlphaMagicSettings alphaSettings;
1088
1089        /**
1090         * Settings for the Slide transition. See
1091         * {@link SlideTransitionSettings SlideTransitionSettings}.
1092         */
1093        public SlideTransitionSettings slideSettings;
1094    }
1095
1096    public static final class AudioTransition {
1097        /** No audio transition. */
1098        public static final int NONE = 0;
1099
1100        /** Cross-fade audio transition. */
1101        public static final int CROSS_FADE = 1;
1102    }
1103
1104    /**
1105     * Defines transition behaviors.
1106     */
1107    public static final class TransitionBehaviour {
1108
1109        /** The transition uses an increasing speed. */
1110        public static final int SPEED_UP = 0;
1111
1112        /** The transition uses a linear (constant) speed. */
1113        public static final int LINEAR = 1;
1114
1115        /** The transition uses a decreasing speed. */
1116        public static final int SPEED_DOWN = 2;
1117
1118        /**
1119         * The transition uses a constant speed, but slows down in the middle
1120         * section.
1121         */
1122        public static final int SLOW_MIDDLE = 3;
1123
1124        /**
1125         * The transition uses a constant speed, but increases speed in the
1126         * middle section.
1127         */
1128        public static final int FAST_MIDDLE = 4;
1129    }
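    /*
     * Illustrative sketch only (not part of the original file): a one second
     * cross-fade transition for both audio and video at a linear speed, built
     * from the types defined above.
     *
     *     TransitionSettings transition = new TransitionSettings();
     *     transition.duration = 1000;                            // ms
     *     transition.videoTransitionType = VideoTransition.CROSS_FADE;
     *     transition.audioTransitionType = AudioTransition.CROSS_FADE;
     *     transition.transitionBehaviour = TransitionBehaviour.LINEAR;
     */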
1130
1131    /**
1132     * Defines settings for the background music.
1133     */
1134    public static class BackgroundMusicSettings {
1135
1136        /** Background music file. */
1137        public String file;
1138
1139        /** File type. See {@link FileType FileType} for valid values. */
1140        public int fileType;
1141
1142        /**
1143         * Insertion time in milliseconds in the output video at which the
1144         * background music must be inserted.
1145         */
1146        public long insertionTime;
1147
1148        /**
1149         * Volume of the background music track, as a percentage. If
1150         * this field is set to 100, the background music will replace the audio
1151         * from the video input file(s).
1152         */
1153        public int volumePercent;
1154
1155        /**
1156         * Start time in milliseconds in the background music file from where
1157         * the background music should loop. Set both <code>beginLoop</code> and
1158         * <code>endLoop</code> to <code>0</code> to disable looping.
1159         */
1160        public long beginLoop;
1161
1162        /**
1163         * End time in milliseconds in the background music file to where the
1164         * background music should loop. Set both <code>beginLoop</code> and
1165         * <code>endLoop</code> to <code>0</code> to disable looping.
1166         */
1167        public long endLoop;
1168
1169        public boolean enableDucking;
1170
1171        public int duckingThreshold;
1172
1173        public int lowVolume;
1174
1175        public boolean isLooping;
1176    }
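    /*
     * Illustrative sketch only (not part of the original file): background
     * music inserted at the start of the output video, looping over its first
     * thirty seconds and mixed at half volume. The file path is hypothetical.
     *
     *     BackgroundMusicSettings bgm = new BackgroundMusicSettings();
     *     bgm.file = "/sdcard/music.mp3";   // hypothetical path
     *     bgm.fileType = FileType.MP3;
     *     bgm.insertionTime = 0;            // ms into the output video
     *     bgm.volumePercent = 50;           // 100 would replace the original audio
     *     bgm.beginLoop = 0;                // loop start, in ms
     *     bgm.endLoop = 30000;              // loop end, in ms; 0/0 disables looping
     *     bgm.isLooping = true;
     */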
1177
1178    /** Defines the audio effect types. */
1179    public static class AudioEffect {
1180        /** No audio effect. */
1181        public static final int NONE = 0;
1182
1183        /** Fade-in effect. */
1184        public static final int FADE_IN = 8;
1185
1186        /** Fade-out effect. */
1187        public static final int FADE_OUT = 16;
1188    }
1189
1190    /** Defines the effect settings. */
1191    public static class EffectSettings {
1192
1193        /** Start time of the effect in milliseconds. */
1194        public int startTime;
1195
1196        /** Duration of the effect in milliseconds. */
1197        public int duration;
1198
1199        /**
1200         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1201         * values.
1202         */
1203        public int videoEffectType;
1204
1205        /**
1206         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1207         * values.
1208         */
1209        public int audioEffectType;
1210
1211        /**
1212         * Start time of the effect as a percentage of the clip duration. A
1213         * value of 0 percent means the effect starts at the beginning of the
1214         * clip.
1215         */
1216        public int startPercent;
1217
1218        /**
1219         * Duration of the effect as a percentage of the clip duration.
1220         */
1221        public int durationPercent;
1222
1223        /**
1224         * Framing file.
1225         * <p>
1226         * This field is only used when the field <code>videoEffectType</code>
1227         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1228         * this field is ignored.
1229         */
1230        public String framingFile;
1231
1232        /**
1233         * Framing buffer.
1234         * <p>
1235         * This field is only used when the field <code>videoEffectType</code>
1236         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1237         * this field is ignored.
1238         */
1239        public int[] framingBuffer;
1240
1241        /**
1242         * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5) or ARGB_8888 (6).
1243         **/
1244
1245        public int bitmapType;
1246
1247        public int width;
1248
1249        public int height;
1250
1251        /**
1252         * Top left x coordinate. This coordinate is used to set the x
1253         * coordinate of the picture in the framing file when the framing file
1254         * is selected. The x coordinate is also used to set the location of the
1255         * text in the text effect.
1256         * <p>
1257         * This field is only used when the field <code>videoEffectType</code>
1258         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1259         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1260         * ignored.
1261         */
1262        public int topLeftX;
1263
1264        /**
1265         * Top left y coordinate. This coordinate is used to set the y
1266         * coordinate of the picture in the framing file when the framing file
1267         * is selected. The y coordinate is also used to set the location of the
1268         * text in the text effect.
1269         * <p>
1270         * This field is only used when the field <code>videoEffectType</code>
1271         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1272         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1273         * ignored.
1274         */
1275        public int topLeftY;
1276
1277        /**
1278         * Should the frame be resized or not. If this field is set to
1279         * <code>true</code> then the frame size is matched with the output
1280         * video size.
1281         * <p>
1282         * This field is only used when the field <code>videoEffectType</code>
1283         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1284         * this field is ignored.
1285         */
1286        public boolean framingResize;
1287
1288        /**
1289         * Size to which the framing buffer needs to be resized.
1290         * This is valid only if framingResize is true.
1291         */
1292        public int framingScaledSize;
1293        /**
1294         * Text to insert in the video.
1295         * <p>
1296         * This field is only used when the field <code>videoEffectType</code>
1297         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1298         * field is ignored.
1299         */
1300        public String text;
1301
1302        /**
1303         * Text attributes for the text to insert in the video.
1304         * <p>
1305         * This field is only used when the field <code>videoEffectType</code>
1306         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1307         * field is ignored. For more details about this field see the
1308         * integration guide.
1309         */
1310        public String textRenderingData;
1311
1312        /** Width of the text buffer in pixels. */
1313        public int textBufferWidth;
1314
1315        /** Height of the text buffer in pixels. */
1316        public int textBufferHeight;
1317
1318        /**
1319         * Processing rate for the fifties effect. A high value (e.g. 30)
1320         * results in high effect strength.
1321         * <p>
1322         * This field is only used when the field <code>videoEffectType</code>
1323         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1324         * this field is ignored.
1325         */
1326        public int fiftiesFrameRate;
1327
1328        /**
1329         * RGB 16 color of the RGB16 and gradient color effect.
1330         * <p>
1331         * This field is only used when the field <code>videoEffectType</code>
1332         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1333         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1334         * field is ignored.
1335         */
1336        public int rgb16InputColor;
1337
1338        /**
1339         * Start alpha blending percentage.
1340         * <p>
1341         * This field is only used when the field <code>videoEffectType</code>
1342         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1343         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1344         * is ignored.
1345         */
1346        public int alphaBlendingStartPercent;
1347
1348        /**
1349         * Middle alpha blending percentage.
1350         * <p>
1351         * This field is only used when the field <code>videoEffectType</code>
1352         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1353         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1354         * is ignored.
1355         */
1356        public int alphaBlendingMiddlePercent;
1357
1358        /**
1359         * End alpha blending percentage.
1360         * <p>
1361         * This field is only used when the field <code>videoEffectType</code>
1362         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1363         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1364         * is ignored.
1365         */
1366        public int alphaBlendingEndPercent;
1367
1368        /**
1369         * Duration, in percentage of effect duration of the fade-in phase.
1370         * <p>
1371         * This field is only used when the field <code>videoEffectType</code>
1372         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1373         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1374         * is ignored.
1375         */
1376        public int alphaBlendingFadeInTimePercent;
1377
1378        /**
1379         * Duration, in percentage of effect duration of the fade-out phase.
1380         * <p>
1381         * This field is only used when the field <code>videoEffectType</code>
1382         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1383         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1384         * is ignored.
1385         */
1386        public int alphaBlendingFadeOutTimePercent;
1387    }
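    /*
     * Illustrative sketch only (not part of the original file): a fade-to-black
     * video effect covering the last twenty percent of a clip, expressed with
     * the percentage based fields documented above.
     *
     *     EffectSettings effect = new EffectSettings();
     *     effect.videoEffectType = VideoEffect.FADE_TO_BLACK;
     *     effect.audioEffectType = AudioEffect.NONE;
     *     effect.startPercent = 80;       // start at 80% of the clip duration
     *     effect.durationPercent = 20;    // run for the remaining 20%
     */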
1388
1389    /** Defines the clip properties for preview */
1390    public static class PreviewClips {
1391
1392        /**
1393         * The path to the clip file.
1394         * <p>
1395         * File format of the clip, it can be:
1396         * <ul>
1397         * The file format of the clip can be one of:
1398         * <li>JPG file
1399         * </ul>
1400         */
1401
1402        public String clipPath;
1403
1404        /**
1405         * File type of the clip. See {@link FileType FileType} for valid
1406         * values.
1407         */
1408        public int fileType;
1409
1410        /** Begin of the cut in the clip in milliseconds. */
1411        public long beginPlayTime;
1412
1413        public long endPlayTime;
1414
1415        /**
1416         * The media rendering mode. See {@link MediaRendering MediaRendering}
1417         * for valid values.
1418         */
1419        public int mediaRendering;
1420
1421    }
1422
1423    /** Defines the audio settings. */
1424    public static class AudioSettings {
1425
1426        String pFile;
1427
1428        /** < PCM file path */
1429        String Id;
1430
1431        boolean bRemoveOriginal;
1432
1433        /** < If true, the original audio track is not taken into account */
1434        int channels;
1435
1436        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1437        int Fs;
1438
1439        /**
1440         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1441         * BGM clip
1442         */
1443        int ExtendedFs;
1444
1445        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1446        long startMs;
1447
1448        /** < Time, in milliseconds, at which the added audio track is inserted */
1449        long beginCutTime;
1450
1451        long endCutTime;
1452
1453        int fileType;
1454
1455        int volume;
1456
1457        /** < Volume, in percentage, of the added audio track */
1458        boolean loop;
1459
1460        /** < Looping on/off > **/
1461
1462        /** Audio mix and Duck **/
1463        int ducking_threshold;
1464
1465        int ducking_lowVolume;
1466
1467        boolean bInDucking_enable;
1468
1469        String pcmFilePath;
1470    }
1471
1472    /** Encapsulates preview clips and effect settings */
1473    public static class PreviewSettings {
1474
1475        public PreviewClips[] previewClipsArray;
1476
1477        /** The effect settings. */
1478        public EffectSettings[] effectSettingsArray;
1479
1480    }
1481
1482    /** Encapsulates clip properties */
1483    public static class PreviewClipProperties {
1484
1485        public Properties[] clipProperties;
1486
1487    }
1488
1489    /** Defines the editing settings. */
1490    public static class EditSettings {
1491
1492        /**
1493         * Array of clip settings. There is one <code>ClipSettings</code> for
1494         * each clip.
1495         */
1496        public ClipSettings[] clipSettingsArray;
1497
1498        /**
1499         * Array of transition settings. If there are n clips (and thus n
1500         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1501         * <code>TransitionSettings</code> in
1502         * <code>transitionSettingsArray</code>.
1503         */
1504        public TransitionSettings[] transitionSettingsArray;
1505
1506        /** The effect settings. */
1507        public EffectSettings[] effectSettingsArray;
1508
1509        /**
1510         * Video frame rate of the output clip. See {@link VideoFrameRate
1511         * VideoFrameRate} for valid values.
1512         */
1513        public int videoFrameRate;
1514
1515        /** Output file name. Must be an absolute path. */
1516        public String outputFile;
1517
1518        /**
1519         * Size of the video frames in the output clip. See
1520         * {@link VideoFrameSize VideoFrameSize} for valid values.
1521         */
1522        public int videoFrameSize;
1523
1524        /**
1525         * Format of the video stream in the output clip. See
1526         * {@link VideoFormat VideoFormat} for valid values.
1527         */
1528        public int videoFormat;
1529
1530        /**
1531         * Profile of the video stream in the output clip.
1532         */
1533        public int videoProfile;
1534
1535        /**
1536         * Level of the video stream in the output clip.
1537         */
1538        public int videoLevel;
1539
1540        /**
1541         * Format of the audio stream in the output clip. See
1542         * {@link AudioFormat AudioFormat} for valid values.
1543         */
1544        public int audioFormat;
1545
1546        /**
1547         * Sampling frequency of the audio stream in the output clip. See
1548         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1549         * values.
1550         */
1551        public int audioSamplingFreq;
1552
1553        /**
1554         * Maximum file size. By setting this field you can limit the maximum size
1555         * of the output clip. Set it to <code>0</code> to let the class ignore
1556         * this field.
1557         */
1558        public int maxFileSize;
1559
1560        /**
1561         * Number of audio channels in output clip. Use <code>0</code> for none,
1562         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1563         * allowed when the <code>audioFormat</code> field is set to
1564         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1565         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
1566         * allowed when the <code>audioFormat</code> field is set to
1567         * {@link AudioFormat#AAC AudioFormat.AAC}.
1568         */
1569        public int audioChannels;
1570
1571        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1572        public int videoBitrate;
1573
1574        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1575        public int audioBitrate;
1576
1577        /**
1578         * Background music settings. See {@link BackgroundMusicSettings
1579         * BackgroundMusicSettings} for valid values.
1580         */
1581        public BackgroundMusicSettings backgroundMusicSettings;
1582
1583        public int primaryTrackVolume;
1584
1585    }
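    /*
     * Illustrative sketch only (not part of the original file): assembling an
     * EditSettings for two clips joined by a single transition, which follows
     * the n clips / (n-1) transitions rule documented above. The variables
     * clip1, clip2 and transition are assumed to have been populated as in the
     * earlier sketches, and the output path is hypothetical.
     *
     *     EditSettings edit = new EditSettings();
     *     edit.clipSettingsArray = new ClipSettings[] { clip1, clip2 };
     *     edit.transitionSettingsArray = new TransitionSettings[] { transition };
     *     edit.videoFormat = VideoFormat.H264;
     *     edit.videoFrameSize = VideoFrameSize.VGA;
     *     edit.videoFrameRate = VideoFrameRate.FR_30_FPS;
     *     edit.videoBitrate = Bitrate.BR_2_MBPS;
     *     edit.audioFormat = AudioFormat.AAC;
     *     edit.audioSamplingFreq = AudioSamplingFrequency.FREQ_16000;
     *     edit.audioChannels = 2;
     *     edit.audioBitrate = Bitrate.BR_96_KBPS;
     *     edit.outputFile = "/sdcard/output.mp4";   // must be an absolute path
     */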
1586
1587    /**
1588     * Defines the media properties.
1589     **/
1590
1591    public static class Properties {
1592
1593        /**
1594         * Duration of the media in milliseconds.
1595         */
1596
1597        public int duration;
1598
1599        /**
1600         * File type.
1601         */
1602
1603        public int fileType;
1604
1605        /**
1606         * Video format.
1607         */
1608
1609        public int videoFormat;
1610
1611        /**
1612         * Duration of the video stream of the media in milliseconds.
1613         */
1614
1615        public int videoDuration;
1616
1617        /**
1618         * Bitrate of the video stream of the media.
1619         */
1620
1621        public int videoBitrate;
1622
1623        /**
1624         * Width of the video frames or the width of the still picture in
1625         * pixels.
1626         */
1627
1628        public int width;
1629
1630        /**
1631         * Height of the video frames or the height of the still picture in
1632         * pixels.
1633         */
1634
1635        public int height;
1636
1637        /**
1638         * Average frame rate of video in the media in frames per second.
1639         */
1640
1641        public float averageFrameRate;
1642
1643        /**
1644         * Profile of the video in the media.
1645         */
1646
1647        public int profile;
1648
1649        /**
1650         * Level of the video in the media.
1651         */
1652
1653        public int level;
1654
1655        /**
1656         * Is Video Profile supported.
1657         */
1658
1659        public boolean profileSupported;
1660
1661        /**
1662         * Is Video Level supported.
1663         */
1664
1665        public boolean levelSupported;
1666
1667        /**
1668         * Audio format.
1669         */
1670
1671        public int audioFormat;
1672
1673        /**
1674         * Duration of the audio stream of the media in milliseconds.
1675         */
1676
1677        public int audioDuration;
1678
1679        /**
1680         * Bitrate of the audio stream of the media.
1681         */
1682
1683        public int audioBitrate;
1684
1685        /**
1686         * Number of audio channels in the media.
1687         */
1688
1689        public int audioChannels;
1690
1691        /**
1692         * Sampling frequency of the audio stream in the media in samples per
1693         * second.
1694         */
1695
1696        public int audioSamplingFrequency;
1697
1698        /**
1699         * Volume value of the audio track as percentage.
1700         */
1701        public int audioVolumeValue;
1702
1703        public String Id;
1704    }
1705
1706    /**
1707     * Constructor
1708     *
1709     * @param projectPath The path where the VideoEditor stores all files
1710     *        related to the project
1711     * @param lock The semaphore
1712     * @param veObj The video editor reference
1713     */
1714    public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) {
1715        mProjectPath = projectPath;
1716        if (veObj != null) {
1717            mVideoEditor = veObj;
1718        } else {
1719            mVideoEditor = null;
1720            throw new IllegalArgumentException("video editor object is null");
1721        }
1722        if (mStoryBoardSettings == null) {
1723            mStoryBoardSettings = new EditSettings();
1724        }
1725
1726        mLock = lock;
1727
1728        _init(mProjectPath, "null");
1729        mAudioTrackPCMFilePath = null;
1730    }
1731
1732    /**
1733     * @return The project path
1734     */
1735    String getProjectPath() {
1736        return mProjectPath;
1737    }
1738
1739    /**
1740     * @return The Audio Track PCM file path
1741     */
1742    String getProjectAudioTrackPCMFilePath() {
1743        return mAudioTrackPCMFilePath;
1744    }
1745
1746    /**
1747     * Invalidates the PCM file
1748     */
1749    void invalidatePcmFile() {
1750        if (mAudioTrackPCMFilePath != null) {
1751            new File(mAudioTrackPCMFilePath).delete();
1752            mAudioTrackPCMFilePath = null;
1753        }
1754    }
1755
1756    @SuppressWarnings("unused")
1757    private void onProgressUpdate(int taskId, int progress) {
1758        if (mProcessingState == PROCESSING_EXPORT) {
1759            if (mExportProgressListener != null) {
1760                if (mProgressToApp < progress) {
1761                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
1762                    /* record previous progress */
1763                    mProgressToApp = progress;
1764                }
1765            }
1766        }
1767        else {
1768            // Adapt progress depending on current state
1769            int actualProgress = 0;
1770            int action = 0;
1771
1772            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1773                action = MediaProcessingProgressListener.ACTION_DECODE;
1774            } else {
1775                action = MediaProcessingProgressListener.ACTION_ENCODE;
1776            }
1777
1778            switch (mProcessingState) {
1779                case PROCESSING_AUDIO_PCM:
1780                    actualProgress = progress;
1781                    break;
1782                case PROCESSING_TRANSITION:
1783                    actualProgress = progress;
1784                    break;
1785                case PROCESSING_KENBURNS:
1786                    actualProgress = progress;
1787                    break;
1788                case PROCESSING_INTERMEDIATE1:
1789                    if ((progress == 0) && (mProgressToApp != 0)) {
1790                        mProgressToApp = 0;
1791                    }
1792                    if ((progress != 0) || (mProgressToApp != 0)) {
1793                        actualProgress = progress/4;
1794                    }
1795                    break;
1796                case PROCESSING_INTERMEDIATE2:
1797                    if ((progress != 0) || (mProgressToApp != 0)) {
1798                        actualProgress = 25 + progress/4;
1799                    }
1800                    break;
1801                case PROCESSING_INTERMEDIATE3:
1802                    if ((progress != 0) || (mProgressToApp != 0)) {
1803                        actualProgress = 50 + progress/2;
1804                    }
1805                    break;
1806                case PROCESSING_NONE:
1807
1808                default:
1809                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
1810                    return;
1811            }
1812            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1813
1814                mProgressToApp = actualProgress;
1815
1816                if (mMediaProcessingProgressListener != null) {
1817                    // Send the progress indication
1818                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1819                                                                actualProgress);
1820                }
1821            }
1822            /* avoid 0 in next intermediate call */
1823            if (mProgressToApp == 0) {
1824                if (mMediaProcessingProgressListener != null) {
1825                    /*
1826                     *  Send the progress indication
1827                     */
1828                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1829                                                                actualProgress);
1830                }
1831                mProgressToApp = 1;
1832            }
1833        }
1834    }
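    /*
     * Worked example of the progress mapping above (values are illustrative):
     * the three intermediate states partition the 0-100 range reported to the
     * MediaProcessingProgressListener.
     *
     *   PROCESSING_INTERMEDIATE1, native progress 80 -> 80 / 4      = 20
     *   PROCESSING_INTERMEDIATE2, native progress 80 -> 25 + 80 / 4 = 45
     *   PROCESSING_INTERMEDIATE3, native progress 80 -> 50 + 80 / 2 = 90
     *   PROCESSING_AUDIO_PCM / TRANSITION / KENBURNS -> passed through as-is
     *
     * A zero result is reported once and then mProgressToApp is set to 1 so
     * that the next intermediate stage does not appear to restart from 0.
     */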
1835
1836    @SuppressWarnings("unused")
1837    private void onPreviewProgressUpdate(int progress, boolean isFinished,
1838                  boolean updateOverlay, String filename, int renderingMode) {
1839        if (mPreviewProgressListener != null) {
1840            if (mIsFirstProgress) {
1841                mPreviewProgressListener.onStart(mVideoEditor);
1842                mIsFirstProgress = false;
1843            }
1844
1845            final VideoEditor.OverlayData overlayData;
1846            if (updateOverlay) {
1847                overlayData = new VideoEditor.OverlayData();
1848                if (filename != null) {
1849                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
1850                } else {
1851                    overlayData.setClear();
1852                }
1853            } else {
1854                overlayData = null;
1855            }
1856
1857            if (progress != 0) {
1858                mPreviewProgress = progress;
1859            }
1860
1861            if (isFinished) {
1862                mPreviewProgressListener.onStop(mVideoEditor);
1863            } else {
1864                mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
1865            }
1866        }
1867    }
1868
1869    /**
1870     * Release the native helper object
1871     */
1872    void releaseNativeHelper() throws InterruptedException {
1873        release();
1874    }
1875
1876    /**
1877     * Called from native code to report the audio graph (waveform) extraction progress
1878     */
1879    @SuppressWarnings("unused")
1880    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1881        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
1882            mExtractAudioWaveformProgressListener.onProgress(progress);
1883        }
1884    }
1885
1886    /**
1887     * Populates the Effect Settings in EffectSettings
1888     *
1889     * @param effects The EffectColor reference
1890     *
1891     * @return The populated EffectSettings reference
1892     */
1893    EffectSettings getEffectSettings(EffectColor effects) {
1894        EffectSettings effectSettings = new EffectSettings();
1895        effectSettings.startTime = (int)effects.getStartTime();
1896        effectSettings.duration = (int)effects.getDuration();
1897        effectSettings.videoEffectType = getEffectColorType(effects);
1898        effectSettings.audioEffectType = 0;
1899        effectSettings.startPercent = 0;
1900        effectSettings.durationPercent = 0;
1901        effectSettings.framingFile = null;
1902        effectSettings.topLeftX = 0;
1903        effectSettings.topLeftY = 0;
1904        effectSettings.framingResize = false;
1905        effectSettings.text = null;
1906        effectSettings.textRenderingData = null;
1907        effectSettings.textBufferWidth = 0;
1908        effectSettings.textBufferHeight = 0;
1909        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1910            effectSettings.fiftiesFrameRate = 15;
1911        } else {
1912            effectSettings.fiftiesFrameRate = 0;
1913        }
1914
1915        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
1916                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
1917            effectSettings.rgb16InputColor = effects.getColor();
1918        }
1919
1920        effectSettings.alphaBlendingStartPercent = 0;
1921        effectSettings.alphaBlendingMiddlePercent = 0;
1922        effectSettings.alphaBlendingEndPercent = 0;
1923        effectSettings.alphaBlendingFadeInTimePercent = 0;
1924        effectSettings.alphaBlendingFadeOutTimePercent = 0;
1925        return effectSettings;
1926    }
1927
1928    /**
1929     * Populates the Overlay Settings in EffectSettings
1930     *
1931     * @param overlay The OverlayFrame reference
1932     *
1933     * @return The populated EffectSettings reference for the overlay
1934     */
1935    EffectSettings getOverlaySettings(OverlayFrame overlay) {
1936        EffectSettings effectSettings = new EffectSettings();
1937        Bitmap bitmap = null;
1938
1939        effectSettings.startTime = (int)overlay.getStartTime();
1940        effectSettings.duration = (int)overlay.getDuration();
1941        effectSettings.videoEffectType = VideoEffect.FRAMING;
1942        effectSettings.audioEffectType = 0;
1943        effectSettings.startPercent = 0;
1944        effectSettings.durationPercent = 0;
1945        effectSettings.framingFile = null;
1946
1947        if ((bitmap = overlay.getBitmap()) != null) {
1948            effectSettings.framingFile = overlay.getFilename();
1949
1950            if (effectSettings.framingFile == null) {
1951                try {
1952                    (overlay).save(mProjectPath);
1953                } catch (IOException e) {
1954                    Log.e(TAG, "getOverlaySettings : File not found");
1955                }
1956                effectSettings.framingFile = overlay.getFilename();
1957            }
1958            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
1959                effectSettings.bitmapType = 6;
1960            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
1961                effectSettings.bitmapType = 5;
1962            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
1963                effectSettings.bitmapType = 4;
1964            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
1965                throw new RuntimeException("Bitmap config not supported");
1966
1967            effectSettings.width = bitmap.getWidth();
1968            effectSettings.height = bitmap.getHeight();
1969            effectSettings.framingBuffer = new int[effectSettings.width];
1970            int tmp = 0;
1971            short maxAlpha = 0;
1972            short minAlpha = (short)0xFF;
1973            short alpha = 0;
1974            while (tmp < effectSettings.height) {
1975                bitmap.getPixels(effectSettings.framingBuffer, 0,
1976                                 effectSettings.width, 0, tmp,
1977                                 effectSettings.width, 1);
1978                for (int i = 0; i < effectSettings.width; i++) {
1979                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
1980                    if (alpha > maxAlpha) {
1981                        maxAlpha = alpha;
1982                    }
1983                    if (alpha < minAlpha) {
1984                        minAlpha = alpha;
1985                    }
1986                }
1987                tmp += 1;
1988            }
1989            alpha = (short)((maxAlpha + minAlpha) / 2);
1990            alpha = (short)((alpha * 100) / 256);
1991            effectSettings.alphaBlendingEndPercent = alpha;
1992            effectSettings.alphaBlendingMiddlePercent = alpha;
1993            effectSettings.alphaBlendingStartPercent = alpha;
1994            effectSettings.alphaBlendingFadeInTimePercent = 100;
1995            effectSettings.alphaBlendingFadeOutTimePercent = 100;
1996            effectSettings.framingBuffer = null;
1997
1998            /*
1999             * Set the resized RGB file dimensions
2000             */
2001            effectSettings.width = overlay.getResizedRGBSizeWidth();
2002            if(effectSettings.width == 0) {
2003                effectSettings.width = bitmap.getWidth();
2004            }
2005
2006            effectSettings.height = overlay.getResizedRGBSizeHeight();
2007            if(effectSettings.height == 0) {
2008                effectSettings.height = bitmap.getHeight();
2009            }
2010
2011        }
2012
2013        effectSettings.topLeftX = 0;
2014        effectSettings.topLeftY = 0;
2015
2016        effectSettings.framingResize = true;
2017        effectSettings.text = null;
2018        effectSettings.textRenderingData = null;
2019        effectSettings.textBufferWidth = 0;
2020        effectSettings.textBufferHeight = 0;
2021        effectSettings.fiftiesFrameRate = 0;
2022        effectSettings.rgb16InputColor = 0;
2023        int mediaItemHeight;
2024        int aspectRatio;
2025        if (overlay.getMediaItem() instanceof MediaImageItem) {
2026            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2027                // Ken Burns was applied
2028                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2029                aspectRatio = getAspectRatio(
2030                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2031                    , mediaItemHeight);
2032            } else {
2033                //For image get the scaled height. Aspect ratio would remain the same
2034                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2035                aspectRatio = overlay.getMediaItem().getAspectRatio();
2036            }
2037        } else {
2038            aspectRatio = overlay.getMediaItem().getAspectRatio();
2039            mediaItemHeight = overlay.getMediaItem().getHeight();
2040        }
2041        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2042        return effectSettings;
2043    }
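    /*
     * Worked example of the alpha-blending computation above (illustrative
     * values): the overlay bitmap is scanned row by row for the minimum and
     * maximum alpha, and their average is converted to a percentage.
     *
     *   maxAlpha = 255, minAlpha = 0
     *   alpha = (255 + 0) / 2     = 127
     *   alpha = (127 * 100) / 256 = 49      (integer division)
     *
     * so alphaBlendingStart/Middle/EndPercent are all set to 49, with the
     * fade-in and fade-out time percentages fixed at 100.
     */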
2044
2045     /* get Video Editor aspect ratio */
2046    int nativeHelperGetAspectRatio() {
2047        return mVideoEditor.getAspectRatio();
2048    }
2049
2050    /**
2051     * Sets the export audio codec
2052     *
2053     * @param codec The audio codec to use for export
2054     *
2055     */
2056    void setAudioCodec(int codec) {
2057        mExportAudioCodec = codec;
2058    }
2059    /**
2060     * Sets the export video codec
2061     *
2062     * @param codec The video codec to use for export
2063     *
2064     */
2065    void setVideoCodec(int codec) {
2066        mExportVideoCodec = codec;
2067    }
2068
2069    /**
2070     * Sets the audio regenerate flag
2071     *
2072     * @param flag The boolean to set the audio regenerate flag
2073     *
2074     */
2075    void setAudioflag(boolean flag) {
2076        //check if the file exists.
2077        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2078            flag = true;
2079        }
2080        mRegenerateAudio = flag;
2081    }
2082
2083    /**
2084     * Gets the audio regenerate flag
2085     *
2086     * @return The audio regenerate flag
2087     *
2088     */
2089    boolean getAudioflag() {
2090        return mRegenerateAudio;
2091    }
2092
2093    /**
2094     * Maps the average frame rate to one of the defined enum values
2095     *
2096     * @param averageFrameRate The average frame rate of video item
2097     *
2098     * @return The frame rate from one of the defined enum values
2099     */
2100    int GetClosestVideoFrameRate(int averageFrameRate) {
2101        if (averageFrameRate >= 25) {
2102            return VideoFrameRate.FR_30_FPS;
2103        } else if (averageFrameRate >= 20) {
2104            return VideoFrameRate.FR_25_FPS;
2105        } else if (averageFrameRate >= 15) {
2106            return VideoFrameRate.FR_20_FPS;
2107        } else if (averageFrameRate >= 12) {
2108            return VideoFrameRate.FR_15_FPS;
2109        } else if (averageFrameRate >= 10) {
2110            return VideoFrameRate.FR_12_5_FPS;
2111        } else if (averageFrameRate >= 7) {
2112            return VideoFrameRate.FR_10_FPS;
2113        } else if (averageFrameRate >= 5) {
2114            return VideoFrameRate.FR_7_5_FPS;
2115        } else {
2116            return -1;
2117        }
2118    }
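    /*
     * Example mappings for the thresholds above (input values are
     * illustrative):
     *
     *   30 fps -> VideoFrameRate.FR_30_FPS   (>= 25)
     *   24 fps -> VideoFrameRate.FR_25_FPS   (>= 20)
     *   15 fps -> VideoFrameRate.FR_20_FPS   (>= 15)
     *    9 fps -> VideoFrameRate.FR_10_FPS   (>= 7)
     *    4 fps -> -1                         (below the supported range)
     */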
2119
2120    /**
2121     * Helper function to adjust the effect or overlay start time
2122     * depending on the begin and end boundary times of the media item
2123     */
2124    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
2125                                                  int endCutTime) {
2126
2127        int effectStartTime = 0;
2128        int effectDuration = 0;
2129
2130        /**
2131         * cbct -> clip begin cut time
2132         * cect -> clip end cut time
2133         ****************************************
2134         *  |                                 |
2135         *  |         cbct        cect        |
2136         *  | <-1-->   |           |          |
2137         *  |       <--|-2->       |          |
2138         *  |          | <---3---> |          |
2139         *  |          |        <--|-4--->    |
2140         *  |          |           | <--5-->  |
2141         *  |      <---|------6----|---->     |
2142         *  |                                 |
2143         *  < : effectStart
2144         *  > : effectStart + effectDuration
2145         ****************************************
2146         **/
2147
2148        /** 1 & 5 */
2149        /**
2150         * Effect falls outside the trim duration. In such a case the effect shall
2151         * not be applied.
2152         */
2153        if ((lEffect.startTime > endCutTime)
2154                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2155
2156            effectStartTime = 0;
2157            effectDuration = 0;
2158
2159            lEffect.startTime = effectStartTime;
2160            lEffect.duration = effectDuration;
2161            return;
2162        }
2163
2164        /** 2 */
2165        if ((lEffect.startTime < beginCutTime)
2166                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2167                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2168            effectStartTime = 0;
2169            effectDuration = lEffect.duration;
2170
2171            effectDuration -= (beginCutTime - lEffect.startTime);
2172            lEffect.startTime = effectStartTime;
2173            lEffect.duration = effectDuration;
2174            return;
2175        }
2176
2177        /** 3 */
2178        if ((lEffect.startTime >= beginCutTime)
2179                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2180            effectStartTime = lEffect.startTime - beginCutTime;
2181            lEffect.startTime = effectStartTime;
2182            lEffect.duration = lEffect.duration;
2183            return;
2184        }
2185
2186        /** 4 */
2187        if ((lEffect.startTime >= beginCutTime)
2188                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2189            effectStartTime = lEffect.startTime - beginCutTime;
2190            effectDuration = endCutTime - lEffect.startTime;
2191            lEffect.startTime = effectStartTime;
2192            lEffect.duration = effectDuration;
2193            return;
2194        }
2195
2196        /** 6 */
2197        if ((lEffect.startTime < beginCutTime)
2198                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2199            effectStartTime = 0;
2200            effectDuration = endCutTime - beginCutTime;
2201            lEffect.startTime = effectStartTime;
2202            lEffect.duration = effectDuration;
2203            return;
2204        }
2205
2206    }
2207
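    /*
     * Worked example for the cases above, assuming a clip trimmed to
     * beginCutTime = 2000 ms and endCutTime = 8000 ms (illustrative values):
     *
     *   case 2: startTime = 1000, duration = 3000
     *           -> startTime = 0,    duration = 3000 - (2000 - 1000) = 2000
     *   case 4: startTime = 6000, duration = 4000
     *           -> startTime = 4000, duration = 8000 - 6000          = 2000
     *   case 1: startTime = 9000 (after endCutTime)
     *           -> startTime = 0,    duration = 0   (effect not applied)
     */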
2208    /**
2209     * Generates the clip for preview or export
2210     *
2211     * @param editSettings The EditSettings reference for generating
2212     * a clip for preview or export
2213     *
2214     * @return error value
2215     */
2216    public int generateClip(EditSettings editSettings) {
2217        int err = 0;
2218
2219        try {
2220            err = nativeGenerateClip(editSettings);
2221        } catch (IllegalArgumentException ex) {
2222            Log.e(TAG, "Illegal Argument exception in load settings");
2223            return -1;
2224        } catch (IllegalStateException ex) {
2225            Log.e(TAG, "Illegal state exception in load settings");
2226            return -1;
2227        } catch (RuntimeException ex) {
2228            Log.e(TAG, "Runtime exception in load settings");
2229            return -1;
2230        }
2231        return err;
2232    }
2233
2234    /**
2235     * Init function to initialize the ClipSettings reference to
2236     * default values
2237     *
2238     * @param lclipSettings The ClipSettings reference
2239     */
2240    void initClipSettings(ClipSettings lclipSettings) {
2241        lclipSettings.clipPath = null;
2242        lclipSettings.clipDecodedPath = null;
2243        lclipSettings.clipOriginalPath = null;
2244        lclipSettings.fileType = 0;
2245        lclipSettings.endCutTime = 0;
2246        lclipSettings.beginCutTime = 0;
2247        lclipSettings.beginCutPercent = 0;
2248        lclipSettings.endCutPercent = 0;
2249        lclipSettings.panZoomEnabled = false;
2250        lclipSettings.panZoomPercentStart = 0;
2251        lclipSettings.panZoomTopLeftXStart = 0;
2252        lclipSettings.panZoomTopLeftYStart = 0;
2253        lclipSettings.panZoomPercentEnd = 0;
2254        lclipSettings.panZoomTopLeftXEnd = 0;
2255        lclipSettings.panZoomTopLeftYEnd = 0;
2256        lclipSettings.mediaRendering = 0;
2257    }
2258
2259
2260    /**
2261     * Populates the settings for generating an effect clip
2262     *
2263     * @param lMediaItem The media item for which the effect clip
2264     * needs to be generated
2265     * @param lclipSettings The ClipSettings reference containing
2266     * clips data
2267     * @param e The EditSettings reference containing effect specific data
2268     * @param uniqueId The unique id used in the name of the output clip
2269     * @param clipNo The clip number, used internally to select the intermediate processing state
2270     *
2271     * @return The name and path of generated clip
2272     */
2273    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2274            EditSettings e,String uniqueId,int clipNo) {
2275        int err = 0;
2276        EditSettings editSettings = null;
2277        String EffectClipPath = null;
2278        int outVideoProfile = 0;
2279        int outVideoLevel = 0;
2280        editSettings = new EditSettings();
2281
2282        editSettings.clipSettingsArray = new ClipSettings[1];
2283        editSettings.clipSettingsArray[0] = lclipSettings;
2284
2285        editSettings.backgroundMusicSettings = null;
2286        editSettings.transitionSettingsArray = null;
2287        editSettings.effectSettingsArray = e.effectSettingsArray;
2288
2289        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2290                + lMediaItem.getId() + uniqueId + ".3gp");
2291
2292        File tmpFile = new File(EffectClipPath);
2293        if (tmpFile.exists()) {
2294            tmpFile.delete();
2295        }
2296
2297        outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
2298        outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
2299        editSettings.videoProfile = outVideoProfile;
2300        editSettings.videoLevel= outVideoLevel;
2301
2302        if (lMediaItem instanceof MediaVideoItem) {
2303            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2304
2305            editSettings.audioFormat = AudioFormat.AAC;
2306            editSettings.audioChannels = 2;
2307            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2308            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2309
2310            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2311            //editSettings.videoFormat = VideoFormat.MPEG4;
2312            editSettings.videoFormat = VideoFormat.H264;
2313            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2314            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2315                    m.getHeight());
2316        } else {
2317            MediaImageItem m = (MediaImageItem)lMediaItem;
2318            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2319            editSettings.audioChannels = 2;
2320            editSettings.audioFormat = AudioFormat.AAC;
2321            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2322
2323            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2324            editSettings.videoFormat = VideoFormat.H264;
2325            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2326            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2327                    m.getScaledHeight());
2328        }
2329
2330        editSettings.outputFile = EffectClipPath;
2331
2332        if (clipNo == 1) {
2333            mProcessingState  = PROCESSING_INTERMEDIATE1;
2334        } else if (clipNo == 2) {
2335            mProcessingState  = PROCESSING_INTERMEDIATE2;
2336        }
2337        mProcessingObject = lMediaItem;
2338        err = generateClip(editSettings);
2339        mProcessingState  = PROCESSING_NONE;
2340
2341        if (err == 0) {
2342            lclipSettings.clipPath = EffectClipPath;
2343            lclipSettings.fileType = FileType.THREE_GPP;
2344            return EffectClipPath;
2345        } else {
2346            throw new RuntimeException("preview generation cannot be completed");
2347        }
2348    }
2349
2350
2351    /**
2352     * Populates the settings for generating a Ken Burns effect clip
2353     *
2354     * @param m The media image item for which the Ken Burns effect clip
2355     * needs to be generated
2356     * @param e The EditSettings reference containing clip specific data
2357     *
2358     * @return The name and path of generated clip
2359     */
2360    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2361        String output = null;
2362        int err = 0;
2363        int outVideoProfile = 0;
2364        int outVideoLevel = 0;
2365
2366        e.backgroundMusicSettings = null;
2367        e.transitionSettingsArray = null;
2368        e.effectSettingsArray = null;
2369        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2370
2371        File tmpFile = new File(output);
2372        if (tmpFile.exists()) {
2373            tmpFile.delete();
2374        }
2375
2376        outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
2377        outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
2378        e.videoProfile = outVideoProfile;
2379        e.videoLevel = outVideoLevel;
2380
2381        e.outputFile = output;
2382        e.audioBitrate = Bitrate.BR_64_KBPS;
2383        e.audioChannels = 2;
2384        e.audioFormat = AudioFormat.AAC;
2385        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2386
2387        e.videoBitrate = Bitrate.BR_5_MBPS;
2388        e.videoFormat = VideoFormat.H264;
2389        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2390        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2391                                                           m.getScaledHeight());
2392        mProcessingState  = PROCESSING_KENBURNS;
2393        mProcessingObject = m;
2394        err = generateClip(e);
2395        // Reset the processing state and check for errors
2396        mProcessingState  = PROCESSING_NONE;
2397        if (err != 0) {
2398            throw new RuntimeException("preview generation cannot be completed");
2399        }
2400        return output;
2401    }
2402
2403
2404    /**
2405     * Calculates the output resolution for transition clip
2406     *
2407     * @param m1 First media item associated with transition
2408     * @param m2 Second media item associated with transition
2409     *
2410     * @return The transition resolution
2411     */
2412    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2413        int clip1Height = 0;
2414        int clip2Height = 0;
2415        int videoSize = 0;
2416
2417        if (m1 != null && m2 != null) {
2418            if (m1 instanceof MediaVideoItem) {
2419                clip1Height = m1.getHeight();
2420            } else if (m1 instanceof MediaImageItem) {
2421                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2422            }
2423            if (m2 instanceof MediaVideoItem) {
2424                clip2Height = m2.getHeight();
2425            } else if (m2 instanceof MediaImageItem) {
2426                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2427            }
2428            if (clip1Height > clip2Height) {
2429                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2430            } else {
2431                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2432            }
2433        } else if (m1 == null && m2 != null) {
2434            if (m2 instanceof MediaVideoItem) {
2435                clip2Height = m2.getHeight();
2436            } else if (m2 instanceof MediaImageItem) {
2437                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2438            }
2439            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2440        } else if (m1 != null && m2 == null) {
2441            if (m1 instanceof MediaVideoItem) {
2442                clip1Height = m1.getHeight();
2443            } else if (m1 instanceof MediaImageItem) {
2444                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2445            }
2446            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2447        }
2448        return videoSize;
2449    }
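    /*
     * Example (illustrative): for a transition between a 720-pixel-high video
     * item and an image item scaled to 480 pixels, the larger height (720)
     * wins, so the transition resolution is findVideoResolution(aspectRatio, 720).
     */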
2450
2451    /**
2452     * Populates the settings for generating a transition clip
2453     *
2454     * @param m1 First media item associated with transition
2455     * @param m2 Second media item associated with transition
2456     * @param e The EditSettings reference containing
2457     * clip specific data
2458     * @param uniqueId The unique id used in the name of the output clip
2459     * @param t The Transition specific data
2460     *
2461     * @return The name and path of generated clip
2462     */
2463    String generateTransitionClip(EditSettings e, String uniqueId,
2464            MediaItem m1, MediaItem m2,Transition t) {
2465        String outputFilename = null;
2466        int err = 0;
2467        int outVideoProfile = 0;
2468        int outVideoLevel = 0;
2469        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2470
2471        outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
2472        outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
2473        e.videoProfile = outVideoProfile;
2474        e.videoLevel = outVideoLevel;
2475
2476        e.outputFile = outputFilename;
2477        e.audioBitrate = Bitrate.BR_64_KBPS;
2478        e.audioChannels = 2;
2479        e.audioFormat = AudioFormat.AAC;
2480        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2481
2482        e.videoBitrate = Bitrate.BR_5_MBPS;
2483        e.videoFormat = VideoFormat.H264;
2484        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2485        e.videoFrameSize = getTransitionResolution(m1, m2);
2486
2487        if (new File(outputFilename).exists()) {
2488            new File(outputFilename).delete();
2489        }
2490        mProcessingState  = PROCESSING_INTERMEDIATE3;
2491        mProcessingObject = t;
2492        err = generateClip(e);
2493        // Reset the processing state and check for errors
2494        mProcessingState  = PROCESSING_NONE;
2495        if (err != 0) {
2496            throw new RuntimeException("preview generation cannot be completed");
2497        }
2498        return outputFilename;
2499    }
2500
2501    /**
2502     * Populates effects and overlays in EffectSettings structure
2503     * and also adjusts the start time and duration of effects and overlays
2504     * w.r.t. the total storyboard time
2505     *
2506     * @param m The media item associated with the effects
2507     * @param effectSettings The EffectSettings reference containing
2508     *      effect specific data
2509     * @param beginCutTime The begin cut time of the clip associated with effect
2510     * @param endCutTime The end cut time of the clip associated with effect
2511     * @param storyBoardTime The current story board time
2512     *
2513     * @return The updated index
2514     */
2515    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2516            int beginCutTime, int endCutTime, int storyBoardTime) {
2517
2518        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2519                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2520            beginCutTime += m.getBeginTransition().getDuration();
2521            endCutTime -= m.getEndTransition().getDuration();
2522        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2523                && m.getEndTransition().getDuration() > 0) {
2524            endCutTime -= m.getEndTransition().getDuration();
2525        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2526                && m.getBeginTransition().getDuration() > 0) {
2527            beginCutTime += m.getBeginTransition().getDuration();
2528        }
2529
2530        final List<Effect> effects = m.getAllEffects();
2531        final List<Overlay> overlays = m.getAllOverlays();
2532
2533        for (Overlay overlay : overlays) {
2534            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2535            adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2536            effectSettings[i].startTime += storyBoardTime;
2537            i++;
2538        }
2539
2540        for (Effect effect : effects) {
2541            if (effect instanceof EffectColor) {
2542                effectSettings[i] = getEffectSettings((EffectColor)effect);
2543                adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2544                effectSettings[i].startTime += storyBoardTime;
2545                i++;
2546            }
2547        }
2548
2549        return i;
2550    }
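    /*
     * Example (illustrative): if a clip starts at storyBoardTime = 10000 ms on
     * the storyboard and an overlay is clamped to startTime = 500 ms within
     * the trimmed clip, the overlay is stored with startTime = 10500 ms, i.e.
     * effect and overlay start times become absolute storyboard times.
     */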
2551
2552    /**
2553     * Adjusts the media item boundaries for use in export or preview
2554     *
2555     * @param clipSettings The ClipSettings reference
2556     * @param clipProperties The Properties reference
2557     * @param m The media item
2558     */
2559    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2560                                         Properties clipProperties, MediaItem m) {
2561        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2562                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2563            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2564            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2565        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2566                && m.getEndTransition().getDuration() > 0) {
2567            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2568        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2569                && m.getBeginTransition().getDuration() > 0) {
2570            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2571        }
2572
2573        clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
2574
2575        if (clipProperties.videoDuration != 0) {
2576            clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2577        }
2578
2579        if (clipProperties.audioDuration != 0) {
2580            clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2581        }
2582    }
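    /*
     * Example (illustrative): a clip cut to [1000, 9000] ms with a 500 ms
     * begin transition and a 1000 ms end transition ends up with
     * beginCutTime = 1500, endCutTime = 8000 and duration = 6500 ms; the
     * video and audio durations are updated to the same value when present.
     */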
2583
2584    /**
2585     * Generates the transition clip if a transition is present
2586     * and has not yet been generated
2587     *
2588     * @param transition The Transition reference
2589     * @param editSettings The EditSettings reference
2590     * @param clipPropertiesArray The clip Properties array
2591     * @param index The index in the clip properties array for the current clip
2592     */
2593    private void generateTransition(Transition transition, EditSettings editSettings,
2594            PreviewClipProperties clipPropertiesArray, int index) {
2595        if (!(transition.isGenerated())) {
2596            transition.generate();
2597        }
2598        editSettings.clipSettingsArray[index] = new ClipSettings();
2599        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2600        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2601        editSettings.clipSettingsArray[index].beginCutTime = 0;
2602        editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
2603        editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
2604
2605        try {
2606            clipPropertiesArray.clipProperties[index] =
2607                getMediaProperties(transition.getFilename());
2608        } catch (Exception e) {
2609            throw new IllegalArgumentException("Unsupported file or file not found");
2610        }
2611
2612        clipPropertiesArray.clipProperties[index].Id = null;
2613        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2614        clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
2615        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2616            clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
2617        }
2618
2619        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2620            clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
2621        }
2622    }
2623
2624    /**
2625     * Sets the volume for current media item in clip properties array
2626     *
2627     * @param m The media item
2628     * @param clipProperties The clip properties array reference
2629     * @param index The index in the clip properties array for the current clip
2630     */
2631    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2632                              int index) {
2633        if (m instanceof MediaVideoItem) {
2634            final boolean videoMuted = ((MediaVideoItem)m).isMuted();
2635            if (videoMuted == false) {
2636                mClipProperties.clipProperties[index].audioVolumeValue =
2637                    ((MediaVideoItem)m).getVolume();
2638            } else {
2639                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2640            }
2641        } else if (m instanceof MediaImageItem) {
2642            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2643        }
2644    }
2645
2646    /**
2647     * Checks for odd size image width and height
2648     *
2649     * @param m The media item
2650     * @param clipProperties The clip properties array reference
2651     * @param index The index in the clip properties array for the current clip
2652     */
2653    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2654        if (m instanceof MediaImageItem) {
2655            int width = mClipProperties.clipProperties[index].width;
2656            int height = mClipProperties.clipProperties[index].height;
2657
2658            if ((width % 2) != 0) {
2659                width -= 1;
2660            }
2661            if ((height % 2) != 0) {
2662                height -= 1;
2663            }
2664            mClipProperties.clipProperties[index].width = width;
2665            mClipProperties.clipProperties[index].height = height;
2666        }
2667    }
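    /*
     * Example (illustrative): a 641 x 481 image is clamped to 640 x 480,
     * since odd widths and heights are rounded down to the nearest even value.
     */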
2668
2669    /**
2670     * Populates the media item properties and calculates the maximum
2671     * height among all the clips
2672     *
2673     * @param m The media item
2674     * @param index The index in the clip settings array for the current clip
2675     * @param maxHeight The max height from the clip properties
2676     *
2677     * @return The updated maximum height: the current clip's height if it is greater
2678     * than the heights of all previous clips, otherwise the previous maximum
2679     */
2680    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2681        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2682        if (m instanceof MediaVideoItem) {
2683            mPreviewEditSettings.clipSettingsArray[index] =
2684                ((MediaVideoItem)m).getVideoClipProperties();
2685            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2686                maxHeight = ((MediaVideoItem)m).getHeight();
2687            }
2688        } else if (m instanceof MediaImageItem) {
2689            mPreviewEditSettings.clipSettingsArray[index] =
2690                ((MediaImageItem)m).getImageClipProperties();
2691            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2692                maxHeight = ((MediaImageItem)m).getScaledHeight();
2693            }
2694        }
2695        /** + Handle the image files here */
2696        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2697            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
2698                ((MediaImageItem)m).getDecodedImageFileName();
2699
2700            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2701                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2702        }
2703        return maxHeight;
2704    }
2705
2706    /**
2707     * Populates the background music track properties
2708     *
2709     * @param mediaBGMList The background music list
2710     *
2711     */
2712    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2713
2714        if (mediaBGMList.size() == 1) {
2715            mAudioTrack = mediaBGMList.get(0);
2716        } else {
2717            mAudioTrack = null;
2718        }
2719
2720        if (mAudioTrack != null) {
2721            mAudioSettings = new AudioSettings();
2722            Properties mAudioProperties = new Properties();
2723            mAudioSettings.pFile = null;
2724            mAudioSettings.Id = mAudioTrack.getId();
2725            try {
2726                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2727            } catch (Exception e) {
2728               throw new IllegalArgumentException("Unsupported file or file not found");
2729            }
2730            mAudioSettings.bRemoveOriginal = false;
2731            mAudioSettings.channels = mAudioProperties.audioChannels;
2732            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2733            mAudioSettings.loop = mAudioTrack.isLooping();
2734            mAudioSettings.ExtendedFs = 0;
2735            mAudioSettings.pFile = mAudioTrack.getFilename();
2736            mAudioSettings.startMs = mAudioTrack.getStartTime();
2737            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2738            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2739            if (mAudioTrack.isMuted()) {
2740                mAudioSettings.volume = 0;
2741            } else {
2742                mAudioSettings.volume = mAudioTrack.getVolume();
2743            }
2744            mAudioSettings.fileType = mAudioProperties.fileType;
2745            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2746            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2747            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2748            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2749            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2750
2751            mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
2752            mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
2753            mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
2754            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2755                mAudioTrack.getStartTime();
2756            mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
2757            mPreviewEditSettings.backgroundMusicSettings.beginLoop =
2758                mAudioTrack.getBoundaryBeginTime();
2759            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2760                                               mAudioTrack.getBoundaryEndTime();
2761            mPreviewEditSettings.backgroundMusicSettings.enableDucking =
2762                mAudioTrack.isDuckingEnabled();
2763            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
2764                mAudioTrack.getDuckingThreshhold();
2765            mPreviewEditSettings.backgroundMusicSettings.lowVolume =
2766                mAudioTrack.getDuckedTrackVolume();
2767            mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
2768            mPreviewEditSettings.primaryTrackVolume = 100;
2769            mProcessingState  = PROCESSING_AUDIO_PCM;
2770            mProcessingObject = mAudioTrack;
2771        } else {
2772            mAudioSettings = null;
2773            mPreviewEditSettings.backgroundMusicSettings = null;
2774            mAudioTrackPCMFilePath = null;
2775        }
2776    }
2777
2778    /**
2779     * Counts all the effects and overlays across the media items
2780     * in the media items list, excluding Ken Burns effects
2781     *
2782     * @param mediaItemsList The media item list
2783     *
2784     * @return The total number of effects and overlays, excluding Ken Burns effects
2785     *
2786     */
2787    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2788        int totalEffects = 0;
2789        final Iterator<MediaItem> it = mediaItemsList.iterator();
2790        while (it.hasNext()) {
2791            final MediaItem t = it.next();
2792            totalEffects += t.getAllEffects().size();
2793            totalEffects += t.getAllOverlays().size();
2794            final Iterator<Effect> ef = t.getAllEffects().iterator();
2795            while (ef.hasNext()) {
2796                final Effect e = ef.next();
2797                if (e instanceof EffectKenBurns) {
2798                    totalEffects--;
2799                }
2800            }
2801        }
2802        return totalEffects;
2803    }
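    /*
     * Example (illustrative): for two media items where item A has one color
     * effect and one overlay and item B has only a Ken Burns effect, the
     * count is 2; the Ken Burns effect is excluded because it is rendered
     * into its own intermediate clip rather than applied as an effect here.
     */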
2804
2805    /**
2806     * Forms the clip settings array and clip properties array,
2807     * including transition clips and effect settings,
2808     * for preview or export.
2809     *
2810     *
2811     * @param mediaItemsList The media item list
2812     * @param mediaTransitionList The transitions list
2813     * @param mediaBGMList The background music list
2814     * @param listener The MediaProcessingProgressListener
2815     *
2816     */
2817    void previewStoryBoard(List<MediaItem> mediaItemsList,
2818            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2819            MediaProcessingProgressListener listener) {
2820        if (mInvalidatePreviewArray) {
2821            int previewIndex = 0;
2822            int totalEffects = 0;
2823            int storyBoardTime = 0;
2824            int maxHeight = 0;
2825            int beginCutTime = 0;
2826            int endCutTime = 0;
2827            int effectIndex = 0;
2828            Transition lTransition = null;
2829            MediaItem lMediaItem = null;
2830            mPreviewEditSettings = new EditSettings();
2831            mClipProperties = new PreviewClipProperties();
2832            mTotalClips = 0;
2833
2834            mTotalClips = mediaItemsList.size();
2835            for (Transition transition : mediaTransitionList) {
2836                if (transition.getDuration() > 0) {
2837                    mTotalClips++;
2838                }
2839            }
2840
2841            totalEffects = getTotalEffects(mediaItemsList);
2842
2843            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2844            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2845            mClipProperties.clipProperties = new Properties[mTotalClips];
2846
2847            /** record the call back progress listener */
2848            mMediaProcessingProgressListener = listener;
2849            mProgressToApp = 0;
2850
2851            if (mediaItemsList.size() > 0) {
2852                for (int i = 0; i < mediaItemsList.size(); i++) {
2853                    /* Get the Media Item from the list */
2854                    lMediaItem = mediaItemsList.get(i);
2855                    if (lMediaItem instanceof MediaVideoItem) {
2856                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2857                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2858                    } else if (lMediaItem instanceof MediaImageItem) {
2859                        beginCutTime = 0;
2860                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2861                    }
2862                    /* Get the transition associated with Media Item */
2863                    lTransition = lMediaItem.getBeginTransition();
2864                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2865                        /* generate transition clip */
2866                        generateTransition(lTransition, mPreviewEditSettings,
2867                                           mClipProperties, previewIndex);
2868                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2869                        previewIndex++;
2870                    }
2871                    /* Populate media item properties */
2872                    maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
2873                    /* Get the clip properties of the media item. */
2874                    if (lMediaItem instanceof MediaImageItem) {
2875                        int tmpCnt = 0;
2876                        boolean bEffectKbPresent = false;
2877                        final List<Effect> effectList = lMediaItem.getAllEffects();
2878                        /**
2879                         * Check if Ken Burns effect is present
2880                         */
2881                        while (tmpCnt < effectList.size()) {
2882                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2883                                bEffectKbPresent = true;
2884                                break;
2885                            }
2886                            tmpCnt++;
2887                        }
2888
2889                        if (bEffectKbPresent) {
2890                            try {
2891                                if (((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
2892                                    mClipProperties.clipProperties[previewIndex]
2893                                        = getMediaProperties(((MediaImageItem)lMediaItem).
2894                                            getGeneratedImageClip());
2895                                }
2896                                else {
2897                                    mClipProperties.clipProperties[previewIndex]
2898                                        = getMediaProperties(((MediaImageItem)lMediaItem).
2899                                            getScaledImageFileName());
2900                                    mClipProperties.clipProperties[previewIndex].width =
2901                                        ((MediaImageItem)lMediaItem).getScaledWidth();
2902                                    mClipProperties.clipProperties[previewIndex].height =
2903                                        ((MediaImageItem)lMediaItem).getScaledHeight();
2904                                }
2905                            } catch (Exception e) {
2906                                throw new IllegalArgumentException("Unsupported file or file not found");
2907                            }
2908                        } else {
2909                            try {
2910                                mClipProperties.clipProperties[previewIndex]
2911                                    = getMediaProperties(((MediaImageItem)lMediaItem).
2912                                        getScaledImageFileName());
2913                            } catch (Exception e) {
2914                                throw new IllegalArgumentException("Unsupported file or file not found");
2915                            }
2916                            mClipProperties.clipProperties[previewIndex].width =
2917                                ((MediaImageItem)lMediaItem).getScaledWidth();
2918                            mClipProperties.clipProperties[previewIndex].height =
2919                                ((MediaImageItem)lMediaItem).getScaledHeight();
2920                        }
2921                    } else {
2922                        try {
2923                            mClipProperties.clipProperties[previewIndex]
2924                                = getMediaProperties(lMediaItem.getFilename());
2925                        } catch (Exception e) {
2926                            throw new IllegalArgumentException("Unsupported file or file not found");
2927                        }
2928                    }
2929                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2930                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2931                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2932
2933                    /*
2934                     * Adjust the media item start and end times w.r.t. the begin
2935                     * and end transitions associated with the media item
2936                     */
2937
2938                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2939                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2940
2941                    /*
2942                     * Get all the effects and overlays for that media item and
2943                     * adjust start time and duration of effects
2944                     */
2945
2946                    effectIndex = populateEffects(lMediaItem,
2947                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2948                            endCutTime, storyBoardTime);
2949                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2950                    previewIndex++;
2951
2952                    /* Check if there is any end transition at last media item */
2953
2954                    if (i == (mediaItemsList.size() - 1)) {
2955                        lTransition = lMediaItem.getEndTransition();
2956                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2957                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
2958                                    previewIndex);
2959                            break;
2960                        }
2961                    }
2962                }
2963
2964                if (!mErrorFlagSet) {
2965                    mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
2966                            .getAspectRatio(), maxHeight);
2967                    populateBackgroundMusicProperties(mediaBGMList);
2968
2969                    /** call to native populate settings */
2970                    try {
2971                        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
2972                    } catch (IllegalArgumentException ex) {
2973                        Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
2974                        throw ex;
2975                    } catch (IllegalStateException ex) {
2976                        Log.e(TAG, "Illegal state exception in nativePopulateSettings");
2977                        throw ex;
2978                    } catch (RuntimeException ex) {
2979                        Log.e(TAG, "Runtime exception in nativePopulateSettings");
2980                        throw ex;
2981                    }
2982                    mInvalidatePreviewArray = false;
2983                    mProcessingState  = PROCESSING_NONE;
2984                }
2985            }
2986            if (mErrorFlagSet) {
2987                mErrorFlagSet = false;
2988                throw new RuntimeException("preview generation cannot be completed");
2989            }
2990        }
2991    } /* END of previewStoryBoard */
2992
2993    /**
2994     * This function is responsible for starting the preview
2995     *
2996     *
2997     * @param surface The surface on which preview has to be displayed
2998     * @param fromMs The time in ms from which preview has to be started
2999     * @param toMs The time in ms up to which the preview has to be played
3000     * @param loop Whether the preview should loop
3001     * @param callbackAfterFrameCount Indicates after how many frames
3002     * the progress callback is invoked
3003     * @param listener The PreviewProgressListener
3004     */
3005    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3006            int callbackAfterFrameCount, PreviewProgressListener listener) {
3007        mPreviewProgress = fromMs;
3008        mIsFirstProgress = true;
3009        mPreviewProgressListener = listener;
3010
3011        if (!mInvalidatePreviewArray) {
3012            try {
3013                /** Modify the image file names to the decoded RGB image files. */
3014                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3015                    clipCnt++) {
3016                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3017                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3018                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3019                    }
3020                }
3021                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3022                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3023            } catch (IllegalArgumentException ex) {
3024                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
3025                throw ex;
3026            } catch (IllegalStateException ex) {
3027                Log.e(TAG, "Illegal state exception in nativeStartPreview");
3028                throw ex;
3029            } catch (RuntimeException ex) {
3030                Log.e(TAG, "Runtime exception in nativeStartPreview");
3031                throw ex;
3032            }
3033        } else {
3034            throw new IllegalStateException("generatePreview is in progress");
3035        }
3036    }
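    /*
     * Illustrative call sequence (a sketch, not part of the original API surface):
     * the preview data is generated first, then playback is started on a Surface and
     * later stopped. "helper", "surface", "listener" and "storyboardDurationMs" are
     * assumed to be supplied by the caller.
     *
     *     helper.previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
     *     helper.doPreview(surface, 0, storyboardDurationMs, false, 4, listener);
     *     ...
     *     long stoppedAtMs = helper.stopPreview();
     */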
3037
3038    /**
3039     * This function is responsible for stopping the preview
3040     */
3041    long stopPreview() {
3042        return nativeStopPreview();
3043    }
3044
3045    /**
3046     * This function is responsible for rendering a single frame
3047     * from the complete story board on the surface
3048     *
3049     * @param surface The surface on which frame has to be rendered
3050     * @param time The time in ms at which the frame has to be rendered
3051     * @param surfaceWidth The surface width
3052     * @param surfaceHeight The surface height
3053     * @param overlayData The overlay data
3054     *
3055     * @return The actual time from the story board at which the frame was extracted
3056     * and rendered
3057     */
3058    long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3059            int surfaceHeight, VideoEditor.OverlayData overlayData) {
3060        if (mInvalidatePreviewArray) {
3061            if (Log.isLoggable(TAG, Log.DEBUG)) {
3062                Log.d(TAG, "Call generate preview first");
3063            }
3064            throw new IllegalStateException("Call generate preview first");
3065        }
3066
3067        long timeMs = 0;
3068        try {
3069            for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3070                  clipCnt++) {
3071                if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3072                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3073                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3074                }
3075            }
3076
3077            // Reset the render preview frame params that shall be set by native.
3078            mRenderPreviewOverlayFile = null;
3079            mRenderPreviewRenderingMode = MediaRendering.RESIZING;
3080
3081            nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3082
3083            timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3084
3085            if (mRenderPreviewOverlayFile != null) {
3086                overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
3087                        mRenderPreviewRenderingMode);
3088            } else {
3089                overlayData.setClear();
3090            }
3091        } catch (IllegalArgumentException ex) {
3092            Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
3093            throw ex;
3094        } catch (IllegalStateException ex) {
3095            Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
3096            throw ex;
3097        } catch (RuntimeException ex) {
3098            Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
3099            throw ex;
3100        }
3101
3102        return timeMs;
3103    }
3104
3105    private void previewFrameEditInfo(String filename, int renderingMode) {
3106        mRenderPreviewOverlayFile = filename;
3107        mRenderPreviewRenderingMode = renderingMode;
3108    }
3109
3110
3111    /**
3112     * This function is responsible for rendering a single frame
3113     * from a single media item on the surface
3114     *
3115     * @param surface The surface on which frame has to be rendered
3116     * @param filepath The file path for which the frame needs to be displayed
3117     * @param time The time in ms at which the frame has to be rendered
3118     * @param framewidth The frame width
3119     * @param frameheight The frame height
3120     *
3121     * @return The actual time from the media item at which the frame was extracted
3122     * and rendered
3123     */
3124    long renderMediaItemPreviewFrame(Surface surface, String filepath,
3125                                            long time, int framewidth, int frameheight) {
3126        long timeMs = 0;
3127        try {
3128            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3129                    frameheight, 0, 0, time);
3130        } catch (IllegalArgumentException ex) {
3131            Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
3132            throw ex;
3133        } catch (IllegalStateException ex) {
3134            Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
3135            throw ex;
3136        } catch (RuntimeException ex) {
3137            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
3138            throw ex;
3139        }
3140
3141        return timeMs;
3142    }
3143
3144    /**
3145     * This function sets the flag to invalidate the preview array
3146     * so that the preview is generated again
3147     */
3148    void setGeneratePreview(boolean isRequired) {
3149        boolean semAcquiredDone = false;
3150        try {
3151            lock();
3152            semAcquiredDone = true;
3153            mInvalidatePreviewArray = isRequired;
3154        } catch (InterruptedException ex) {
3155            Log.e(TAG, "Interrupted exception in setGeneratePreview");
3156        } finally {
3157            if (semAcquiredDone) {
3158                unlock();
3159            }
3160        }
3161    }
3162
3163    /**
3164     * @return The current status of the preview invalidation
3165     * flag
3166     */
3167    boolean getGeneratePreview() {
3168        return mInvalidatePreviewArray;
3169    }
3170
3171    /**
3172     * Calculates the aspect ratio from width and height
3173     *
3174     * @param w The width of media item
3175     * @param h The height of media item
3176     *
3177     * @return The calculated aspect ratio
3178     */
3179    int getAspectRatio(int w, int h) {
3180        double apRatio = (double)(w) / (double)(h);
3181        BigDecimal bd = new BigDecimal(apRatio);
3182        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3183        apRatio = bd.doubleValue();
3184        int var = MediaProperties.ASPECT_RATIO_16_9;
3185        if (apRatio >= 1.7) {
3186            var = MediaProperties.ASPECT_RATIO_16_9;
3187        } else if (apRatio >= 1.6) {
3188            var = MediaProperties.ASPECT_RATIO_5_3;
3189        } else if (apRatio >= 1.5) {
3190            var = MediaProperties.ASPECT_RATIO_3_2;
3191        } else if (apRatio > 1.3) {
3192            var = MediaProperties.ASPECT_RATIO_4_3;
3193        } else if (apRatio >= 1.2) {
3194            var = MediaProperties.ASPECT_RATIO_11_9;
3195        }
3196        return var;
3197    }
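    /*
     * Worked examples for the thresholds above (derived from the mapping in this method):
     *     1280 x 720 -> 1.778 -> ASPECT_RATIO_16_9
     *      800 x 480 -> 1.667 -> ASPECT_RATIO_5_3
     *      720 x 480 -> 1.5   -> ASPECT_RATIO_3_2
     *      640 x 480 -> 1.333 -> ASPECT_RATIO_4_3
     *      176 x 144 -> 1.222 -> ASPECT_RATIO_11_9
     */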
3198
3199    /**
3200     * Maps the file type used in native layer
3201     * to file type used in JAVA layer
3202     *
3203     * @param fileType The file type in native layer
3204     *
3205     * @return The File type in JAVA layer
3206     */
3207    int getFileType(int fileType) {
3208        int retValue = -1;
3209        switch (fileType) {
3210            case FileType.UNSUPPORTED:
3211                retValue = MediaProperties.FILE_UNSUPPORTED;
3212                break;
3213            case FileType.THREE_GPP:
3214                retValue = MediaProperties.FILE_3GP;
3215                break;
3216            case FileType.MP4:
3217                retValue = MediaProperties.FILE_MP4;
3218                break;
3219            case FileType.JPG:
3220                retValue = MediaProperties.FILE_JPEG;
3221                break;
3222            case FileType.PNG:
3223                retValue = MediaProperties.FILE_PNG;
3224                break;
3225            case FileType.MP3:
3226                retValue = MediaProperties.FILE_MP3;
3227                break;
3228            case FileType.M4V:
3229                retValue = MediaProperties.FILE_M4V;
3230                break;
3231
3232            default:
3233                retValue = -1;
3234        }
3235        return retValue;
3236    }
3237
3238    /**
3239     * Maps the video codec type used in native layer
3240     * to video codec type used in JAVA layer
3241     *
3242     * @param codecType The video codec type in native layer
3243     *
3244     * @return The video codec type in JAVA layer
3245     */
3246    int getVideoCodecType(int codecType) {
3247        int retValue = -1;
3248        switch (codecType) {
3249            case VideoFormat.H263:
3250                retValue = MediaProperties.VCODEC_H263;
3251                break;
3252            case VideoFormat.H264:
3253                retValue = MediaProperties.VCODEC_H264;
3254                break;
3255            case VideoFormat.MPEG4:
3256                retValue = MediaProperties.VCODEC_MPEG4;
3257                break;
3258            case VideoFormat.UNSUPPORTED:
3259
3260            default:
3261                retValue = -1;
3262        }
3263        return retValue;
3264    }
3265
3266    /**
3267     * Maps the audio codec type used in native layer
3268     * to audio codec type used in JAVA layer
3269     *
3270     * @param codecType The audio codec type in native layer
3271     *
3272     * @return The audio codec type in JAVA layer
3273     */
3274    int getAudioCodecType(int codecType) {
3275        int retValue = -1;
3276        switch (codecType) {
3277            case AudioFormat.AMR_NB:
3278                retValue = MediaProperties.ACODEC_AMRNB;
3279                break;
3280            case AudioFormat.AAC:
3281                retValue = MediaProperties.ACODEC_AAC_LC;
3282                break;
3283            case AudioFormat.MP3:
3284                retValue = MediaProperties.ACODEC_MP3;
3285                break;
3286
3287            default:
3288                retValue = -1;
3289        }
3290        return retValue;
3291    }
3292
3293    /**
3294     * Returns the frame rate as integer
3295     *
3296     * @param fps The fps as enum
3297     *
3298     * @return The frame rate as integer
3299     */
3300    int getFrameRate(int fps) {
3301        int retValue = -1;
3302        switch (fps) {
3303            case VideoFrameRate.FR_5_FPS:
3304                retValue = 5;
3305                break;
3306            case VideoFrameRate.FR_7_5_FPS:
3307                retValue = 8;
3308                break;
3309            case VideoFrameRate.FR_10_FPS:
3310                retValue = 10;
3311                break;
3312            case VideoFrameRate.FR_12_5_FPS:
3313                retValue = 13;
3314                break;
3315            case VideoFrameRate.FR_15_FPS:
3316                retValue = 15;
3317                break;
3318            case VideoFrameRate.FR_20_FPS:
3319                retValue = 20;
3320                break;
3321            case VideoFrameRate.FR_25_FPS:
3322                retValue = 25;
3323                break;
3324            case VideoFrameRate.FR_30_FPS:
3325                retValue = 30;
3326                break;
3327
3328            default:
3329                retValue = -1;
3330        }
3331        return retValue;
3332    }
3333
3334    /**
3335     * Maps the file type used in JAVA layer
3336     * to file type used in native layer
3337     *
3338     * @param fileType The file type in JAVA layer
3339     *
3340     * @return The File type in native layer
3341     */
3342    int getMediaItemFileType(int fileType) {
3343        int retValue = -1;
3344
3345        switch (fileType) {
3346            case MediaProperties.FILE_UNSUPPORTED:
3347                retValue = FileType.UNSUPPORTED;
3348                break;
3349            case MediaProperties.FILE_3GP:
3350                retValue = FileType.THREE_GPP;
3351                break;
3352            case MediaProperties.FILE_MP4:
3353                retValue = FileType.MP4;
3354                break;
3355            case MediaProperties.FILE_JPEG:
3356                retValue = FileType.JPG;
3357                break;
3358            case MediaProperties.FILE_PNG:
3359                retValue = FileType.PNG;
3360                break;
3361            case MediaProperties.FILE_M4V:
3362                retValue = FileType.M4V;
3363                break;
3364
3365            default:
3366                retValue = -1;
3367        }
3368        return retValue;
3369
3370    }
3371
3372    /**
3373     * Maps the rendering mode used in JAVA layer
3374     * to rendering mode used in native layer
3375     *
3376     * @param renderingMode The rendering mode in JAVA layer
3377     *
3378     * @return The rendering mode in native layer
3379     */
3380    int getMediaItemRenderingMode(int renderingMode) {
3381        int retValue = -1;
3382        switch (renderingMode) {
3383            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3384                retValue = MediaRendering.BLACK_BORDERS;
3385                break;
3386            case MediaItem.RENDERING_MODE_STRETCH:
3387                retValue = MediaRendering.RESIZING;
3388                break;
3389            case MediaItem.RENDERING_MODE_CROPPING:
3390                retValue = MediaRendering.CROPPING;
3391                break;
3392
3393            default:
3394                retValue = -1;
3395        }
3396        return retValue;
3397    }
3398
3399    /**
3400     * Maps the transition behavior used in JAVA layer
3401     * to transition behavior used in native layer
3402     *
3403     * @param transitionType The transition behavior in JAVA layer
3404     *
3405     * @return The transition behavior in native layer
3406     */
3407    int getVideoTransitionBehaviour(int transitionType) {
3408        int retValue = -1;
3409        switch (transitionType) {
3410            case Transition.BEHAVIOR_SPEED_UP:
3411                retValue = TransitionBehaviour.SPEED_UP;
3412                break;
3413            case Transition.BEHAVIOR_SPEED_DOWN:
3414                retValue = TransitionBehaviour.SPEED_DOWN;
3415                break;
3416            case Transition.BEHAVIOR_LINEAR:
3417                retValue = TransitionBehaviour.LINEAR;
3418                break;
3419            case Transition.BEHAVIOR_MIDDLE_SLOW:
3420                retValue = TransitionBehaviour.SLOW_MIDDLE;
3421                break;
3422            case Transition.BEHAVIOR_MIDDLE_FAST:
3423                retValue = TransitionBehaviour.FAST_MIDDLE;
3424                break;
3425
3426            default:
3427                retValue = -1;
3428        }
3429        return retValue;
3430    }
3431
3432    /**
3433     * Maps the transition slide direction used in JAVA layer
3434     * to transition slide direction used in native layer
3435     *
3436     * @param slideDirection The transition slide direction
3437     * in JAVA layer
3438     *
3439     * @return The transition slide direction in native layer
3440     */
3441    int getSlideSettingsDirection(int slideDirection) {
3442        int retValue = -1;
3443        switch (slideDirection) {
3444            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3445                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3446                break;
3447            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3448                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3449                break;
3450            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3451                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3452                break;
3453            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3454                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3455                break;
3456
3457            default:
3458                retValue = -1;
3459        }
3460        return retValue;
3461    }
3462
3463    /**
3464     * Maps the effect color type used in JAVA layer
3465     * to effect color type used in native layer
3466     *
3467     * @param effect The EffectColor reference
3468     *
3469     * @return The color effect value from native layer
3470     */
3471    private int getEffectColorType(EffectColor effect) {
3472        int retValue = -1;
3473        switch (effect.getType()) {
3474            case EffectColor.TYPE_COLOR:
3475                if (effect.getColor() == EffectColor.GREEN) {
3476                    retValue = VideoEffect.GREEN;
3477                } else if (effect.getColor() == EffectColor.PINK) {
3478                    retValue = VideoEffect.PINK;
3479                } else if (effect.getColor() == EffectColor.GRAY) {
3480                    retValue = VideoEffect.BLACK_AND_WHITE;
3481                } else {
3482                    retValue = VideoEffect.COLORRGB16;
3483                }
3484                break;
3485            case EffectColor.TYPE_GRADIENT:
3486                retValue = VideoEffect.GRADIENT;
3487                break;
3488            case EffectColor.TYPE_SEPIA:
3489                retValue = VideoEffect.SEPIA;
3490                break;
3491            case EffectColor.TYPE_NEGATIVE:
3492                retValue = VideoEffect.NEGATIVE;
3493                break;
3494            case EffectColor.TYPE_FIFTIES:
3495                retValue = VideoEffect.FIFTIES;
3496                break;
3497
3498            default:
3499                retValue = -1;
3500        }
3501        return retValue;
3502    }
3503
3504    /**
3505     * Calculates video resolution for output clip
3506     * based on clip's height and aspect ratio of storyboard
3507     *
3508     * @param aspectRatio The aspect ratio of story board
3509     * @param height The height of clip
3510     *
3511     * @return The video resolution
3512     */
3513    private int findVideoResolution(int aspectRatio, int height) {
3514        final Pair<Integer, Integer>[] resolutions;
3515        final Pair<Integer, Integer> maxResolution;
3516        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3517        switch (aspectRatio) {
3518            case MediaProperties.ASPECT_RATIO_3_2:
3519                if (height == MediaProperties.HEIGHT_480)
3520                    retValue = VideoFrameSize.NTSC;
3521                else if (height == MediaProperties.HEIGHT_720)
3522                    retValue = VideoFrameSize.W720p;
3523                break;
3524            case MediaProperties.ASPECT_RATIO_16_9:
3525                if (height == MediaProperties.HEIGHT_480)
3526                    retValue = VideoFrameSize.WVGA16x9;
3527                else if (height == MediaProperties.HEIGHT_720)
3528                    retValue = VideoFrameSize.V720p;
3529                else if (height == MediaProperties.HEIGHT_1080)
3530                    retValue = VideoFrameSize.V1080p;
3531                break;
3532            case MediaProperties.ASPECT_RATIO_4_3:
3533                if (height == MediaProperties.HEIGHT_480)
3534                    retValue = VideoFrameSize.VGA;
3535                else if (height == MediaProperties.HEIGHT_720)
3536                    retValue = VideoFrameSize.S720p;
3537                break;
3538            case MediaProperties.ASPECT_RATIO_5_3:
3539                if (height == MediaProperties.HEIGHT_480)
3540                    retValue = VideoFrameSize.WVGA;
3541                break;
3542            case MediaProperties.ASPECT_RATIO_11_9:
3543                if (height == MediaProperties.HEIGHT_144)
3544                    retValue = VideoFrameSize.QCIF;
3545                else if (height == MediaProperties.HEIGHT_288)
3546                    retValue = VideoFrameSize.CIF;
3547                break;
3548        }
3549        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3550            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3551            // Get the highest resolution
3552            maxResolution = resolutions[resolutions.length - 1];
3553            retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
3554        }
3555
3556        return retValue;
3557    }
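    /*
     * Examples of the mapping above (derived from this method, for reference):
     *
     *     findVideoResolution(MediaProperties.ASPECT_RATIO_16_9, 480) -> VideoFrameSize.WVGA16x9
     *     findVideoResolution(MediaProperties.ASPECT_RATIO_4_3, 720)  -> VideoFrameSize.S720p
     *
     * A height with no direct entry yields SIZE_UNDEFINED and is replaced by the highest
     * resolution supported for the editor's current aspect ratio.
     */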
3558
3559    /**
3560     * This method is responsible for exporting a movie
3561     *
3562     * @param filePath The output file path
3563     * @param projectDir The output project directory
3564     * @param height The height of clip
3565     * @param bitrate The bitrate at which the movie should be exported
3566     * @param mediaItemsList The media items list
3567     * @param mediaTransitionList The transitions list
3568     * @param mediaBGMList The background track list
3569     * @param listener The ExportProgressListener
3570     *
3571     */
3572    void export(String filePath, String projectDir, int height, int bitrate,
3573            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3574            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3575
3576        int outBitrate = 0;
3577        mExportFilename = filePath;
3578        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3579        mExportProgressListener = listener;
3580        int outVideoProfile = 0;
3581        int outVideoLevel = 0;
3582
3583        /** Check the platform specific maximum export resolution */
3584        VideoEditorProfile veProfile = VideoEditorProfile.get();
3585        if (veProfile == null) {
3586            throw new RuntimeException("Can't get the video editor profile");
3587        }
3588        final int maxOutputHeight = veProfile.maxOutputVideoFrameHeight;
3589        final int maxOutputWidth = veProfile.maxOutputVideoFrameWidth;
3590        if (height > maxOutputHeight) {
3591            throw new IllegalArgumentException(
3592                    "Unsupported export resolution. Supported maximum width:" +
3593                    maxOutputWidth + " height:" + maxOutputHeight +
3594                    " current height:" + height);
3595        }
3596        outVideoProfile = VideoEditorProfile.getExportProfile(mExportVideoCodec);
3597        outVideoLevel = VideoEditorProfile.getExportLevel(mExportVideoCodec);
3598
3599        mProgressToApp = 0;
3600
3601        switch (bitrate) {
3602            case MediaProperties.BITRATE_28K:
3603                outBitrate = Bitrate.BR_32_KBPS;
3604                break;
3605            case MediaProperties.BITRATE_40K:
3606                outBitrate = Bitrate.BR_48_KBPS;
3607                break;
3608            case MediaProperties.BITRATE_64K:
3609                outBitrate = Bitrate.BR_64_KBPS;
3610                break;
3611            case MediaProperties.BITRATE_96K:
3612                outBitrate = Bitrate.BR_96_KBPS;
3613                break;
3614            case MediaProperties.BITRATE_128K:
3615                outBitrate = Bitrate.BR_128_KBPS;
3616                break;
3617            case MediaProperties.BITRATE_192K:
3618                outBitrate = Bitrate.BR_192_KBPS;
3619                break;
3620            case MediaProperties.BITRATE_256K:
3621                outBitrate = Bitrate.BR_256_KBPS;
3622                break;
3623            case MediaProperties.BITRATE_384K:
3624                outBitrate = Bitrate.BR_384_KBPS;
3625                break;
3626            case MediaProperties.BITRATE_512K:
3627                outBitrate = Bitrate.BR_512_KBPS;
3628                break;
3629            case MediaProperties.BITRATE_800K:
3630                outBitrate = Bitrate.BR_800_KBPS;
3631                break;
3632            case MediaProperties.BITRATE_2M:
3633                outBitrate = Bitrate.BR_2_MBPS;
3634                break;
3635
3636            case MediaProperties.BITRATE_5M:
3637                outBitrate = Bitrate.BR_5_MBPS;
3638                break;
3639            case MediaProperties.BITRATE_8M:
3640                outBitrate = Bitrate.BR_8_MBPS;
3641                break;
3642
3643            default:
3644                throw new IllegalArgumentException("Argument Bitrate incorrect");
3645        }
3646        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3647        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3648
3649        int aspectRatio = mVideoEditor.getAspectRatio();
3650        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3651        mPreviewEditSettings.videoFormat = mExportVideoCodec;
3652        mPreviewEditSettings.audioFormat = mExportAudioCodec;
3653        mPreviewEditSettings.videoProfile = outVideoProfile;
3654        mPreviewEditSettings.videoLevel = outVideoLevel;
3655        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3656        mPreviewEditSettings.maxFileSize = 0;
3657        mPreviewEditSettings.audioChannels = 2;
3658        mPreviewEditSettings.videoBitrate = outBitrate;
3659        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3660
3661        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3662        for (int index = 0; index < mTotalClips - 1; index++) {
3663            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3664            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3665                VideoTransition.NONE;
3666            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3667                AudioTransition.NONE;
3668        }
3669
3670        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3671            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3672                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3673                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3674            }
3675        }
3676        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3677
3678        int err = 0;
3679        try {
3680            mProcessingState  = PROCESSING_EXPORT;
3681            mProcessingObject = null;
3682            err = generateClip(mPreviewEditSettings);
3683            mProcessingState  = PROCESSING_NONE;
3684        } catch (IllegalArgumentException ex) {
3685            Log.e(TAG, "IllegalArgumentException for generateClip");
3686            throw ex;
3687        } catch (IllegalStateException ex) {
3688            Log.e(TAG, "IllegalStateException for generateClip");
3689            throw ex;
3690        } catch (RuntimeException ex) {
3691            Log.e(TAG, "RuntimeException for generateClip");
3692            throw ex;
3693        }
3694
3695        if (err != 0) {
3696            Log.e(TAG, "generateClip failed with error = " + err);
3697            throw new RuntimeException("generateClip failed with error=" + err);
3698        }
3699
3700        mExportProgressListener = null;
3701    }
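    /*
     * Illustrative export call (a sketch only; the file paths, media lists and listener
     * are assumptions supplied by the caller). The output video/audio formats come from
     * mExportVideoCodec and mExportAudioCodec, which are assumed to have been set
     * elsewhere before export is invoked.
     *
     *     helper.export("/sdcard/output.mp4", projectDir,
     *             MediaProperties.HEIGHT_480, MediaProperties.BITRATE_2M,
     *             mediaItemsList, mediaTransitionList, mediaBGMList, exportListener);
     */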
3702
3703    /**
3704     * This method stops the export process
3705     *
3706     * @param filename The output file name for which the export has to be stopped
3707     */
3708    void stop(String filename) {
3709        try {
3710            stopEncoding();
3711            new File(mExportFilename).delete();
3712        } catch (IllegalStateException ex) {
3713            Log.e(TAG, "Illegal state exception in stopEncoding");
3714            throw ex;
3715        } catch (RuntimeException ex) {
3716            Log.e(TAG, "Runtime exception in stopEncoding");
3717            throw ex;
3718        }
3719    }
3720
3721    /**
3722     * This method extracts a frame from the input file
3723     * and returns the frame as a bitmap
3724     *
3725     * @param inputFile The inputFile
3726     * @param width The width of the output frame
3727     * @param height The height of the output frame
3728     * @param timeMS The time in ms at which the frame has to be extracted
3729     */
3730    Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3731        if (inputFile == null) {
3732            throw new IllegalArgumentException("Invalid input file");
3733        }
3734
3735        /* Round width and height up to even values */
3736        final int newWidth = (width + 1) & 0xFFFFFFFE;
3737        final int newHeight = (height + 1) & 0xFFFFFFFE;
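        /*
         * The mask clears bit 0, so odd dimensions are rounded up to the next even value
         * and even dimensions are left unchanged, e.g. 99 -> 100 and 100 -> 100.
         */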
3738
3739        /* Create a temp bitmap for resized thumbnails */
3740        Bitmap tempBitmap = null;
3741        if ((newWidth != width) || (newHeight != height)) {
3742             tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3743        }
3744
3745        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight); // one int per ARGB_8888 pixel
3746        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3747        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);
3748
3749        if ((newWidth == width) && (newHeight == height)) {
3750            bitmap.copyPixelsFromBuffer(rgb888);
3751        } else {
3752            /* Create a temp bitmap to be used for resize */
3753            tempBitmap.copyPixelsFromBuffer(rgb888);
3754
3755            /* Create a canvas to resize */
3756            final Canvas canvas = new Canvas(bitmap);
3757            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3758                                          new Rect(0, 0, width, height), sResizePaint);
3759            canvas.setBitmap(null);
3760        }
3761
3762        if (tempBitmap != null) {
3763            tempBitmap.recycle();
3764        }
3765
3766        return bitmap;
3767    }
3768
3769    /**
3770     * This method extracts a list of frames from the
3771     * input file and returns each frame through the callback
3772     *
3773     * @param filename The inputFile
3774     * @param width The width of the output frame
3775     * @param height The height of the output frame
3776     * @param startMs The starting time in ms
3777     * @param endMs The end time in ms
3778     * @param thumbnailCount The number of frames to be extracted
3779     * @param indices The indices of thumbnails wanted
3780     * @param callback The callback used to pass back the bitmaps
3781     * from startMs to endMs
3782     *
3783     * The decoded bitmaps are delivered through the callback rather than returned.
3784     **/
3785    void getPixelsList(String filename, final int width, final int height,
3786            long startMs, long endMs, int thumbnailCount, int[] indices,
3787            final MediaItem.GetThumbnailListCallback callback) {
3788        /* Round width and height up to even values */
3789        final int newWidth = (width + 1) & 0xFFFFFFFE;
3790        final int newHeight = (height + 1) & 0xFFFFFFFE;
3791        final int thumbnailSize = newWidth * newHeight;
3792
3793        /* Create a temp bitmap for resized thumbnails */
3794        final Bitmap tempBitmap =
3795                (newWidth != width || newHeight != height)
3796                ? Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888)
3797                : null;
3798
3799        final int[] rgb888 = new int[thumbnailSize];
3800        final IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
3801        nativeGetPixelsList(filename, rgb888, newWidth, newHeight,
3802                thumbnailCount, startMs, endMs, indices,
3803                new NativeGetPixelsListCallback() {
3804            public void onThumbnail(int index) {
3805                Bitmap bitmap = Bitmap.createBitmap(
3806                        width, height, Bitmap.Config.ARGB_8888);
3807                tmpBuffer.put(rgb888, 0, thumbnailSize);
3808                tmpBuffer.rewind();
3809
3810                if ((newWidth == width) && (newHeight == height)) {
3811                    bitmap.copyPixelsFromBuffer(tmpBuffer);
3812                } else {
3813                    /* Copy the out rgb buffer to temp bitmap */
3814                    tempBitmap.copyPixelsFromBuffer(tmpBuffer);
3815
3816                    /* Create a canvas to resize */
3817                    final Canvas canvas = new Canvas(bitmap);
3818                    canvas.drawBitmap(tempBitmap,
3819                            new Rect(0, 0, newWidth, newHeight),
3820                            new Rect(0, 0, width, height), sResizePaint);
3821
3822                    canvas.setBitmap(null);
3823                }
3824                callback.onThumbnail(bitmap, index);
3825            }
3826        });
3827
3828        if (tempBitmap != null) {
3829            tempBitmap.recycle();
3830        }
3831    }
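    /*
     * Illustrative thumbnail extraction (a sketch; the callback body is an assumption):
     *
     *     helper.getPixelsList(videoPath, 96, 96, 0, 10000, 5, new int[] {0, 1, 2, 3, 4},
     *             new MediaItem.GetThumbnailListCallback() {
     *                 public void onThumbnail(Bitmap bitmap, int index) {
     *                     // consume the decoded thumbnail for the given index here
     *                 }
     *             });
     */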
3832
3833    interface NativeGetPixelsListCallback {
3834        public void onThumbnail(int index);
3835    }
3836
3837    /**
3838     * This method generates the audio graph
3839     *
3840     * @param uniqueId The unique id
3841     * @param inFileName The inputFile
3842     * @param OutAudiGraphFileName The output audio graph file name
3843     * @param frameDuration The duration of each frame
3844     * @param audioChannels The number of audio channels
3845     * @param samplesCount The total number of samples
3846     * @param listener ExtractAudioWaveformProgressListener reference
3847     * @param isVideo Flag indicating whether the input file is a video file
3848     *
3849     **/
3850    void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
3851            int frameDuration, int audioChannels, int samplesCount,
3852            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
3853        String tempPCMFileName;
3854
3855        mExtractAudioWaveformProgressListener = listener;
3856
3857        /**
3858         * In case of Video, first call will generate the PCM file to make the
3859         * audio graph
3860         */
3861        if (isVideo) {
3862            tempPCMFileName = mProjectPath + "/" + uniqueId + ".pcm";
3863        } else {
3864            tempPCMFileName = mAudioTrackPCMFilePath;
3865        }
3866
3867        /**
3868         * For Video item, generate the PCM
3869         */
3870        if (isVideo) {
3871            nativeGenerateRawAudio(inFileName, tempPCMFileName);
3872        }
3873
3874        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
3875                audioChannels, samplesCount);
3876
3877        /**
3878         * Once the audio graph file is generated, delete the pcm file
3879         */
3880        if (isVideo) {
3881            new File(tempPCMFileName).delete();
3882        }
3883    }
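    /*
     * Illustrative call (a sketch; argument values are assumptions supplied by the caller).
     * For a video item the PCM file is generated first and deleted once the graph file has
     * been written; for an audio track the PCM at mAudioTrackPCMFilePath is reused.
     *
     *     helper.generateAudioGraph(mediaItemId, videoPath, graphPath,
     *             frameDurationMs, 2, totalSampleCount, waveformListener, true);
     */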
3884
3885    void clearPreviewSurface(Surface surface) {
3886        nativeClearSurface(surface);
3887    }
3888
3889    /**
3890     * Grab the semaphore which arbitrates access to the editor
3891     *
3892     * @throws InterruptedException
3893     */
3894    private void lock() throws InterruptedException {
3895        if (Log.isLoggable(TAG, Log.DEBUG)) {
3896            Log.d(TAG, "lock: grabbing semaphore", new Throwable());
3897        }
3898        mLock.acquire();
3899        if (Log.isLoggable(TAG, Log.DEBUG)) {
3900            Log.d(TAG, "lock: grabbed semaphore");
3901        }
3902    }
3903
3904    /**
3905     * Release the semaphore which arbitrates access to the editor
3906     */
3907    private void unlock() {
3908        if (Log.isLoggable(TAG, Log.DEBUG)) {
3909            Log.d(TAG, "unlock: releasing semaphore");
3910        }
3911        mLock.release();
3912    }
3913
3914    /**     Native Methods        */
3915    native Properties getMediaProperties(String file) throws IllegalArgumentException,
3916            IllegalStateException, RuntimeException, Exception;
3917
3918    /**
3919     * Get the version of ManualEdit.
3920     *
3921     * @return version of ManualEdit
3922     * @throws RuntimeException if an error occurred
3923     * @see Version
3924     */
3925    private static native Version getVersion() throws RuntimeException;
3926
3927    /**
3928     * Returns the video thumbnail in an array of integers. Output format is
3929     * ARGB8888.
3930     *
3931     * @param pixelArray the array that receives the pixel values
3932     * @param width width of the video thumbnail
3933     * @param height height of the video thumbnail
3934     * @param timeMS desired time of the thumbnail in ms
3935     * @return actual time in ms of the thumbnail generated
3936     * @throws IllegalStateException if the class has not been initialized
3937     * @throws IllegalArgumentException if the pixelArray is not available or
3938     *             one of the dimensions is negative or zero or the time is
3939     *             negative
3940     * @throws RuntimeException on runtime errors in native code
3941     */
3942    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
3943            long timeMS);
3944
3945    private native int nativeGetPixelsList(String fileName, int[] pixelArray,
3946            int width, int height, int nosofTN, long startTimeMs, long endTimeMs,
3947            int[] indices, NativeGetPixelsListCallback callback);
3948
3949    /**
3950     * Releases the JNI and cleans up the core native module. Should be called
3951     * only after init()
3952     *
3953     * @throws IllegalStateException if the method could not be called
3954     */
3955    private native void release() throws IllegalStateException, RuntimeException;
3956
3957    /*
3958     * Clear the preview surface
3959     */
3960    private native void nativeClearSurface(Surface surface);
3961
3962    /**
3963     * Stops the encoding. This method should only be called after encoding has
3964     * started using method <code> startEncoding</code>
3965     *
3966     * @throws IllegalStateException if the method could not be called
3967     */
3968    private native void stopEncoding() throws IllegalStateException, RuntimeException;
3969
3970
3971    private native void _init(String tempPath, String libraryPath)
3972            throws IllegalArgumentException, IllegalStateException, RuntimeException;
3973
3974    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
3975            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
3976            IllegalStateException, RuntimeException;
3977
3978    private native void nativePopulateSettings(EditSettings editSettings,
3979            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
3980    throws IllegalArgumentException, IllegalStateException, RuntimeException;
3981
3982    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
3983                                                 int surfaceWidth, int surfaceHeight)
3984                                                 throws IllegalArgumentException,
3985                                                 IllegalStateException, RuntimeException;
3986
3987    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
3988            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
3989    throws IllegalArgumentException, IllegalStateException, RuntimeException;
3990
3991    private native int nativeStopPreview();
3992
3993    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
3994            int frameDuration, int channels, int sampleCount);
3995
3996    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
3997
3998    private native int nativeGenerateClip(EditSettings editSettings)
3999    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4000
4001}
4002