MediaArtistNativeHelper.java revision 63298a980930c81754ab4fb7849a4ff61088d864
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.Iterator;
24import java.util.List;
25import java.util.concurrent.Semaphore;
26
27import android.graphics.Bitmap;
28import android.graphics.BitmapFactory;
29import android.graphics.Canvas;
30import android.graphics.Paint;
31import android.graphics.Rect;
32import android.media.videoeditor.VideoEditor.ExportProgressListener;
33import android.media.videoeditor.VideoEditor.PreviewProgressListener;
34import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
35import android.util.Log;
36import android.util.Pair;
37import android.view.Surface;
38
39/**
40 * This class provides native methods to be used by MediaArtist. {@hide}
41 */
42class MediaArtistNativeHelper {
43    private static final String TAG = "MediaArtistNativeHelper";
44
45    static {
46        System.loadLibrary("videoeditor_jni");
47    }
48
49    private static final int MAX_THUMBNAIL_PERMITTED = 8;
50
51    public static final int TASK_LOADING_SETTINGS = 1;
52    public static final int TASK_ENCODING = 2;
53
54    /**
55     *  The resize paint
56     */
57    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
58
59    private final VideoEditor mVideoEditor;
60
61    private EditSettings mStoryBoardSettings;
62
63    private String mOutputFilename;
64
65    private PreviewClipProperties mClipProperties = null;
66
67    private EditSettings mPreviewEditSettings;
68
69    private AudioSettings mAudioSettings = null;
70
71    private AudioTrack mAudioTrack = null;
72
73    private boolean mInvalidatePreviewArray = true;
74
75    private boolean mRegenerateAudio = true;
76
77    private String mExportFilename = null;
78
79    private int mProgressToApp;
80
81    /*
82     *  Semaphore to control preview calls
83     */
84    private final Semaphore mLock = new Semaphore(1, true);
85
86    private String mRenderPreviewOverlayFile;
87    private int mRenderPreviewRenderingMode;
88
89    private boolean mIsFirstProgress;
90
91    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
92
93    // Processing indication
94    public static final int PROCESSING_NONE          = 0;
95    public static final int PROCESSING_AUDIO_PCM     = 1;
96    public static final int PROCESSING_TRANSITION    = 2;
97    public static final int PROCESSING_KENBURNS      = 3;
98    public static final int PROCESSING_INTERMEDIATE1 = 11;
99    public static final int PROCESSING_INTERMEDIATE2 = 12;
100    public static final int PROCESSING_INTERMEDIATE3 = 13;
101    public static final int PROCESSING_EXPORT        = 20;
102
103    private int mProcessingState;
104    private Object mProcessingObject;
105    private PreviewProgressListener mPreviewProgressListener;
106    private ExportProgressListener mExportProgressListener;
107    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
108    private MediaProcessingProgressListener mMediaProcessingProgressListener;
109    private final String mProjectPath;
110
111    private long mPreviewProgress;
112
113    private String mAudioTrackPCMFilePath;
114
115    private int mTotalClips = 0;
116
117    private boolean mErrorFlagSet = false;
118
119    @SuppressWarnings("unused")
120    private int mManualEditContext;
121
122    /* Listeners */
123
124    /**
125     * Interface definition for a listener to be invoked when there is an update
126     * in a running task.
127     */
128    public interface OnProgressUpdateListener {
129        /**
130         * Called when there is an update.
131         *
132         * @param taskId id of the task reporting an update.
133         * @param progress progress of the task [0..100].
134         * @see #TASK_ENCODING
135         */
136        public void OnProgressUpdate(int taskId, int progress);
137    }
138
139    /** Defines the version. */
140    public final class Version {
141
142        /** Major version number */
143        public int major;
144
145        /** Minor version number */
146        public int minor;
147
148        /** Revision number */
149        public int revision;
150
151        /** VIDEOEDITOR major version number */
152        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
153
154        /** VIDEOEDITOR minor version number */
155        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
156
157        /** VIDEOEDITOR revision number */
158        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
159
160        /** Method which returns the current VIDEOEDITOR version */
161        public Version getVersion() {
162            Version version = new Version();
163
164            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
165            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
166            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
167
168            return version;
169        }
170    }
171
172    /**
173     * Defines output audio formats.
174     */
175    public final class AudioFormat {
176        /** No audio present in output clip. Used to generate video only clip */
177        public static final int NO_AUDIO = 0;
178
179        /** AMR Narrow Band. */
180        public static final int AMR_NB = 1;
181
182        /** Advanced Audio Coding (AAC). */
183        public static final int AAC = 2;
184
185        /** Advanced Audio Codec Plus (HE-AAC v1). */
186        public static final int AAC_PLUS = 3;
187
188        /** Advanced Audio Codec Plus (HE-AAC v2). */
189        public static final int ENHANCED_AAC_PLUS = 4;
190
191        /** MPEG layer 3 (MP3). */
192        public static final int MP3 = 5;
193
194        /** Enhanced Variable Rate Codec (EVRC). */
195        public static final int EVRC = 6;
196
197        /** PCM audio. */
198        public static final int PCM = 7;
199
200        /** No transcoding. Output audio format is same as input audio format */
201        public static final int NULL_AUDIO = 254;
202
203        /** Unsupported audio format. */
204        public static final int UNSUPPORTED_AUDIO = 255;
205    }
206
207    /**
208     * Defines audio sampling frequencies.
209     */
210    public final class AudioSamplingFrequency {
211        /**
212         * Default sampling frequency. Uses the default frequency for a specific
213         * audio format. For AAC the only supported (and thus default) sampling
214         * frequency is 16 kHz, regardless of the sampling frequency given in
215         * the output parameters.
216         **/
217        public static final int FREQ_DEFAULT = 0;
218
219        /** Audio sampling frequency of 8000 Hz. */
220        public static final int FREQ_8000 = 8000;
221
222        /** Audio sampling frequency of 11025 Hz. */
223        public static final int FREQ_11025 = 11025;
224
225        /** Audio sampling frequency of 12000 Hz. */
226        public static final int FREQ_12000 = 12000;
227
228        /** Audio sampling frequency of 16000 Hz. */
229        public static final int FREQ_16000 = 16000;
230
231        /** Audio sampling frequency of 22050 Hz. */
232        public static final int FREQ_22050 = 22050;
233
234        /** Audio sampling frequency of 24000 Hz. */
235        public static final int FREQ_24000 = 24000;
236
237        /** Audio sampling frequency of 32000 Hz. */
238        public static final int FREQ_32000 = 32000;
239
240        /** Audio sampling frequency of 44100 Hz. */
241        public static final int FREQ_44100 = 44100;
242
243        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
244        public static final int FREQ_48000 = 48000;
245    }
246
247    /**
248     * Defines the supported fixed audio and video bitrates. These values are
249     * for output audio and video only.
250     */
251    public final class Bitrate {
252        /** Variable bitrate. Means no bitrate regulation */
253        public static final int VARIABLE = -1;
254
255        /** An undefined bitrate. */
256        public static final int UNDEFINED = 0;
257
258        /** A bitrate of 9.2 kbits/s. */
259        public static final int BR_9_2_KBPS = 9200;
260
261        /** A bitrate of 12.2 kbits/s. */
262        public static final int BR_12_2_KBPS = 12200;
263
264        /** A bitrate of 16 kbits/s. */
265        public static final int BR_16_KBPS = 16000;
266
267        /** A bitrate of 24 kbits/s. */
268        public static final int BR_24_KBPS = 24000;
269
270        /** A bitrate of 32 kbits/s. */
271        public static final int BR_32_KBPS = 32000;
272
273        /** A bitrate of 48 kbits/s. */
274        public static final int BR_48_KBPS = 48000;
275
276        /** A bitrate of 64 kbits/s. */
277        public static final int BR_64_KBPS = 64000;
278
279        /** A bitrate of 96 kbits/s. */
280        public static final int BR_96_KBPS = 96000;
281
282        /** A bitrate of 128 kbits/s. */
283        public static final int BR_128_KBPS = 128000;
284
285        /** A bitrate of 192 kbits/s. */
286        public static final int BR_192_KBPS = 192000;
287
288        /** A bitrate of 256 kbits/s. */
289        public static final int BR_256_KBPS = 256000;
290
291        /** A bitrate of 288 kbits/s. */
292        public static final int BR_288_KBPS = 288000;
293
294        /** A bitrate of 384 kbits/s. */
295        public static final int BR_384_KBPS = 384000;
296
297        /** A bitrate of 512 kbits/s. */
298        public static final int BR_512_KBPS = 512000;
299
300        /** A bitrate of 800 kbits/s. */
301        public static final int BR_800_KBPS = 800000;
302
303        /** A bitrate of 2 Mbits/s. */
304        public static final int BR_2_MBPS = 2000000;
305
306        /** A bitrate of 5 Mbits/s. */
307        public static final int BR_5_MBPS = 5000000;
308
309        /** A bitrate of 8 Mbits/s. */
310        public static final int BR_8_MBPS = 8000000;
311    }
312
313    /**
314     * Defines all supported file types.
315     */
316    public final class FileType {
317        /** 3GPP file type. */
318        public static final int THREE_GPP = 0;
319
320        /** MP4 file type. */
321        public static final int MP4 = 1;
322
323        /** AMR file type. */
324        public static final int AMR = 2;
325
326        /** MP3 audio file type. */
327        public static final int MP3 = 3;
328
329        /** PCM audio file type. */
330        public static final int PCM = 4;
331
332        /** JPEG image file type. */
333        public static final int JPG = 5;
334
335        /** GIF image file type. */
336        public static final int GIF = 7;
337
338        /** PNG image file type. */
339        public static final int PNG = 8;
340
341        /** M4V file type. */
342        public static final int M4V = 10;
343
344        /** Unsupported file type. */
345        public static final int UNSUPPORTED = 255;
346    }
347
348    /**
349     * Defines rendering types. Rendering can only be applied to files
350     * containing video streams.
351     **/
352    public final class MediaRendering {
353        /**
354         * Resize to fit the output video, changing the aspect ratio if
355         * needed.
356         */
357        public static final int RESIZING = 0;
358
359        /**
360         * Crop the input video to fit the output video resolution.
361         **/
362        public static final int CROPPING = 1;
363
364        /**
365         * Resize to fit the output video resolution but maintain the aspect
366         * ratio. This framing type adds black borders if needed.
367         */
368        public static final int BLACK_BORDERS = 2;
369    }
370
371    /**
372     * Defines the results.
373     */
374    public final class Result {
375        /** No error; result OK. */
376        public static final int NO_ERROR = 0;
377
378        /** File not found */
379        public static final int ERR_FILE_NOT_FOUND = 1;
380
381        /**
382         * In case of UTF8 conversion, the size of the converted path will be
383         * more than the corresponding allocated buffer.
384         */
385        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
386
387        /** Invalid file type. */
388        public static final int ERR_INVALID_FILE_TYPE = 3;
389
390        /** Invalid effect kind. */
391        public static final int ERR_INVALID_EFFECT_KIND = 4;
392
393        /** Invalid video effect. */
394        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
395
396        /** Invalid audio effect. */
397        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
398
399        /** Invalid video transition. */
400        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
401
402        /** Invalid audio transition. */
403        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
404
405        /** Invalid encoding frame rate. */
406        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
407
408        /** External effect is called but this function is not set. */
409        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
410
411        /** External transition is called but this function is not set. */
412        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
413
414        /** Begin time cut is larger than the video clip duration. */
415        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
416
417        /** Begin cut time is larger than or equal to end cut time. */
418        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
419
420        /** Two consecutive transitions are overlapping on one clip. */
421        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
422
423        /** Internal error, type size mismatch. */
424        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
425
426        /** An input 3GPP file is invalid/corrupted. */
427        public static final int ERR_INVALID_3GPP_FILE = 16;
428
429        /** A file contains an unsupported video format. */
430        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
431
432        /** A file contains an unsupported audio format. */
433        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
434
435        /** A file format is not supported. */
436        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
437
438        /** An input clip has an unexpectedly large Video AU. */
439        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
440
441        /** An input clip has an unexpectedly large Audio AU. */
442        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
443
444        /** An input clip has a corrupted Audio AU. */
445        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
446
447        /** The video encoder encountered an Access Unit error. */
448        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
449
450        /** Unsupported video format for Video Editing. */
451        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
452
453        /** Unsupported H263 profile for Video Editing. */
454        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
455
456        /** Unsupported MPEG-4 profile for Video Editing. */
457        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
458
459        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
460        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
461
462        /** Unsupported audio format for Video Editing. */
463        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
464
465        /** File contains no supported stream. */
466        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
467
468        /** File contains no video stream or an unsupported video stream. */
469        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
470
471        /** Internal error, clip analysis version mismatch. */
472        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
473
474        /**
475         * At least one of the clip analysis has been generated on another
476         * platform (WIN32, ARM, etc.).
477         */
478        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
479
480        /** Clips don't have the same video format (H263 or MPEG4). */
481        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
482
483        /** Clips don't have the same frame size. */
484        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
485
486        /** Clips don't have the same MPEG-4 time scale. */
487        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
488
489        /** Clips don't have the same use of MPEG-4 data partitioning. */
490        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
491
492        /** MP3 clips can't be assembled. */
493        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
494
495        /**
496         * The input 3GPP file does not contain any supported audio or video
497         * track.
498         */
499        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
500
501        /**
502         * The volume of the added audio track (AddVolume) must be strictly
503         * greater than zero.
504         */
505        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
506
507        /**
508         * The time at which an audio track is added can't be greater than the
509         * input video track duration.
510         */
511        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
512
513        /** The audio track file format setting is undefined. */
514        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
515
516        /** The added audio track stream has an unsupported format. */
517        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
518
519        /** The audio mixing feature doesn't support the audio track type. */
520        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
521
522        /** The audio mixing feature doesn't support MP3 audio tracks. */
523        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
524
525        /**
526         * An added audio track limits the available features: uiAddCts must be
527         * 0 and bRemoveOriginal must be true.
528         */
529        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
530
531        /**
532         * An added AAC audio track limits the available features: uiAddCts must
533         * be 0 and bRemoveOriginal must be true.
534         */
535        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
536
537        /** Input audio track is not of a type that can be mixed with output. */
538        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
539
540        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
541        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
542
543        /**
544         * An added EVRC audio track limits the available features: uiAddCts must
545         * be 0 and bRemoveOriginal must be true.
546         */
547        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
548
549        /** H263 profiles other than 0 are not supported. */
550        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
551
552        /** File contains no video stream or an unsupported video stream. */
553        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
554
555        /** Transcoding of the input file(s) is necessary. */
556        public static final int WAR_TRANSCODING_NECESSARY = 53;
557
558        /**
559         * The size of the output file will exceed the maximum configured value.
560         */
561        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
562
563        /** The time scale is too big. */
564        public static final int WAR_TIMESCALE_TOO_BIG = 55;
565
566        /** The year is out of range */
567        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
568
569        /** The directory could not be opened */
570        public static final int ERR_DIR_OPEN_FAILED = 57;
571
572        /** The directory could not be read */
573        public static final int ERR_DIR_READ_FAILED = 58;
574
575        /** There are no more entries in the current directory */
576        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
577
578        /** The input parameter(s) are invalid */
579        public static final int ERR_PARAMETER = 60;
580
581        /** There is a state machine error */
582        public static final int ERR_STATE = 61;
583
584        /** Memory allocation failed */
585        public static final int ERR_ALLOC = 62;
586
587        /** Context is invalid */
588        public static final int ERR_BAD_CONTEXT = 63;
589
590        /** Context creation failed */
591        public static final int ERR_CONTEXT_FAILED = 64;
592
593        /** Invalid stream ID */
594        public static final int ERR_BAD_STREAM_ID = 65;
595
596        /** Invalid option ID */
597        public static final int ERR_BAD_OPTION_ID = 66;
598
599        /** The option is write only */
600        public static final int ERR_WRITE_ONLY = 67;
601
602        /** The option is read only */
603        public static final int ERR_READ_ONLY = 68;
604
605        /** The feature is not implemented in this version */
606        public static final int ERR_NOT_IMPLEMENTED = 69;
607
608        /** The media type is not supported */
609        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
610
611        /** No data to be encoded */
612        public static final int WAR_NO_DATA_YET = 71;
613
614        /** No data to be decoded */
615        public static final int WAR_NO_MORE_STREAM = 72;
616
617        /** Time stamp is invalid */
618        public static final int WAR_INVALID_TIME = 73;
619
620        /** No more data to be decoded */
621        public static final int WAR_NO_MORE_AU = 74;
622
623        /** Semaphore timed out */
624        public static final int WAR_TIME_OUT = 75;
625
626        /** Memory buffer is full */
627        public static final int WAR_BUFFER_FULL = 76;
628
629        /** Server has asked for redirection */
630        public static final int WAR_REDIRECT = 77;
631
632        /** Too many streams in input */
633        public static final int WAR_TOO_MUCH_STREAMS = 78;
634
635        /** The file cannot be opened or written to because it is locked */
636        public static final int ERR_FILE_LOCKED = 79;
637
638        /** The file access mode is invalid */
639        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
640
641        /** The file pointer points to an invalid location */
642        public static final int ERR_FILE_INVALID_POSITION = 81;
643
644        /** Invalid string */
645        public static final int ERR_STR_BAD_STRING = 94;
646
647        /** The input string cannot be converted */
648        public static final int ERR_STR_CONV_FAILED = 95;
649
650        /** The string size is too large */
651        public static final int ERR_STR_OVERFLOW = 96;
652
653        /** Bad string arguments */
654        public static final int ERR_STR_BAD_ARGS = 97;
655
656        /** The string value is larger than maximum size allowed */
657        public static final int WAR_STR_OVERFLOW = 98;
658
659        /** The string value is not present in this comparison operation */
660        public static final int WAR_STR_NOT_FOUND = 99;
661
662        /** The thread is not started */
663        public static final int ERR_THREAD_NOT_STARTED = 100;
664
665        /** Transcoding done warning */
666        public static final int WAR_TRANSCODING_DONE = 101;
667
668        /** Unsupported media type */
669        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
670
671        /** Input file contains invalid/unsupported streams */
672        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
673
674        /** Invalid input file */
675        public static final int ERR_INVALID_INPUT_FILE = 104;
676
677        /** Invalid output video format */
678        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
679
680        /** Invalid output video frame size */
681        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
682
683        /** Invalid output video frame rate */
684        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
685
686        /** Invalid output audio format */
687        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
688
689        /** Invalid video frame size for H.263 */
690        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
691
692        /** Invalid video frame rate for H.263 */
693        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
694
695        /** Invalid playback duration */
696        public static final int ERR_DURATION_IS_NULL = 111;
697
698        /** Invalid H.263 profile in file */
699        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
700
701        /** Invalid AAC sampling frequency */
702        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
703
704        /** Audio conversion failure */
705        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
706
707        /** Invalid trim start and end times */
708        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
709
710        /** End time smaller than start time for trim */
711        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
712
713        /** Maximum output file size is too small */
714        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
715
716        /** Output video bitrate is too low */
717        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
718
719        /** Output audio bitrate is too low */
720        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
721
722        /** Output video bitrate is too high */
723        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
724
725        /** Output audio bitrate is too high */
726        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
727
728        /** Output file size is too small */
729        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
730
731        /** Unknown stream type */
732        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
733
734        /** Invalid metadata in input stream */
735        public static final int WAR_READER_NO_METADATA = 124;
736
737        /** Invalid file reader info warning */
738        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
739
740        /** Warning to indicate that the writer is being stopped */
741        public static final int WAR_WRITER_STOP_REQ = 131;
742
743        /** Video decoder failed to provide frame for transcoding */
744        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
745
746        /** Video deblocking filter is not implemented */
747        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
748
749        /** H.263 decoder profile not supported */
750        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
751
752        /** The input file contains an unsupported H.263 profile */
753        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
754
755        /** There is no more space to store the output file */
756        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
757
758        /** Internal error. */
759        public static final int ERR_INTERNAL = 255;
760    }
761
762    /**
763     * Defines output video formats.
764     */
765    public final class VideoFormat {
766        /** No video present in output clip. Used to generate audio only clip */
767        public static final int NO_VIDEO = 0;
768
769        /** H263 baseline format. */
770        public static final int H263 = 1;
771
772        /** MPEG4 video Simple Profile format. */
773        public static final int MPEG4 = 2;
774
775        /** MPEG4 video Simple Profile format with support for EMP. */
776        public static final int MPEG4_EMP = 3;
777
778        /** H264 video */
779        public static final int H264 = 4;
780
781        /** No transcoding. Output video format is same as input video format */
782        public static final int NULL_VIDEO = 254;
783
784        /** Unsupported video format. */
785        public static final int UNSUPPORTED = 255;
786    }
787
788    /** Defines video profiles and levels. */
789    public final class VideoProfile {
790        /** MPEG4, Simple Profile, Level 0. */
791        public static final int MPEG4_SP_LEVEL_0 = 0;
792
793        /** MPEG4, Simple Profile, Level 0B. */
794        public static final int MPEG4_SP_LEVEL_0B = 1;
795
796        /** MPEG4, Simple Profile, Level 1. */
797        public static final int MPEG4_SP_LEVEL_1 = 2;
798
799        /** MPEG4, Simple Profile, Level 2. */
800        public static final int MPEG4_SP_LEVEL_2 = 3;
801
802        /** MPEG4, Simple Profile, Level 3. */
803        public static final int MPEG4_SP_LEVEL_3 = 4;
804
805        /** H263, Profile 0, Level 10. */
806        public static final int H263_PROFILE_0_LEVEL_10 = 5;
807
808        /** H263, Profile 0, Level 20. */
809        public static final int H263_PROFILE_0_LEVEL_20 = 6;
810
811        /** H263, Profile 0, Level 30. */
812        public static final int H263_PROFILE_0_LEVEL_30 = 7;
813
814        /** H263, Profile 0, Level 40. */
815        public static final int H263_PROFILE_0_LEVEL_40 = 8;
816
817        /** H263, Profile 0, Level 45. */
818        public static final int H263_PROFILE_0_LEVEL_45 = 9;
819
820        /** MPEG4, Simple Profile, Level 4A. */
821        public static final int MPEG4_SP_LEVEL_4A = 10;
822
823        /** MPEG4, Simple Profile, Level 5. */
824        public static final int MPEG4_SP_LEVEL_5 = 11;
825
826        /** H264, Profile 0, Level 1. */
827        public static final int H264_PROFILE_0_LEVEL_1 = 12;
828
829        /** H264, Profile 0, Level 1b. */
830        public static final int H264_PROFILE_0_LEVEL_1b = 13;
831
832        /** H264, Profile 0, Level 1.1 */
833        public static final int H264_PROFILE_0_LEVEL_1_1 = 14;
834
835        /** H264, Profile 0, Level 1.2 */
836        public static final int H264_PROFILE_0_LEVEL_1_2 = 15;
837
838        /** H264, Profile 0, Level 1.3 */
839        public static final int H264_PROFILE_0_LEVEL_1_3 = 16;
840
841        /** H264, Profile 0, Level 2. */
842        public static final int H264_PROFILE_0_LEVEL_2 = 17;
843
844        /** H264, Profile 0, Level 2.1 */
845        public static final int H264_PROFILE_0_LEVEL_2_1 = 18;
846
847        /** H264, Profile 0, Level 2.2 */
848        public static final int H264_PROFILE_0_LEVEL_2_2 = 19;
849
850        /** H264, Profile 0, Level 3. */
851        public static final int H264_PROFILE_0_LEVEL_3 = 20;
852
853        /** H264, Profile 0, Level 3.1 */
854        public static final int H264_PROFILE_0_LEVEL_3_1 = 21;
855
856        /** H264, Profile 0, Level 3.2 */
857        public static final int H264_PROFILE_0_LEVEL_3_2 = 22;
858
859        /** H264, Profile 0, Level 4. */
860        public static final int H264_PROFILE_0_LEVEL_4 = 23;
861
862        /** H264, Profile 0, Level 4.1 */
863        public static final int H264_PROFILE_0_LEVEL_4_1 = 24;
864
865        /** H264, Profile 0, Level 4.2 */
866        public static final int H264_PROFILE_0_LEVEL_4_2 = 25;
867
868        /** H264, Profile 0, Level 5. */
869        public static final int H264_PROFILE_0_LEVEL_5 = 26;
870
871        /** H264, Profile 0, Level 5.1 */
872        public static final int H264_PROFILE_0_LEVEL_5_1 = 27;
873
874        /** Profile out of range. */
875        public static final int OUT_OF_RANGE = 255;
876    }
877
878    /** Defines video frame sizes. */
879    public final class VideoFrameSize {
880
881        public static final int SIZE_UNDEFINED = -1;
882
883        /** SQCIF 128 x 96 pixels. */
884        public static final int SQCIF = 0;
885
886        /** QQVGA 160 x 120 pixels. */
887        public static final int QQVGA = 1;
888
889        /** QCIF 176 x 144 pixels. */
890        public static final int QCIF = 2;
891
892        /** QVGA 320 x 240 pixels. */
893        public static final int QVGA = 3;
894
895        /** CIF 352 x 288 pixels. */
896        public static final int CIF = 4;
897
898        /** VGA 640 x 480 pixels. */
899        public static final int VGA = 5;
900
901        /** WVGA 800 x 480 pixels. */
902        public static final int WVGA = 6;
903
904        /** NTSC 720 x 480 pixels. */
905        public static final int NTSC = 7;
906
907        /** nHD 640 x 360 pixels. */
908        public static final int nHD = 8;
909
910        /** WVGA16x9 854 x 480 pixels. */
911        public static final int WVGA16x9 = 9;
912
913        /** 720p 1280 x 720 pixels. */
914        public static final int V720p = 10;
915
916        /** W720p 1080 x 720 pixels. */
917        public static final int W720p = 11;
918
919        /** S720p 960 x 720 pixels. */
920        public static final int S720p = 12;
921    }
922
923    /**
924     * Defines output video frame rates.
925     */
926    public final class VideoFrameRate {
927        /** Frame rate of 5 frames per second. */
928        public static final int FR_5_FPS = 0;
929
930        /** Frame rate of 7.5 frames per second. */
931        public static final int FR_7_5_FPS = 1;
932
933        /** Frame rate of 10 frames per second. */
934        public static final int FR_10_FPS = 2;
935
936        /** Frame rate of 12.5 frames per second. */
937        public static final int FR_12_5_FPS = 3;
938
939        /** Frame rate of 15 frames per second. */
940        public static final int FR_15_FPS = 4;
941
942        /** Frame rate of 20 frames per second. */
943        public static final int FR_20_FPS = 5;
944
945        /** Frame rate of 25 frames per second. */
946        public static final int FR_25_FPS = 6;
947
948        /** Frame rate of 30 frames per second. */
949        public static final int FR_30_FPS = 7;
950    }
951
952    /**
953     * Defines Video Effect Types.
954     */
955    public static class VideoEffect {
956
957        public static final int NONE = 0;
958
959        public static final int FADE_FROM_BLACK = 8;
960
961        public static final int CURTAIN_OPENING = 9;
962
963        public static final int FADE_TO_BLACK = 16;
964
965        public static final int CURTAIN_CLOSING = 17;
966
967        public static final int EXTERNAL = 256;
968
969        public static final int BLACK_AND_WHITE = 257;
970
971        public static final int PINK = 258;
972
973        public static final int GREEN = 259;
974
975        public static final int SEPIA = 260;
976
977        public static final int NEGATIVE = 261;
978
979        public static final int FRAMING = 262;
980
981        public static final int TEXT = 263;
982
983        public static final int ZOOM_IN = 264;
984
985        public static final int ZOOM_OUT = 265;
986
987        public static final int FIFTIES = 266;
988
989        public static final int COLORRGB16 = 267;
990
991        public static final int GRADIENT = 268;
992    }
993
994    /**
995     * Defines the video transitions.
996     */
997    public static class VideoTransition {
998        /** No transition */
999        public static final int NONE = 0;
1000
1001        /** Cross fade transition */
1002        public static final int CROSS_FADE = 1;
1003
1004        /** External transition. Currently not available. */
1005        public static final int EXTERNAL = 256;
1006
1007        /** AlphaMagic transition. */
1008        public static final int ALPHA_MAGIC = 257;
1009
1010        /** Slide transition. */
1011        public static final int SLIDE_TRANSITION = 258;
1012
1013        /** Fade to black transition. */
1014        public static final int FADE_BLACK = 259;
1015    }
1016
1017    /**
1018     * Defines settings for the AlphaMagic transition
1019     */
1020    public static class AlphaMagicSettings {
1021        /** Name of the alpha file (JPEG file). */
1022        public String file;
1023
1024        /** Blending percentage [0..100] 0 = no blending. */
1025        public int blendingPercent;
1026
1027        /** Invert the default rotation direction of the AlphaMagic effect. */
1028        public boolean invertRotation;
1029
1030        public int rgbWidth;
1031        public int rgbHeight;
1032    }
1033
1034    /** Defines the direction of the Slide transition. */
1035    public static final class SlideDirection {
1036
1037        /** Right out left in. */
1038        public static final int RIGHT_OUT_LEFT_IN = 0;
1039
1040        /** Left out right in. */
1041        public static final int LEFT_OUT_RIGTH_IN = 1;
1042
1043        /** Top out bottom in. */
1044        public static final int TOP_OUT_BOTTOM_IN = 2;
1045
1046        /** Bottom out top in */
1047        public static final int BOTTOM_OUT_TOP_IN = 3;
1048    }
1049
1050    /** Defines the Slide transition settings. */
1051    public static class SlideTransitionSettings {
1052        /**
1053         * Direction of the slide transition. See {@link SlideDirection
1054         * SlideDirection} for valid values.
1055         */
1056        public int direction;
1057    }
1058
1059    /**
1060     * Defines the settings of a single clip.
1061     */
1062    public static class ClipSettings {
1063
1064        /**
1065         * The path to the clip file.
1066         * <p>
1067         * File format of the clip; it can be:
1068         * <ul>
1069         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
1070         * <li>JPG file
1071         * </ul>
1072         */
1073
1074        public String clipPath;
1075
1076        /**
1077         * The path of the decoded file. This is used only for image files.
1078         */
1079        public String clipDecodedPath;
1080
1081        /**
1082         * The path of the Original file. This is used only for image files.
1083         */
1084        public String clipOriginalPath;
1085
1086        /**
1087         * File type of the clip. See {@link FileType FileType} for valid
1088         * values.
1089         */
1090        public int fileType;
1091
1092        /** Begin of the cut in the clip in milliseconds. */
1093        public int beginCutTime;
1094
1095        /**
1096         * End of the cut in the clip in milliseconds. Set both
1097         * <code>beginCutTime</code> and <code>endCutTime</code> to
1098         * <code>0</code> to get the full length of the clip without a cut. In
1099         * the case of a JPG clip, this is the duration of the JPEG file.
1100         */
1101        public int endCutTime;
1102
1103        /**
1104         * Begin of the cut in the clip in percentage of the file duration.
1105         */
1106        public int beginCutPercent;
1107
1108        /**
1109         * End of the cut in the clip in percentage of the file duration. Set
1110         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1111         * <code>0</code> to get the full length of the clip without a cut.
1112         */
1113        public int endCutPercent;
1114
1115        /** Enable panning and zooming. */
1116        public boolean panZoomEnabled;
1117
1118        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1119        public int panZoomPercentStart;
1120
1121        /** Top left X coordinate at start of clip. */
1122        public int panZoomTopLeftXStart;
1123
1124        /** Top left Y coordinate at start of clip. */
1125        public int panZoomTopLeftYStart;
1126
1127        /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom */
1128        public int panZoomPercentEnd;
1129
1130        /** Top left X coordinate at end of clip. */
1131        public int panZoomTopLeftXEnd;
1132
1133        /** Top left Y coordinate at end of clip. */
1134        public int panZoomTopLeftYEnd;
1135
1136        /**
1137         * The media rendering to use. See {@link MediaRendering MediaRendering}
1138         * for valid values.
1139         */
1140        public int mediaRendering;
1141
1142        /**
1143         * RGB width and height.
1144         */
1145        public int rgbWidth;
1146        public int rgbHeight;
1147    }
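
    /*
     * Illustrative sketch only (not part of the original source): a caller
     * keeping just the first five seconds of a 3GPP clip, rendered with black
     * borders, could fill in the fields above as follows. The input path is
     * hypothetical.
     *
     *     ClipSettings clip = new ClipSettings();
     *     clip.clipPath = "/sdcard/example.3gp";            // hypothetical input
     *     clip.fileType = FileType.THREE_GPP;
     *     clip.beginCutTime = 0;                            // cut start, in milliseconds
     *     clip.endCutTime = 5000;                           // cut end, in milliseconds
     *     clip.mediaRendering = MediaRendering.BLACK_BORDERS;
     */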
1148
1149    /**
1150     * Defines settings for a transition.
1151     */
1152    public static class TransitionSettings {
1153
1154        /** Duration of the transition in milliseconds. */
1155        public int duration;
1156
1157        /**
1158         * Transition type for video. See {@link VideoTransition
1159         * VideoTransition} for valid values.
1160         */
1161        public int videoTransitionType;
1162
1163        /**
1164         * Transition type for audio. See {@link AudioTransition
1165         * AudioTransition} for valid values.
1166         */
1167        public int audioTransitionType;
1168
1169        /**
1170         * Transition behaviour. See {@link TransitionBehaviour
1171         * TransitionBehaviour} for valid values.
1172         */
1173        public int transitionBehaviour;
1174
1175        /**
1176         * Settings for AlphaMagic transition. Only needs to be set if
1177         * <code>videoTransitionType</code> is set to
1178         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1179         * {@link AlphaMagicSettings AlphaMagicSettings}.
1180         */
1181        public AlphaMagicSettings alphaSettings;
1182
1183        /**
1184         * Settings for the Slide transition. See
1185         * {@link SlideTransitionSettings SlideTransitionSettings}.
1186         */
1187        public SlideTransitionSettings slideSettings;
1188    }
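
    /*
     * Illustrative sketch only (not part of the original source): a one-second
     * AlphaMagic transition with a cross-fading audio track could be described
     * as below. The alpha mask path is hypothetical; alphaSettings is only read
     * when videoTransitionType is VideoTransition.ALPHA_MAGIC.
     *
     *     TransitionSettings transition = new TransitionSettings();
     *     transition.duration = 1000;                       // milliseconds
     *     transition.videoTransitionType = VideoTransition.ALPHA_MAGIC;
     *     transition.audioTransitionType = AudioTransition.CROSS_FADE;
     *     transition.transitionBehaviour = TransitionBehaviour.LINEAR;
     *     transition.alphaSettings = new AlphaMagicSettings();
     *     transition.alphaSettings.file = "/sdcard/mask.jpg";   // hypothetical mask
     *     transition.alphaSettings.blendingPercent = 50;
     *     transition.alphaSettings.invertRotation = false;
     */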
1189
1190    public static final class AudioTransition {
1191        /** No audio transition. */
1192        public static final int NONE = 0;
1193
1194        /** Cross-fade audio transition. */
1195        public static final int CROSS_FADE = 1;
1196    }
1197
1198    /**
1199     * Defines transition behaviors.
1200     */
1201    public static final class TransitionBehaviour {
1202
1203        /** The transition uses an increasing speed. */
1204        public static final int SPEED_UP = 0;
1205
1206        /** The transition uses a linear (constant) speed. */
1207        public static final int LINEAR = 1;
1208
1209        /** The transition uses a decreasing speed. */
1210        public static final int SPEED_DOWN = 2;
1211
1212        /**
1213         * The transition uses a constant speed, but slows down in the middle
1214         * section.
1215         */
1216        public static final int SLOW_MIDDLE = 3;
1217
1218        /**
1219         * The transition uses a constant speed, but increases speed in the
1220         * middle section.
1221         */
1222        public static final int FAST_MIDDLE = 4;
1223    }
1224
1225    /**
1226     * Defines settings for the background music.
1227     */
1228    public static class BackgroundMusicSettings {
1229
1230        /** Background music file. */
1231        public String file;
1232
1233        /** File type. See {@link FileType FileType} for valid values. */
1234        public int fileType;
1235
1236        /**
1237         * Insertion time, in milliseconds, in the output video at which the
1238         * background music must be inserted.
1239         */
1240        public long insertionTime;
1241
1242        /**
1243         * Volume of the background music track to use, as a percentage. If
1244         * this field is set to 100, the background music will replace the audio
1245         * from the video input file(s).
1246         */
1247        public int volumePercent;
1248
1249        /**
1250         * Start time in milliseconds in the background music file from which
1251         * the background music should loop. Set both <code>beginLoop</code> and
1252         * <code>endLoop</code> to <code>0</code> to disable looping.
1253         */
1254        public long beginLoop;
1255
1256        /**
1257         * End time in milliseconds in the background music file up to which the
1258         * background music should loop. Set both <code>beginLoop</code> and
1259         * <code>endLoop</code> to <code>0</code> to disable looping.
1260         */
1261        public long endLoop;
1262
1263        public boolean enableDucking;
1264
1265        public int duckingThreshold;
1266
1267        public int lowVolume;
1268
1269        public boolean isLooping;
1270    }
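
    /*
     * Illustrative sketch only (not part of the original source): background
     * music inserted at the start of the output and looped over its first ten
     * seconds could be expressed as follows. The file path is hypothetical; a
     * volumePercent below 100 mixes the track with the original audio.
     *
     *     BackgroundMusicSettings music = new BackgroundMusicSettings();
     *     music.file = "/sdcard/music.mp3";                 // hypothetical track
     *     music.fileType = FileType.MP3;
     *     music.insertionTime = 0;                          // milliseconds
     *     music.volumePercent = 50;
     *     music.beginLoop = 0;                              // loop start, in milliseconds
     *     music.endLoop = 10000;                            // loop end, in milliseconds
     *     music.isLooping = true;
     */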
1271
1272    /** Defines the audio effects. */
1273    public static class AudioEffect {
1274        /** No audio effect. */
1275        public static final int NONE = 0;
1276
1277        /** Fade-in effect. */
1278        public static final int FADE_IN = 8;
1279
1280        /** Fade-out effect. */
1281        public static final int FADE_OUT = 16;
1282    }
1283
1284    /** Defines the effect settings. */
1285    public static class EffectSettings {
1286
1287        /** Start time of the effect in milliseconds. */
1288        public int startTime;
1289
1290        /** Duration of the effect in milliseconds. */
1291        public int duration;
1292
1293        /**
1294         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1295         * values.
1296         */
1297        public int videoEffectType;
1298
1299        /**
1300         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1301         * values.
1302         */
1303        public int audioEffectType;
1304
1305        /**
1306         * Start time of the effect as a percentage of the duration of the clip.
1307         * A value of 0 percent means the effect starts at the beginning of the
1308         * clip.
1309         */
1310        public int startPercent;
1311
1312        /**
1313         * Duration of the effect as a percentage of the duration of the clip.
1314         */
1315        public int durationPercent;
1316
1317        /**
1318         * Framing file.
1319         * <p>
1320         * This field is only used when the field <code>videoEffectType</code>
1321         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1322         * this field is ignored.
1323         */
1324        public String framingFile;
1325
1326        /**
1327         * Framing buffer.
1328         * <p>
1329         * This field is only used when the field <code>videoEffectType</code>
1330         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1331         * this field is ignored.
1332         */
1333        public int[] framingBuffer;
1334
1335        /**
1336         * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5), or ARGB_8888 (6).
1337         **/
1338
1339        public int bitmapType;
1340
1341        public int width;
1342
1343        public int height;
1344
1345        /**
1346         * Top left x coordinate. This coordinate is used to set the x
1347         * coordinate of the picture in the framing file when the framing file
1348         * is selected. The x coordinate is also used to set the location of the
1349         * text in the text effect.
1350         * <p>
1351         * This field is only used when the field <code>videoEffectType</code>
1352         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1353         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1354         * ignored.
1355         */
1356        public int topLeftX;
1357
1358        /**
1359         * Top left y coordinate. This coordinate is used to set the y
1360         * coordinate of the picture in the framing file when the framing file
1361         * is selected. The y coordinate is also used to set the location of the
1362         * text in the text effect.
1363         * <p>
1364         * This field is only used when the field <code>videoEffectType</code>
1365         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1366         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1367         * ignored.
1368         */
1369        public int topLeftY;
1370
1371        /**
1372         * Whether the frame should be resized. If this field is set to
1373         * <code>true</code> then the frame size is matched with the output
1374         * video size.
1375         * <p>
1376         * This field is only used when the field <code>videoEffectType</code>
1377         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1378         * this field is ignored.
1379         */
1380        public boolean framingResize;
1381
1382        /**
1383         * Size to which the framing buffer needs to be resized.
1384         * This is valid only if <code>framingResize</code> is true.
1385         */
1386        public int framingScaledSize;
1387        /**
1388         * Text to insert in the video.
1389         * <p>
1390         * This field is only used when the field <code>videoEffectType</code>
1391         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1392         * field is ignored.
1393         */
1394        public String text;
1395
1396        /**
1397         * Text attributes for the text to insert in the video.
1398         * <p>
1399         * This field is only used when the field <code>videoEffectType</code>
1400         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1401         * field is ignored. For more details about this field see the
1402         * integration guide.
1403         */
1404        public String textRenderingData;
1405
1406        /** Width of the text buffer in pixels. */
1407        public int textBufferWidth;
1408
1409        /** Height of the text buffer in pixels. */
1410        public int textBufferHeight;
1411
1412        /**
1413         * Processing rate for the fifties effect. A high value (e.g. 30)
1414         * results in high effect strength.
1415         * <p>
1416         * This field is only used when the field <code>videoEffectType</code>
1417         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1418         * this field is ignored.
1419         */
1420        public int fiftiesFrameRate;
1421
1422        /**
1423         * RGB 16 color of the RGB16 and gradient color effect.
1424         * <p>
1425         * This field is only used when the field <code>videoEffectType</code>
1426         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1427         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1428         * field is ignored.
1429         */
1430        public int rgb16InputColor;
1431
1432        /**
1433         * Start alpha blending percentage.
1434         * <p>
1435         * This field is only used when the field <code>videoEffectType</code>
1436         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1437         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1438         * is ignored.
1439         */
1440        public int alphaBlendingStartPercent;
1441
1442        /**
1443         * Middle alpha blending percentage.
1444         * <p>
1445         * This field is only used when the field <code>videoEffectType</code>
1446         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1447         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1448         * is ignored.
1449         */
1450        public int alphaBlendingMiddlePercent;
1451
1452        /**
1453         * End alpha blending percentage.
1454         * <p>
1455         * This field is only used when the field <code>videoEffectType</code>
1456         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1457         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1458         * is ignored.
1459         */
1460        public int alphaBlendingEndPercent;
1461
1462        /**
1463         * Duration of the fade-in phase, as a percentage of the effect duration.
1464         * <p>
1465         * This field is only used when the field <code>videoEffectType</code>
1466         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1467         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1468         * is ignored.
1469         */
1470        public int alphaBlendingFadeInTimePercent;
1471
1472        /**
1473         * Duration of the fade-out phase, as a percentage of the effect duration.
1474         * <p>
1475         * This field is only used when the field <code>videoEffectType</code>
1476         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1477         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1478         * is ignored.
1479         */
1480        public int alphaBlendingFadeOutTimePercent;
1481    }
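
    /*
     * Illustrative sketch only (not part of the original source): a
     * fade-from-black video effect covering the first second of a clip could
     * be described with the fields above as
     *
     *     EffectSettings effect = new EffectSettings();
     *     effect.startTime = 0;                             // milliseconds
     *     effect.duration = 1000;                           // milliseconds
     *     effect.videoEffectType = VideoEffect.FADE_FROM_BLACK;
     *     effect.audioEffectType = AudioEffect.NONE;
     */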
1482
1483    /** Defines the clip properties for preview */
1484    public static class PreviewClips {
1485
1486        /**
1487         * The path to the clip file.
1488         * <p>
1489         * File format of the clip; it can be:
1490         * <ul>
1491         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
1492         * <li>JPG file
1493         * </ul>
1494         */
1495
1496        public String clipPath;
1497
1498        /**
1499         * File type of the clip. See {@link FileType FileType} for valid
1500         * values.
1501         */
1502        public int fileType;
1503
1504        /** Start playback time of the clip in milliseconds. */
1505        public long beginPlayTime;
1506
1507        public long endPlayTime;
1508
1509        /**
1510         * The media rendering to use. See {@link MediaRendering MediaRendering}
1511         * for valid values.
1512         */
1513        public int mediaRendering;
1514
1515    }
1516
1517    /** Defines the audio settings. */
1518    public static class AudioSettings {
1519
1520        String pFile;
1521
1522        /** < PCM file path */
1523        String Id;
1524
1525        boolean bRemoveOriginal;
1526
1527        /** < If true, the original audio track is not taken into account */
1528        int channels;
1529
1530        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1531        int Fs;
1532
1533        /**
1534         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1535         * BGM clip
1536         */
1537        int ExtendedFs;
1538
1539        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1540        long startMs;
1541
1542        /** < Time, in milliseconds, at which the added audio track is inserted */
1543        long beginCutTime;
1544
1545        long endCutTime;
1546
1547        int fileType;
1548
1549        int volume;
1550
1551        /** < Volume, in percentage, of the added audio track */
1552        boolean loop;
1553
1554        /** < Looping on/off > **/
1555
1556        /** Audio mix and Duck **/
1557        int ducking_threshold;
1558
1559        int ducking_lowVolume;
1560
1561        boolean bInDucking_enable;
1562
1563        String pcmFilePath;
1564    }
1565
1566    /** Encapsulates preview clips and effect settings */
1567    public static class PreviewSettings {
1568
1569        public PreviewClips[] previewClipsArray;
1570
1571        /** The effect settings. */
1572        public EffectSettings[] effectSettingsArray;
1573
1574    }
1575
1576    /** Encapsulates clip properties */
1577    public static class PreviewClipProperties {
1578
1579        public Properties[] clipProperties;
1580
1581    }
1582
1583    /** Defines the editing settings. */
1584    public static class EditSettings {
1585
1586        /**
1587         * Array of clip settings. There is one <code>clipSetting</code> for
1588         * each clip.
1589         */
1590        public ClipSettings[] clipSettingsArray;
1591
1592        /**
1593         * Array of transition settings. If there are n clips (and thus n
1594         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1595         * <code>transitionSettings</code> in
1596         * <code>transitionSettingsArray</code>.
1597         */
1598        public TransitionSettings[] transitionSettingsArray;
1599
1600        /** The effect settings. */
1601        public EffectSettings[] effectSettingsArray;
1602
1603        /**
1604         * Video frame rate of the output clip. See {@link VideoFrameRate
1605         * VideoFrameRate} for valid values.
1606         */
1607        public int videoFrameRate;
1608
1609        /** Output file name. Must be an absolute path. */
1610        public String outputFile;
1611
1612        /**
1613         * Size of the video frames in the output clip. See
1614         * {@link VideoFrameSize VideoFrameSize} for valid values.
1615         */
1616        public int videoFrameSize;
1617
1618        /**
1619         * Format of the video stream in the output clip. See
1620         * {@link VideoFormat VideoFormat} for valid values.
1621         */
1622        public int videoFormat;
1623
1624        /**
1625         * Format of the audio stream in the output clip. See
1626         * {@link AudioFormat AudioFormat} for valid values.
1627         */
1628        public int audioFormat;
1629
1630        /**
1631         * Sampling frequency of the audio stream in the output clip. See
1632         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1633         * values.
1634         */
1635        public int audioSamplingFreq;
1636
1637        /**
1638         * Maximum file size. Use this field to limit the size of the
1639         * output clip. Set it to <code>0</code> to make the class ignore
1640         * this field.
1641         */
1642        public int maxFileSize;
1643
1644        /**
1645         * Number of audio channels in output clip. Use <code>0</code> for none,
1646         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1647         * allowed when the <code>audioFormat</code> field is set to
1648         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1649         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
1650         * allowed when the <code>audioFormat</code> field is set to
1651         * {@link AudioFormat#AAC AudioFormat.AAC}.
1652         */
1653        public int audioChannels;
1654
1655        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1656        public int videoBitrate;
1657
1658        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1659        public int audioBitrate;
1660
1661        /**
1662         * Background music settings. See {@link BackgroundMusicSettings
1663         * BackgroundMusicSettings} for valid values.
1664         */
1665        public BackgroundMusicSettings backgroundMusicSettings;
1666
1667        public int primaryTrackVolume;
1668
1669    }
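    /*
     * Illustrative sketch (not part of the original source): how an
     * EditSettings instance is typically filled in before calling
     * generateClip(), mirroring generateEffectClip() and
     * generateKenBurnsClip() later in this file. The clip settings, aspect
     * ratio, height and output path are placeholders.
     *
     *     EditSettings settings = new EditSettings();
     *     settings.clipSettingsArray = new ClipSettings[] { clipSettings };
     *     settings.transitionSettingsArray = null;
     *     settings.effectSettingsArray = null;
     *     settings.audioFormat = AudioFormat.AAC;
     *     settings.audioChannels = 2;
     *     settings.audioBitrate = Bitrate.BR_64_KBPS;
     *     settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
     *     settings.videoFormat = VideoFormat.H264;
     *     settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
     *     settings.videoBitrate = Bitrate.BR_5_MBPS;
     *     settings.videoFrameSize = findVideoResolution(aspectRatio, height);
     *     settings.outputFile = "/path/to/output.3gp";
     */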
1670
1671    /**
1672     * Defines the media properties.
1673     **/
1674
1675    public static class Properties {
1676
1677        /**
1678         * Duration of the media in milliseconds.
1679         */
1680
1681        public int duration;
1682
1683        /**
1684         * File type.
1685         */
1686
1687        public int fileType;
1688
1689        /**
1690         * Video format.
1691         */
1692
1693        public int videoFormat;
1694
1695        /**
1696         * Duration of the video stream of the media in milliseconds.
1697         */
1698
1699        public int videoDuration;
1700
1701        /**
1702         * Bitrate of the video stream of the media.
1703         */
1704
1705        public int videoBitrate;
1706
1707        /**
1708         * Width of the video frames or the width of the still picture in
1709         * pixels.
1710         */
1711
1712        public int width;
1713
1714        /**
1715         * Height of the video frames or the height of the still picture in
1716         * pixels.
1717         */
1718
1719        public int height;
1720
1721        /**
1722         * Average frame rate of video in the media in frames per second.
1723         */
1724
1725        public float averageFrameRate;
1726
1727        /**
1728         * Profile and level of the video in the media.
1729         */
1730
1731        public int profileAndLevel;
1732
1733        /**
1734         * Audio format.
1735         */
1736
1737        public int audioFormat;
1738
1739        /**
1740         * Duration of the audio stream of the media in milliseconds.
1741         */
1742
1743        public int audioDuration;
1744
1745        /**
1746         * Bitrate of the audio stream of the media.
1747         */
1748
1749        public int audioBitrate;
1750
1751        /**
1752         * Number of audio channels in the media.
1753         */
1754
1755        public int audioChannels;
1756
1757        /**
1758         * Sampling frequency of the audio stream in the media in samples per
1759         * second.
1760         */
1761
1762        public int audioSamplingFrequency;
1763
1764        /**
1765         * Volume value of the audio track as percentage.
1766         */
1767        public int audioVolumeValue;
1768
1769        public String Id;
1770    }
1771
1772    /**
1773     * Constructor
1774     *
1775     * @param projectPath The path where the VideoEditor stores all files
1776     *        related to the project
1777     * @param veObj The video editor reference
1778     */
1779    public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
1780        mProjectPath = projectPath;
1781        if (veObj != null) {
1782            mVideoEditor = veObj;
1783        } else {
1784            mVideoEditor = null;
1785            throw new IllegalArgumentException("video editor object is null");
1786        }
1787        if (mStoryBoardSettings == null)
1788            mStoryBoardSettings = new EditSettings();
1789
1790        _init(mProjectPath, "null");
1791        mAudioTrackPCMFilePath = null;
1792    }
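    /*
     * Illustrative usage sketch (not part of the original source): the
     * helper is created by the VideoEditor implementation with the project
     * path; passing a null editor throws IllegalArgumentException. The path
     * below is a placeholder.
     *
     *     MediaArtistNativeHelper helper =
     *             new MediaArtistNativeHelper("/path/to/project", videoEditor);
     */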
1793
1794    /**
1795     * @return The project path
1796     */
1797    String getProjectPath() {
1798        return mProjectPath;
1799    }
1800
1801    /**
1802     * @return The Audio Track PCM file path
1803     */
1804    String getProjectAudioTrackPCMFilePath() {
1805        return mAudioTrackPCMFilePath;
1806    }
1807
1808    /**
1809     * Invalidates the PCM file
1810     */
1811    void invalidatePcmFile() {
1812        if (mAudioTrackPCMFilePath != null) {
1813            new File(mAudioTrackPCMFilePath).delete();
1814            mAudioTrackPCMFilePath = null;
1815        }
1816    }
1817
1818    @SuppressWarnings("unused")
1819    private void onProgressUpdate(int taskId, int progress) {
1820        if (mProcessingState == PROCESSING_EXPORT) {
1821            if (mExportProgressListener != null) {
1822                if (mProgressToApp < progress) {
1823                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
1824                    /* record previous progress */
1825                    mProgressToApp = progress;
1826                }
1827            }
1828        }
1829        else {
1830            // Adapt progress depending on current state
1831            int actualProgress = 0;
1832            int action = 0;
1833
1834            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1835                action = MediaProcessingProgressListener.ACTION_DECODE;
1836            } else {
1837                action = MediaProcessingProgressListener.ACTION_ENCODE;
1838            }
1839
1840            switch (mProcessingState) {
1841                case PROCESSING_AUDIO_PCM:
1842                    actualProgress = progress;
1843                    break;
1844                case PROCESSING_TRANSITION:
1845                    actualProgress = progress;
1846                    break;
1847                case PROCESSING_KENBURNS:
1848                    actualProgress = progress;
1849                    break;
1850                case PROCESSING_INTERMEDIATE1:
1851                    if ((progress == 0) && (mProgressToApp != 0)) {
1852                        mProgressToApp = 0;
1853                    }
1854                    if ((progress != 0) || (mProgressToApp != 0)) {
1855                        actualProgress = progress/4;
1856                    }
1857                    break;
1858                case PROCESSING_INTERMEDIATE2:
1859                    if ((progress != 0) || (mProgressToApp != 0)) {
1860                        actualProgress = 25 + progress/4;
1861                    }
1862                    break;
1863                case PROCESSING_INTERMEDIATE3:
1864                    if ((progress != 0) || (mProgressToApp != 0)) {
1865                        actualProgress = 50 + progress/2;
1866                    }
1867                    break;
1868                case PROCESSING_NONE:
1869
1870                default:
1871                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
1872                    return;
1873            }
1874            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1875
1876                mProgressToApp = actualProgress;
1877
1878                if (mMediaProcessingProgressListener != null) {
1879                    // Send the progress indication
1880                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1881                                                                actualProgress);
1882                }
1883            }
1884            /* avoid 0 in next intermediate call */
1885            if (mProgressToApp == 0) {
1886                if (mMediaProcessingProgressListener != null) {
1887                    /*
1888                     *  Send the progress indication
1889                     */
1890                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
1891                                                                actualProgress);
1892                }
1893                mProgressToApp = 1;
1894            }
1895        }
1896    }
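    /*
     * Worked example (not part of the original source) of the progress
     * mapping above: the three intermediate passes are folded into a single
     * 0-100 range before being reported to the listener.
     *
     *     PROCESSING_INTERMEDIATE1, progress 80  ->  80 / 4      = 20
     *     PROCESSING_INTERMEDIATE2, progress 80  ->  25 + 80 / 4 = 45
     *     PROCESSING_INTERMEDIATE3, progress 80  ->  50 + 80 / 2 = 90
     */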
1897
1898    @SuppressWarnings("unused")
1899    private void onPreviewProgressUpdate(int progress, boolean isFinished,
1900                  boolean updateOverlay, String filename, int renderingMode) {
1901        if (mPreviewProgressListener != null) {
1902            if (mIsFirstProgress) {
1903                mPreviewProgressListener.onStart(mVideoEditor);
1904                mIsFirstProgress = false;
1905            }
1906
1907            final VideoEditor.OverlayData overlayData;
1908            if (updateOverlay) {
1909                overlayData = new VideoEditor.OverlayData();
1910                if (filename != null) {
1911                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
1912                } else {
1913                    overlayData.setClear();
1914                }
1915            } else {
1916                overlayData = null;
1917            }
1918
1919            mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
1920
1921            if (progress != 0) {
1922                mPreviewProgress = progress;
1923            }
1924
1925            if (isFinished) {
1926                mPreviewProgressListener.onStop(mVideoEditor);
1927            }
1928        }
1929    }
1930
1931    /**
1932     * Release the native helper object
1933     */
1934    void releaseNativeHelper() {
1935        try {
1936            release();
1937        } catch (IllegalStateException ex) {
1938            Log.e(TAG, "Illegal State exeption caught in releaseNativeHelper");
1939            throw ex;
1940        } catch (RuntimeException ex) {
1941            Log.e(TAG, "Runtime exeption caught in releaseNativeHelper");
1942            throw ex;
1943        }
1944    }
1945
1946    /**
1947     * Progress callback for the audio graph (waveform) extraction process
1948     */
1949    @SuppressWarnings("unused")
1950    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1951        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
1952            mExtractAudioWaveformProgressListener.onProgress(progress);
1953        }
1954    }
1955
1956    /**
1957     * Populates the Effect Settings in EffectSettings
1958     *
1959     * @param effects The reference of EffectColor
1960     *
1961     * @return The populated effect settings in EffectSettings reference
1962     */
1963    EffectSettings getEffectSettings(EffectColor effects) {
1964        EffectSettings effectSettings = new EffectSettings();
1965        effectSettings.startTime = (int)effects.getStartTime();
1966        effectSettings.duration = (int)effects.getDuration();
1967        effectSettings.videoEffectType = getEffectColorType(effects);
1968        effectSettings.audioEffectType = 0;
1969        effectSettings.startPercent = 0;
1970        effectSettings.durationPercent = 0;
1971        effectSettings.framingFile = null;
1972        effectSettings.topLeftX = 0;
1973        effectSettings.topLeftY = 0;
1974        effectSettings.framingResize = false;
1975        effectSettings.text = null;
1976        effectSettings.textRenderingData = null;
1977        effectSettings.textBufferWidth = 0;
1978        effectSettings.textBufferHeight = 0;
1979        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1980            effectSettings.fiftiesFrameRate = 15;
1981        } else {
1982            effectSettings.fiftiesFrameRate = 0;
1983        }
1984
1985        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
1986                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
1987            effectSettings.rgb16InputColor = effects.getColor();
1988        }
1989
1990        effectSettings.alphaBlendingStartPercent = 0;
1991        effectSettings.alphaBlendingMiddlePercent = 0;
1992        effectSettings.alphaBlendingEndPercent = 0;
1993        effectSettings.alphaBlendingFadeInTimePercent = 0;
1994        effectSettings.alphaBlendingFadeOutTimePercent = 0;
1995        return effectSettings;
1996    }
1997
1998    /**
1999     * Populates the Overlay Settings in EffectSettings
2000     *
2001     * @param overlay The reference of OverlayFrame
2002     *
2003     * @return The populated overlay settings in EffectSettings reference
2004     */
2005    EffectSettings getOverlaySettings(OverlayFrame overlay) {
2006        EffectSettings effectSettings = new EffectSettings();
2007        Bitmap bitmap = null;
2008
2009        effectSettings.startTime = (int)overlay.getStartTime();
2010        effectSettings.duration = (int)overlay.getDuration();
2011        effectSettings.videoEffectType = VideoEffect.FRAMING;
2012        effectSettings.audioEffectType = 0;
2013        effectSettings.startPercent = 0;
2014        effectSettings.durationPercent = 0;
2015        effectSettings.framingFile = null;
2016
2017        if ((bitmap = overlay.getBitmap()) != null) {
2018            effectSettings.framingFile = overlay.getFilename();
2019
2020            if (effectSettings.framingFile == null) {
2021                try {
2022                    (overlay).save(mProjectPath);
2023                } catch (IOException e) {
2024                    Log.e(TAG, "getOverlaySettings : File not found");
2025                }
2026                effectSettings.framingFile = overlay.getFilename();
2027            }
2028            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
2029                effectSettings.bitmapType = 6;
2030            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
2031                effectSettings.bitmapType = 5;
2032            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
2033                effectSettings.bitmapType = 4;
2034            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
2035                throw new RuntimeException("Bitmap config not supported");
2036
2037            effectSettings.width = bitmap.getWidth();
2038            effectSettings.height = bitmap.getHeight();
2039            effectSettings.framingBuffer = new int[effectSettings.width];
2040            int tmp = 0;
2041            short maxAlpha = 0;
2042            short minAlpha = (short)0xFF;
2043            short alpha = 0;
2044            while (tmp < effectSettings.height) {
2045                bitmap.getPixels(effectSettings.framingBuffer, 0,
2046                                 effectSettings.width, 0, tmp,
2047                                 effectSettings.width, 1);
2048                for (int i = 0; i < effectSettings.width; i++) {
2049                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
2050                    if (alpha > maxAlpha) {
2051                        maxAlpha = alpha;
2052                    }
2053                    if (alpha < minAlpha) {
2054                        minAlpha = alpha;
2055                    }
2056                }
2057                tmp += 1;
2058            }
2059            alpha = (short)((maxAlpha + minAlpha) / 2);
2060            alpha = (short)((alpha * 100) / 256);
2061            effectSettings.alphaBlendingEndPercent = alpha;
2062            effectSettings.alphaBlendingMiddlePercent = alpha;
2063            effectSettings.alphaBlendingStartPercent = alpha;
2064            effectSettings.alphaBlendingFadeInTimePercent = 100;
2065            effectSettings.alphaBlendingFadeOutTimePercent = 100;
2066            effectSettings.framingBuffer = null;
2067
2068            /*
2069             * Set the resized RGB file dimensions
2070             */
2071            effectSettings.width = overlay.getResizedRGBSizeWidth();
2072            if(effectSettings.width == 0) {
2073                effectSettings.width = bitmap.getWidth();
2074            }
2075
2076            effectSettings.height = overlay.getResizedRGBSizeHeight();
2077            if(effectSettings.height == 0) {
2078                effectSettings.height = bitmap.getHeight();
2079            }
2080
2081        }
2082
2083        effectSettings.topLeftX = 0;
2084        effectSettings.topLeftY = 0;
2085
2086        effectSettings.framingResize = true;
2087        effectSettings.text = null;
2088        effectSettings.textRenderingData = null;
2089        effectSettings.textBufferWidth = 0;
2090        effectSettings.textBufferHeight = 0;
2091        effectSettings.fiftiesFrameRate = 0;
2092        effectSettings.rgb16InputColor = 0;
2093        int mediaItemHeight;
2094        int aspectRatio;
2095        if (overlay.getMediaItem() instanceof MediaImageItem) {
2096            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2097                // Ken Burns was applied
2098                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2099                aspectRatio = getAspectRatio(
2100                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2101                    , mediaItemHeight);
2102            } else {
2103                //For image get the scaled height. Aspect ratio would remain the same
2104                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2105                aspectRatio = overlay.getMediaItem().getAspectRatio();
2106            }
2107        } else {
2108            aspectRatio = overlay.getMediaItem().getAspectRatio();
2109            mediaItemHeight = overlay.getMediaItem().getHeight();
2110        }
2111        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2112        return effectSettings;
2113    }
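    /*
     * Worked example (not part of the original source) of the alpha blending
     * computation above: for a bitmap whose per-pixel alpha spans
     * minAlpha = 64 to maxAlpha = 255,
     *
     *     alpha = (255 + 64) / 2    = 159
     *     alpha = (159 * 100) / 256 = 62
     *
     * so the start, middle and end blending percentages are all 62, with
     * 100% fade-in and fade-out time.
     */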
2114
2115     /* get Video Editor aspect ratio */
2116    int nativeHelperGetAspectRatio() {
2117        return mVideoEditor.getAspectRatio();
2118    }
2119
2120    /**
2121     * Sets the audio regenerate flag
2122     *
2123     * @param flag The boolean to set the audio regenerate flag
2124     *
2125     */
2126    void setAudioflag(boolean flag) {
2127        //check if the file exists.
2128        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2129            flag = true;
2130        }
2131        mRegenerateAudio = flag;
2132    }
2133
2134    /**
2135     * Gets the audio regenerate flag
2136     *
2137     * @return The audio regenerate flag
2138     *
2139     */
2140    boolean getAudioflag() {
2141        return mRegenerateAudio;
2142    }
2143
2144    /**
2145     * Maps the average frame rate to one of the defined enum values
2146     *
2147     * @param averageFrameRate The average frame rate of video item
2148     *
2149     * @return The frame rate from one of the defined enum values
2150     */
2151    int GetClosestVideoFrameRate(int averageFrameRate) {
2152        if (averageFrameRate >= 25) {
2153            return VideoFrameRate.FR_30_FPS;
2154        } else if (averageFrameRate >= 20) {
2155            return VideoFrameRate.FR_25_FPS;
2156        } else if (averageFrameRate >= 15) {
2157            return VideoFrameRate.FR_20_FPS;
2158        } else if (averageFrameRate >= 12) {
2159            return VideoFrameRate.FR_15_FPS;
2160        } else if (averageFrameRate >= 10) {
2161            return VideoFrameRate.FR_12_5_FPS;
2162        } else if (averageFrameRate >= 7) {
2163            return VideoFrameRate.FR_10_FPS;
2164        } else if (averageFrameRate >= 5) {
2165            return VideoFrameRate.FR_7_5_FPS;
2166        } else {
2167            return -1;
2168        }
2169    }
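    /*
     * Illustrative mappings (not part of the original source) for the
     * thresholds above:
     *
     *     GetClosestVideoFrameRate(29) -> VideoFrameRate.FR_30_FPS
     *     GetClosestVideoFrameRate(24) -> VideoFrameRate.FR_25_FPS
     *     GetClosestVideoFrameRate(13) -> VideoFrameRate.FR_15_FPS
     *     GetClosestVideoFrameRate(4)  -> -1 (no matching rate)
     */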
2170
2171    /**
2172     * Helper function to adjust the effect or overlay start time
2173     * depending on the begin and end boundary times of the media item
2174     */
2175    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
2176                                                  int endCutTime) {
2177
2178        int effectStartTime = 0;
2179        int effectDuration = 0;
2180
2181        /**
2182         * cbct -> clip begin cut time
2183         * cect -> clip end cut time
2184         ****************************************
2185         *  |                                 |
2186         *  |         cbct        cect        |
2187         *  | <-1-->   |           |          |
2188         *  |       <--|-2->       |          |
2189         *  |          | <---3---> |          |
2190         *  |          |        <--|-4--->    |
2191         *  |          |           | <--5-->  |
2192         *  |      <---|------6----|---->     |
2193         *  |                                 |
2194         *  < : effectStart
2195         *  > : effectStart + effectDuration
2196         ****************************************
2197         **/
2198
2199        /** 1 & 5 */
2200        /**
2201         * Effect falls outside the trim duration. In such a case the effect
2202         * shall not be applied.
2203         */
2204        if ((lEffect.startTime > endCutTime)
2205                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2206
2207            effectStartTime = 0;
2208            effectDuration = 0;
2209
2210            lEffect.startTime = effectStartTime;
2211            lEffect.duration = effectDuration;
2212            return;
2213        }
2214
2215        /** 2 */
2216        if ((lEffect.startTime < beginCutTime)
2217                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2218                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2219            effectStartTime = 0;
2220            effectDuration = lEffect.duration;
2221
2222            effectDuration -= (beginCutTime - lEffect.startTime);
2223            lEffect.startTime = effectStartTime;
2224            lEffect.duration = effectDuration;
2225            return;
2226        }
2227
2228        /** 3 */
2229        if ((lEffect.startTime >= beginCutTime)
2230                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2231            effectStartTime = lEffect.startTime - beginCutTime;
2232            lEffect.startTime = effectStartTime;
2233            /* lEffect.duration is left unchanged in this case */
2234            return;
2235        }
2236
2237        /** 4 */
2238        if ((lEffect.startTime >= beginCutTime)
2239                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2240            effectStartTime = lEffect.startTime - beginCutTime;
2241            effectDuration = endCutTime - lEffect.startTime;
2242            lEffect.startTime = effectStartTime;
2243            lEffect.duration = effectDuration;
2244            return;
2245        }
2246
2247        /** 6 */
2248        if ((lEffect.startTime < beginCutTime)
2249                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2250            effectStartTime = 0;
2251            effectDuration = endCutTime - beginCutTime;
2252            lEffect.startTime = effectStartTime;
2253            lEffect.duration = effectDuration;
2254            return;
2255        }
2256
2257    }
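    /*
     * Worked example (not part of the original source) for case 2 above:
     * with beginCutTime = 1000, endCutTime = 5000 and an effect with
     * startTime = 500, duration = 2000 (it straddles the begin cut), the
     * adjusted values become
     *
     *     startTime = 0
     *     duration  = 2000 - (1000 - 500) = 1500
     *
     * so only the part of the effect inside the trimmed clip is kept,
     * re-based to the start of the trimmed clip.
     */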
2258
2259    /**
2260     * Generates the clip for preview or export
2261     *
2262     * @param editSettings The EditSettings reference for generating
2263     * a clip for preview or export
2264     *
2265     * @return The error value; <code>0</code> indicates success
2266     */
2267    public int generateClip(EditSettings editSettings) {
2268        int err = 0;
2269
2270        try {
2271            err = nativeGenerateClip(editSettings);
2272        } catch (IllegalArgumentException ex) {
2273            Log.e(TAG, "Illegal Argument exception in load settings");
2274            return -1;
2275        } catch (IllegalStateException ex) {
2276            Log.e(TAG, "Illegal state exception in load settings");
2277            return -1;
2278        } catch (RuntimeException ex) {
2279            Log.e(TAG, "Runtime exception in load settings");
2280            return -1;
2281        }
2282        return err;
2283    }
2284
2285    /**
2286     * Init function to initialize the ClipSettings reference to
2287     * default values
2288     *
2289     * @param lclipSettings The ClipSettings reference
2290     */
2291    void initClipSettings(ClipSettings lclipSettings) {
2292        lclipSettings.clipPath = null;
2293        lclipSettings.clipDecodedPath = null;
2294        lclipSettings.clipOriginalPath = null;
2295        lclipSettings.fileType = 0;
2296        lclipSettings.endCutTime = 0;
2297        lclipSettings.beginCutTime = 0;
2298        lclipSettings.beginCutPercent = 0;
2299        lclipSettings.endCutPercent = 0;
2300        lclipSettings.panZoomEnabled = false;
2301        lclipSettings.panZoomPercentStart = 0;
2302        lclipSettings.panZoomTopLeftXStart = 0;
2303        lclipSettings.panZoomTopLeftYStart = 0;
2304        lclipSettings.panZoomPercentEnd = 0;
2305        lclipSettings.panZoomTopLeftXEnd = 0;
2306        lclipSettings.panZoomTopLeftYEnd = 0;
2307        lclipSettings.mediaRendering = 0;
2308    }
2309
2310
2311    /**
2312     * Populates the settings for generating an effect clip
2313     *
2314     * @param lMediaItem The media item for which the effect clip
2315     * needs to be generated
2316     * @param lclipSettings The ClipSettings reference containing
2317     * clips data
2318     * @param e The EditSettings reference containing effect specific data
2319     * @param uniqueId The unique id used in the name of the output clip
2320     * @param clipNo Used for internal purpose
2321     *
2322     * @return The name and path of generated clip
2323     */
2324    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2325            EditSettings e,String uniqueId,int clipNo) {
2326        int err = 0;
2327        EditSettings editSettings = null;
2328        String EffectClipPath = null;
2329
2330        editSettings = new EditSettings();
2331
2332        editSettings.clipSettingsArray = new ClipSettings[1];
2333        editSettings.clipSettingsArray[0] = lclipSettings;
2334
2335        editSettings.backgroundMusicSettings = null;
2336        editSettings.transitionSettingsArray = null;
2337        editSettings.effectSettingsArray = e.effectSettingsArray;
2338
2339        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2340                + lMediaItem.getId() + uniqueId + ".3gp");
2341
2342        File tmpFile = new File(EffectClipPath);
2343        if (tmpFile.exists()) {
2344            tmpFile.delete();
2345        }
2346
2347        if (lMediaItem instanceof MediaVideoItem) {
2348            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2349
2350            editSettings.audioFormat = AudioFormat.AAC;
2351            editSettings.audioChannels = 2;
2352            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2353            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2354
2355            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2356            //editSettings.videoFormat = VideoFormat.MPEG4;
2357            editSettings.videoFormat = VideoFormat.H264;
2358            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2359            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2360                    m.getHeight());
2361        } else {
2362            MediaImageItem m = (MediaImageItem)lMediaItem;
2363            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2364            editSettings.audioChannels = 2;
2365            editSettings.audioFormat = AudioFormat.AAC;
2366            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2367
2368            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2369            editSettings.videoFormat = VideoFormat.H264;
2370            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2371            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2372                    m.getScaledHeight());
2373        }
2374
2375        editSettings.outputFile = EffectClipPath;
2376
2377        if (clipNo == 1) {
2378            mProcessingState  = PROCESSING_INTERMEDIATE1;
2379        } else if (clipNo == 2) {
2380            mProcessingState  = PROCESSING_INTERMEDIATE2;
2381        }
2382        mProcessingObject = lMediaItem;
2383        err = generateClip(editSettings);
2384        mProcessingState  = PROCESSING_NONE;
2385
2386        if (err == 0) {
2387            lclipSettings.clipPath = EffectClipPath;
2388            lclipSettings.fileType = FileType.THREE_GPP;
2389            return EffectClipPath;
2390        } else {
2391            throw new RuntimeException("preview generation cannot be completed");
2392        }
2393    }
2394
2395
2396    /**
2397     * Populates the settings for generating a Ken Burns effect clip
2398     *
2399     * @param m The media image item for which the Ken Burns effect clip
2400     * needs to be generated
2401     * @param e The EditSettings reference containing clip specific data
2402     *
2403     * @return The name and path of generated clip
2404     */
2405    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2406        String output = null;
2407        int err = 0;
2408
2409        e.backgroundMusicSettings = null;
2410        e.transitionSettingsArray = null;
2411        e.effectSettingsArray = null;
2412        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2413
2414        File tmpFile = new File(output);
2415        if (tmpFile.exists()) {
2416            tmpFile.delete();
2417        }
2418
2419        e.outputFile = output;
2420        e.audioBitrate = Bitrate.BR_64_KBPS;
2421        e.audioChannels = 2;
2422        e.audioFormat = AudioFormat.AAC;
2423        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2424
2425        e.videoBitrate = Bitrate.BR_5_MBPS;
2426        e.videoFormat = VideoFormat.H264;
2427        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2428        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2429                                                           m.getScaledHeight());
2430        mProcessingState  = PROCESSING_KENBURNS;
2431        mProcessingObject = m;
2432        err = generateClip(e);
2433        // Reset the processing state and check for errors
2434        mProcessingState  = PROCESSING_NONE;
2435        if (err != 0) {
2436            throw new RuntimeException("preview generation cannot be completed");
2437        }
2438        return output;
2439    }
2440
2441
2442    /**
2443     * Calculates the output resolution for a transition clip
2444     *
2445     * @param m1 First media item associated with transition
2446     * @param m2 Second media item associated with transition
2447     *
2448     * @return The transition resolution
2449     */
2450    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2451        int clip1Height = 0;
2452        int clip2Height = 0;
2453        int videoSize = 0;
2454
2455        if (m1 != null && m2 != null) {
2456            if (m1 instanceof MediaVideoItem) {
2457                clip1Height = m1.getHeight();
2458            } else if (m1 instanceof MediaImageItem) {
2459                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2460            }
2461            if (m2 instanceof MediaVideoItem) {
2462                clip2Height = m2.getHeight();
2463            } else if (m2 instanceof MediaImageItem) {
2464                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2465            }
2466            if (clip1Height > clip2Height) {
2467                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2468            } else {
2469                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2470            }
2471        } else if (m1 == null && m2 != null) {
2472            if (m2 instanceof MediaVideoItem) {
2473                clip2Height = m2.getHeight();
2474            } else if (m2 instanceof MediaImageItem) {
2475                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2476            }
2477            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
2478        } else if (m1 != null && m2 == null) {
2479            if (m1 instanceof MediaVideoItem) {
2480                clip1Height = m1.getHeight();
2481            } else if (m1 instanceof MediaImageItem) {
2482                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2483            }
2484            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
2485        }
2486        return videoSize;
2487    }
2488
2489    /**
2490     * Populates the settings for generating a transition clip
2491     *
2492     * @param m1 First media item associated with transition
2493     * @param m2 Second media item associated with transition
2494     * @param e The EditSettings reference containing
2495     * clip specific data
2496     * @param uniqueId The unique id used in the name of the output clip
2497     * @param t The Transition specific data
2498     *
2499     * @return The name and path of generated clip
2500     */
2501    String generateTransitionClip(EditSettings e, String uniqueId,
2502            MediaItem m1, MediaItem m2,Transition t) {
2503        String outputFilename = null;
2504        int err = 0;
2505
2506        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2507        e.outputFile = outputFilename;
2508        e.audioBitrate = Bitrate.BR_64_KBPS;
2509        e.audioChannels = 2;
2510        e.audioFormat = AudioFormat.AAC;
2511        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2512
2513        e.videoBitrate = Bitrate.BR_5_MBPS;
2514        e.videoFormat = VideoFormat.H264;
2515        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2516        e.videoFrameSize = getTransitionResolution(m1, m2);
2517
2518        if (new File(outputFilename).exists()) {
2519            new File(outputFilename).delete();
2520        }
2521        mProcessingState  = PROCESSING_INTERMEDIATE3;
2522        mProcessingObject = t;
2523        err = generateClip(e);
2524        // Reset the processing state and check for errors
2525        mProcessingState  = PROCESSING_NONE;
2526        if (err != 0) {
2527            throw new RuntimeException("preview generation cannot be completed");
2528        }
2529        return outputFilename;
2530    }
2531
2532    /**
2533     * Populates effects and overlays in the EffectSettings structure
2534     * and adjusts the start time and duration of effects and overlays
2535     * with respect to the total storyboard time
2536     *
2537     * @param m The media item associated with the effects and overlays
2538     * @param effectSettings The EffectSettings array containing
2539     *      effect specific data
2540     * @param i The index at which to start populating the array
2541     * @param beginCutTime The begin cut time of the clip associated with the effect
2542     * @param endCutTime The end cut time of the clip associated with the effect
2543     * @param storyBoardTime The current storyboard time
2544     * @return The updated index
2545     */
2546    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2547            int beginCutTime, int endCutTime, int storyBoardTime) {
2548
2549        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2550                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2551            beginCutTime += m.getBeginTransition().getDuration();
2552            endCutTime -= m.getEndTransition().getDuration();
2553        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2554                && m.getEndTransition().getDuration() > 0) {
2555            endCutTime -= m.getEndTransition().getDuration();
2556        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2557                && m.getBeginTransition().getDuration() > 0) {
2558            beginCutTime += m.getBeginTransition().getDuration();
2559        }
2560
2561        final List<Effect> effects = m.getAllEffects();
2562        final List<Overlay> overlays = m.getAllOverlays();
2563        for (Effect effect : effects) {
2564            if (effect instanceof EffectColor) {
2565                effectSettings[i] = getEffectSettings((EffectColor)effect);
2566                adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2567                effectSettings[i].startTime += storyBoardTime;
2568                i++;
2569            }
2570        }
2571
2572        for (Overlay overlay : overlays) {
2573            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2574            adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
2575            effectSettings[i].startTime += storyBoardTime;
2576            i++;
2577        }
2578        return i;
2579    }
2580
2581    /**
2582     * Adjusts the media item boundaries for use in export or preview
2583     *
2584     * @param clipSettings The ClipSettings reference
2585     * @param clipProperties The Properties reference
2586     * @param m The media item
2587     */
2588    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2589                                         Properties clipProperties, MediaItem m) {
2590        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2591                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2592            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2593            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2594        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2595                && m.getEndTransition().getDuration() > 0) {
2596            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2597        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2598                && m.getBeginTransition().getDuration() > 0) {
2599            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2600        }
2601
2602        clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
2603
2604        if (clipProperties.videoDuration != 0) {
2605            clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2606        }
2607
2608        if (clipProperties.audioDuration != 0) {
2609            clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
2610        }
2611    }
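    /*
     * Worked example (not part of the original source): for a clip cut to
     * [2000, 10000] ms with a 1000 ms begin transition and a 500 ms end
     * transition, the boundaries above become
     *
     *     beginCutTime = 2000 + 1000 = 3000
     *     endCutTime   = 10000 - 500 = 9500
     *     duration     = 9500 - 3000 = 6500
     *
     * and the video and audio durations, when non-zero, are set to the same
     * 6500 ms.
     */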
2612
2613    /**
2614     * Generates the transition if a transition is present
2615     * and is in an invalidated state
2616     *
2617     * @param transition The Transition reference
2618     * @param editSettings The EditSettings reference
2619     * @param clipPropertiesArray The clip Properties array
2620     * @param index The index in the clip properties array for the current clip
2621     */
2622    private void generateTransition(Transition transition, EditSettings editSettings,
2623            PreviewClipProperties clipPropertiesArray, int index) {
2624        if (!(transition.isGenerated())) {
2625            transition.generate();
2626        }
2627        editSettings.clipSettingsArray[index] = new ClipSettings();
2628        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2629        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2630        editSettings.clipSettingsArray[index].beginCutTime = 0;
2631        editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
2632        editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
2633
2634        try {
2635            clipPropertiesArray.clipProperties[index] =
2636                getMediaProperties(transition.getFilename());
2637        } catch (Exception e) {
2638            throw new IllegalArgumentException("Unsupported file or file not found");
2639        }
2640
2641        clipPropertiesArray.clipProperties[index].Id = null;
2642        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2643        clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
2644        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2645            clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
2646        }
2647
2648        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2649            clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
2650        }
2651    }
2652
2653    /**
2654     * Sets the volume for current media item in clip properties array
2655     *
2656     * @param m The media item
2657     * @param clipProperties The clip properties array reference
2658     * @param index The index in the clip properties array for the current clip
2659     */
2660    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2661                              int index) {
2662        if (m instanceof MediaVideoItem) {
2663            final boolean videoMuted = ((MediaVideoItem)m).isMuted();
2664            if (videoMuted == false) {
2665                mClipProperties.clipProperties[index].audioVolumeValue =
2666                    ((MediaVideoItem)m).getVolume();
2667            } else {
2668                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2669            }
2670        } else if (m instanceof MediaImageItem) {
2671            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2672        }
2673    }
2674
2675    /**
2676     * Checks for odd size image width and height
2677     *
2678     * @param m The media item
2679     * @param clipProperties The clip properties array reference
2680     * @param index The index in the clip properties array for the current clip
2681     */
2682    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2683        if (m instanceof MediaImageItem) {
2684            int width = mClipProperties.clipProperties[index].width;
2685            int height = mClipProperties.clipProperties[index].height;
2686
2687            if ((width % 2) != 0) {
2688                width -= 1;
2689            }
2690            if ((height % 2) != 0) {
2691                height -= 1;
2692            }
2693            mClipProperties.clipProperties[index].width = width;
2694            mClipProperties.clipProperties[index].height = height;
2695        }
2696    }
2697
2698    /**
2699     * Populates the media item properties and calculates the maximum
2700     * height among all the clips
2701     *
2702     * @param m The media item
2703     * @param index The index in the clip properties array for the current clip
2704     * @param maxHeight The maximum height seen among the previous clips
2705     *
2706     * @return The updated maximum height: the current clip's height if it is
2707     * greater than the heights of all previous clips
2708     */
2709    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2710        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2711        if (m instanceof MediaVideoItem) {
2712            mPreviewEditSettings.clipSettingsArray[index] =
2713                ((MediaVideoItem)m).getVideoClipProperties();
2714            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2715                maxHeight = ((MediaVideoItem)m).getHeight();
2716            }
2717        } else if (m instanceof MediaImageItem) {
2718            mPreviewEditSettings.clipSettingsArray[index] =
2719                ((MediaImageItem)m).getImageClipProperties();
2720            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2721                maxHeight = ((MediaImageItem)m).getScaledHeight();
2722            }
2723        }
2724        /* Handle the image files here */
2725        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2726            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
2727                ((MediaImageItem)m).getDecodedImageFileName();
2728
2729            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2730                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2731        }
2732        return maxHeight;
2733    }
2734
2735    /**
2736     * Populates the background music track properties
2737     *
2738     * @param mediaBGMList The background music list
2739     *
2740     */
2741    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2742
2743        if (mediaBGMList.size() == 1) {
2744            mAudioTrack = mediaBGMList.get(0);
2745        } else {
2746            mAudioTrack = null;
2747        }
2748
2749        if (mAudioTrack != null) {
2750            mAudioSettings = new AudioSettings();
2751            Properties mAudioProperties = new Properties();
2752            mAudioSettings.pFile = null;
2753            mAudioSettings.Id = mAudioTrack.getId();
2754            try {
2755                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2756            } catch (Exception e) {
2757               throw new IllegalArgumentException("Unsupported file or file not found");
2758            }
2759            mAudioSettings.bRemoveOriginal = false;
2760            mAudioSettings.channels = mAudioProperties.audioChannels;
2761            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2762            mAudioSettings.loop = mAudioTrack.isLooping();
2763            mAudioSettings.ExtendedFs = 0;
2764            mAudioSettings.pFile = mAudioTrack.getFilename();
2765            mAudioSettings.startMs = mAudioTrack.getStartTime();
2766            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2767            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2768            if (mAudioTrack.isMuted()) {
2769                mAudioSettings.volume = 0;
2770            } else {
2771                mAudioSettings.volume = mAudioTrack.getVolume();
2772            }
2773            mAudioSettings.fileType = mAudioProperties.fileType;
2774            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2775            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2776            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2777            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2778            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2779
2780            mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
2781            mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
2782            mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
2783            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2784                mAudioTrack.getStartTime();
2785            mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
2786            mPreviewEditSettings.backgroundMusicSettings.beginLoop =
2787                mAudioTrack.getBoundaryBeginTime();
2788            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2789                                               mAudioTrack.getBoundaryEndTime();
2790            mPreviewEditSettings.backgroundMusicSettings.enableDucking =
2791                mAudioTrack.isDuckingEnabled();
2792            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
2793                mAudioTrack.getDuckingThreshhold();
2794            mPreviewEditSettings.backgroundMusicSettings.lowVolume =
2795                mAudioTrack.getDuckedTrackVolume();
2796            mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
2797            mPreviewEditSettings.primaryTrackVolume = 100;
2798            mProcessingState  = PROCESSING_AUDIO_PCM;
2799            mProcessingObject = mAudioTrack;
2800        } else {
2801            mAudioSettings = null;
2802            mPreviewEditSettings.backgroundMusicSettings = null;
2803            mAudioTrackPCMFilePath = null;
2804        }
2805    }
2806
2807    /**
2808     * Calculates the total number of effects and overlays in all the
2809     * media items in the media item list
2810     *
2811     * @param mediaItemsList The media item list
2812     *
2813     * @return The total number of effects and overlays, excluding Ken Burns effects
2814     *
2815     */
2816    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2817        int totalEffects = 0;
2818        final Iterator<MediaItem> it = mediaItemsList.iterator();
2819        while (it.hasNext()) {
2820            final MediaItem t = it.next();
2821            totalEffects += t.getAllEffects().size();
2822            totalEffects += t.getAllOverlays().size();
2823            final Iterator<Effect> ef = t.getAllEffects().iterator();
2824            while (ef.hasNext()) {
2825                final Effect e = ef.next();
2826                if (e instanceof EffectKenBurns) {
2827                    totalEffects--;
2828                }
2829            }
2830        }
2831        return totalEffects;
2832    }
2833
2834    /**
2835     * Forms the clip settings array and the clip properties array,
2836     * including transition clips and effect settings, for preview
2837     * or export purposes.
2838     *
2840     * @param mediaItemsList The media item list
2841     * @param mediaTransitionList The transitions list
2842     * @param mediaBGMList The background music list
2843     * @param listener The MediaProcessingProgressListener
2844     *
2845     */
2846    void previewStoryBoard(List<MediaItem> mediaItemsList,
2847            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2848            MediaProcessingProgressListener listener) {
2849        if (mInvalidatePreviewArray) {
2850            int previewIndex = 0;
2851            int totalEffects = 0;
2852            int storyBoardTime = 0;
2853            int maxHeight = 0;
2854            int beginCutTime = 0;
2855            int endCutTime = 0;
2856            int effectIndex = 0;
2857            Transition lTransition = null;
2858            MediaItem lMediaItem = null;
2859            mPreviewEditSettings = new EditSettings();
2860            mClipProperties = new PreviewClipProperties();
2861            mTotalClips = 0;
2862
2863            mTotalClips = mediaItemsList.size();
2864            for (Transition transition : mediaTransitionList) {
2865                if (transition.getDuration() > 0) {
2866                    mTotalClips++;
2867                }
2868            }
2869
2870            totalEffects = getTotalEffects(mediaItemsList);
2871
2872            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2873            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2874            mClipProperties.clipProperties = new Properties[mTotalClips];
2875
2876            /* record the callback progress listener */
2877            mMediaProcessingProgressListener = listener;
2878            mProgressToApp = 0;
2879
2880            if (mediaItemsList.size() > 0) {
2881                for (int i = 0; i < mediaItemsList.size(); i++) {
2882                    /* Get the Media Item from the list */
2883                    lMediaItem = mediaItemsList.get(i);
2884                    if (lMediaItem instanceof MediaVideoItem) {
2885                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2886                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2887                    } else if (lMediaItem instanceof MediaImageItem) {
2888                        beginCutTime = 0;
2889                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2890                    }
2891                    /* Get the transition associated with Media Item */
2892                    lTransition = lMediaItem.getBeginTransition();
2893                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2894                        /* generate transition clip */
2895                        generateTransition(lTransition, mPreviewEditSettings,
2896                                           mClipProperties, previewIndex);
2897                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2898                        previewIndex++;
2899                    }
2900                    /* Populate media item properties */
2901                    maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
2902                    /* Get the clip properties of the media item. */
2903                    if (lMediaItem instanceof MediaImageItem) {
2904                        int tmpCnt = 0;
2905                        boolean bEffectKbPresent = false;
2906                        final List<Effect> effectList = lMediaItem.getAllEffects();
2907                        /**
2908                         * Check if Ken Burns effect is present
2909                         */
2910                        while (tmpCnt < effectList.size()) {
2911                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2912                                bEffectKbPresent = true;
2913                                break;
2914                            }
2915                            tmpCnt++;
2916                        }
2917
2918                        if (bEffectKbPresent) {
2919                            try {
2920                                if (((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
2921                                    mClipProperties.clipProperties[previewIndex] =
2922                                        getMediaProperties(
2923                                            ((MediaImageItem)lMediaItem).getGeneratedImageClip());
2924                                } else {
2925                                    mClipProperties.clipProperties[previewIndex] =
2926                                        getMediaProperties(
2927                                            ((MediaImageItem)lMediaItem).getScaledImageFileName());
2928                                    mClipProperties.clipProperties[previewIndex].width =
2929                                        ((MediaImageItem)lMediaItem).getScaledWidth();
2930                                    mClipProperties.clipProperties[previewIndex].height =
2931                                        ((MediaImageItem)lMediaItem).getScaledHeight();
2932                                }
2933                            } catch (Exception e) {
2934                                throw new IllegalArgumentException("Unsupported file or file not found");
2935                            }
2936                        } else {
2937                            try {
2938                                mClipProperties.clipProperties[previewIndex] =
2939                                    getMediaProperties(
2940                                        ((MediaImageItem)lMediaItem).getScaledImageFileName());
2941                            } catch (Exception e) {
2942                                throw new IllegalArgumentException("Unsupported file or file not found");
2943                            }
2944                            mClipProperties.clipProperties[previewIndex].width =
2945                                ((MediaImageItem)lMediaItem).getScaledWidth();
2946                            mClipProperties.clipProperties[previewIndex].height =
2947                                ((MediaImageItem)lMediaItem).getScaledHeight();
2948                        }
2949                    } else {
2950                        try {
2951                            mClipProperties.clipProperties[previewIndex] =
2952                                getMediaProperties(lMediaItem.getFilename());
2953                        } catch (Exception e) {
2954                            throw new IllegalArgumentException(
2955                                    "Unsupported file or file not found");
2956                        }
2957                    }
2958                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2959                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2960                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2961
2962                    /*
2963                     * Adjust the media item start and end times w.r.t. the begin
2964                     * and end transitions associated with the media item
2965                     */
2966
2967                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2968                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2969
2970                    /*
2971                     * Get all the effects and overlays for that media item and
2972                     * adjust start time and duration of effects
2973                     */
2974
2975                    effectIndex = populateEffects(lMediaItem,
2976                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2977                            endCutTime, storyBoardTime);
2978                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2979                    previewIndex++;
2980
2981                    /* Check if there is any end transition at last media item */
2982
2983                    if (i == (mediaItemsList.size() - 1)) {
2984                        lTransition = lMediaItem.getEndTransition();
2985                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2986                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
2987                                    previewIndex);
2988                            break;
2989                        }
2990                    }
2991                }
2992            }
2993            if (!mErrorFlagSet) {
2994                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
2995                        .getAspectRatio(), maxHeight);
2996                populateBackgroundMusicProperties(mediaBGMList);
2997
2998                /** call to native populate settings */
2999                try {
3000                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3001                } catch (IllegalArgumentException ex) {
3002                    Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
3003                    throw ex;
3004                } catch (IllegalStateException ex) {
3005                    Log.e(TAG, "Illegal state exception in nativePopulateSettings");
3006                    throw ex;
3007                } catch (RuntimeException ex) {
3008                    Log.e(TAG, "Runtime exception in nativePopulateSettings");
3009                    throw ex;
3010                }
3011                mInvalidatePreviewArray = false;
3012                mProcessingState  = PROCESSING_NONE;
3013            }
3014            if (mErrorFlagSet) {
3015                mErrorFlagSet = false;
3016                throw new RuntimeException("preview generation cannot be completed");
3017            }
3018        }
3019    } /* END of previewStoryBoard */
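
    /*
     * Illustrative usage sketch (editorial addition, not part of the original class):
     * previewStoryBoard() is normally invoked with the editor's current media item,
     * transition and background music lists before previewing or exporting, as
     * export() does further below:
     *
     *     helper.previewStoryBoard(items, transitions, audioTracks, progressListener);
     *
     * "helper" and the list/listener arguments are assumed to exist in the caller.
     */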
3020
3021    /**
3022     * This function is responsible for starting the preview
3023     *
3024     * @param surface The surface on which the preview has to be displayed
3025     * @param fromMs The time in ms from which the preview has to be started
3026     * @param toMs The time in ms until which the preview has to be played
3027     * @param loop Whether or not the preview should loop
3028     * @param callbackAfterFrameCount Indicates after how many frames
3029     * the progress callback is invoked
3030     * @param listener The PreviewProgressListener that receives the
3031     * preview progress callbacks
3032     */
3033    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3034            int callbackAfterFrameCount, PreviewProgressListener listener) {
3035        mPreviewProgress = fromMs;
3036        mIsFirstProgress = true;
3037        mPreviewProgressListener = listener;
3038
3039        if (!mInvalidatePreviewArray) {
3040            try {
3041                /** Modify the image files names to rgb image files. */
3042                /** Modify the image file names to the decoded RGB image files. */
3043                    clipCnt++) {
3044                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3045                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3046                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3047                    }
3048                }
3049                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3050                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3051            } catch (IllegalArgumentException ex) {
3052                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
3053                throw ex;
3054            } catch (IllegalStateException ex) {
3055                Log.e(TAG, "Illegal state exception in nativeStartPreview");
3056                throw ex;
3057            } catch (RuntimeException ex) {
3058                Log.e(TAG, "Runtime exception in nativeStartPreview");
3059                throw ex;
3060            }
3061        }
3062    }
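
    /*
     * Illustrative usage sketch (editorial addition): assuming the preview array has
     * already been generated via previewStoryBoard(), a looping preview of the first
     * 10 seconds with a progress callback every 15 frames could be started as:
     *
     *     helper.doPreview(previewSurface, 0, 10000, true, 15, progressListener);
     *
     * "helper", "previewSurface" and "progressListener" are assumed caller-side names.
     */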
3063
3064    /**
3065     * This function is responsible for stopping the preview
3066     */
3067    long stopPreview() {
3068        nativeStopPreview();
3069        return mPreviewProgress;
3070    }
3071
3072    /**
3073     * This function is responsible for rendering a single frame
3074     * from the complete story board on the surface
3075     *
3076     * @param surface The surface on which frame has to be rendered
3077     * @param time The time in ms at which the frame has to be rendered
3078     * @param surfaceWidth The surface width
3079     * @param surfaceHeight The surface height
3080     * @param overlayData The overlay data
3081     *
3082     * @return The actual time from the story board at which the frame was extracted
3083     * and rendered
3084     */
3085    long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3086            int surfaceHeight, VideoEditor.OverlayData overlayData) {
3087        if (mInvalidatePreviewArray) {
3088            throw new RuntimeException("Call generate preview first");
3089        }
3090
3091        long timeMs = 0;
3092        try {
3093            for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3094                  clipCnt++) {
3095                if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3096                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3097                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3098                }
3099            }
3100
3101            // Reset the render preview frame params that shall be set by native.
3102            mRenderPreviewOverlayFile = null;
3103            mRenderPreviewRenderingMode = MediaRendering.RESIZING;
3104
3105            nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3106
3107            timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3108
3109            if (mRenderPreviewOverlayFile != null) {
3110                overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
3111                        mRenderPreviewRenderingMode);
3112            } else {
3113                overlayData.setClear();
3114            }
3115        } catch (IllegalArgumentException ex) {
3116            Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
3117            throw ex;
3118        } catch (IllegalStateException ex) {
3119            Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
3120            throw ex;
3121        } catch (RuntimeException ex) {
3122            Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
3123            throw ex;
3124        }
3125
3126        return timeMs;
3127    }
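
    /*
     * Illustrative usage sketch (editorial addition): rendering the storyboard frame
     * at 5 seconds and applying any overlay reported back by the native layer;
     * "helper" and "surface" are assumed caller-side names:
     *
     *     VideoEditor.OverlayData overlay = new VideoEditor.OverlayData();
     *     long renderedMs = helper.renderPreviewFrame(surface, 5000, 640, 480, overlay);
     */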
3128
3129    private void previewFrameEditInfo(String filename, int renderingMode) {
3130        mRenderPreviewOverlayFile = filename;
3131        mRenderPreviewRenderingMode = renderingMode;
3132    }
3133
3134
3135    /**
3136     * This function is responsible for rendering a single frame
3137     * from a single media item on the surface
3138     *
3139     * @param surface The surface on which frame has to be rendered
3140     * @param filepath The file path for which the frame needs to be displayed
3141     * @param time The time in ms at which the frame has to be rendered
3142     * @param framewidth The frame width
3143     * @param frameheight The frame height
3144     *
3145     * @return The actual time from the media item at which the frame was extracted
3146     * and rendered
3147     */
3148    long renderMediaItemPreviewFrame(Surface surface, String filepath,
3149                                            long time, int framewidth, int frameheight) {
3150        long timeMs = 0;
3151        try {
3152            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3153                    frameheight, 0, 0, time);
3154        } catch (IllegalArgumentException ex) {
3155            Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
3156            throw ex;
3157        } catch (IllegalStateException ex) {
3158            Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
3159            throw ex;
3160        } catch (RuntimeException ex) {
3161            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
3162            throw ex;
3163        }
3164
3165        return timeMs;
3166    }
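
    /*
     * Illustrative usage sketch (editorial addition): rendering the frame of a single
     * media item at 2 seconds on a 640x480 surface; the file path is a placeholder:
     *
     *     long shownMs = helper.renderMediaItemPreviewFrame(surface,
     *             "/sdcard/clip.mp4", 2000, 640, 480);
     */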
3167
3168    /**
3169     * This function sets the flag to invalidate the preview array
3170     * so that the preview can be generated again
3171     */
3172    void setGeneratePreview(boolean isRequired) {
3173        boolean semAcquiredDone = false;
3174        try {
3175            lock();
3176            semAcquiredDone = true;
3177            mInvalidatePreviewArray = isRequired;
3178        } catch (InterruptedException ex) {
3179            Log.e(TAG, "Interrupted exception in setGeneratePreview");
3180        } finally {
3181            if (semAcquiredDone) {
3182                unlock();
3183            }
3184        }
3185    }
3186
3187    /**
3188     * @return Returns the current status of preview invalidation
3189     * flag
3190     */
3191    boolean getGeneratePreview() {
3192        return mInvalidatePreviewArray;
3193    }
3194
3195    /**
3196     * Calculates the aspect ratio from width and height
3197     *
3198     * @param w The width of media item
3199     * @param h The height of media item
3200     *
3201     * @return The calculated aspect ratio
3202     */
3203    int getAspectRatio(int w, int h) {
3204        double apRatio = (double)(w) / (double)(h);
3205        BigDecimal bd = new BigDecimal(apRatio);
3206        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3207        apRatio = bd.doubleValue();
3208        int var = MediaProperties.ASPECT_RATIO_16_9;
3209        if (apRatio >= 1.7) {
3210            var = MediaProperties.ASPECT_RATIO_16_9;
3211        } else if (apRatio >= 1.6) {
3212            var = MediaProperties.ASPECT_RATIO_5_3;
3213        } else if (apRatio >= 1.5) {
3214            var = MediaProperties.ASPECT_RATIO_3_2;
3215        } else if (apRatio > 1.3) {
3216            var = MediaProperties.ASPECT_RATIO_4_3;
3217        } else if (apRatio >= 1.2) {
3218            var = MediaProperties.ASPECT_RATIO_11_9;
3219        }
3220        return var;
3221    }
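
    /*
     * Worked example of the mapping above (editorial addition): 1280x720 gives a
     * rounded ratio of 1.778, which is >= 1.7 and therefore maps to
     * MediaProperties.ASPECT_RATIO_16_9, while 640x480 gives 1.333 and maps to
     * MediaProperties.ASPECT_RATIO_4_3.
     */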
3222
3223    /**
3224     * Maps the file type used in native layer
3225     * to file type used in JAVA layer
3226     *
3227     * @param fileType The file type in native layer
3228     *
3229     * @return The File type in JAVA layer
3230     */
3231    int getFileType(int fileType) {
3232        int retValue = -1;
3233        switch (fileType) {
3234            case FileType.UNSUPPORTED:
3235                retValue = MediaProperties.FILE_UNSUPPORTED;
3236                break;
3237            case FileType.THREE_GPP:
3238                retValue = MediaProperties.FILE_3GP;
3239                break;
3240            case FileType.MP4:
3241                retValue = MediaProperties.FILE_MP4;
3242                break;
3243            case FileType.JPG:
3244                retValue = MediaProperties.FILE_JPEG;
3245                break;
3246            case FileType.PNG:
3247                retValue = MediaProperties.FILE_PNG;
3248                break;
3249            case FileType.MP3:
3250                retValue = MediaProperties.FILE_MP3;
3251                break;
3252            case FileType.M4V:
3253                retValue = MediaProperties.FILE_M4V;
3254                break;
3255
3256            default:
3257                retValue = -1;
3258        }
3259        return retValue;
3260    }
3261
3262    /**
3263     * Maps the video codec type used in native layer
3264     * to video codec type used in JAVA layer
3265     *
3266     * @param codecType The video codec type in native layer
3267     *
3268     * @return The video codec type in JAVA layer
3269     */
3270    int getVideoCodecType(int codecType) {
3271        int retValue = -1;
3272        switch (codecType) {
3273            case VideoFormat.H263:
3274                retValue = MediaProperties.VCODEC_H263;
3275                break;
3276            case VideoFormat.H264:
3277                retValue = MediaProperties.VCODEC_H264BP;
3278                break;
3279            case VideoFormat.MPEG4:
3280                retValue = MediaProperties.VCODEC_MPEG4;
3281                break;
3282            case VideoFormat.UNSUPPORTED:
3283
3284            default:
3285                retValue = -1;
3286        }
3287        return retValue;
3288    }
3289
3290    /**
3291     * Maps the audio codec type used in native layer
3292     * to audio codec type used in JAVA layer
3293     *
3294     * @param codecType The audio codec type in native layer
3295     *
3296     * @return The audio codec type in JAVA layer
3297     */
3298    int getAudioCodecType(int codecType) {
3299        int retValue = -1;
3300        switch (codecType) {
3301            case AudioFormat.AMR_NB:
3302                retValue = MediaProperties.ACODEC_AMRNB;
3303                break;
3304            case AudioFormat.AAC:
3305                retValue = MediaProperties.ACODEC_AAC_LC;
3306                break;
3307            case AudioFormat.MP3:
3308                retValue = MediaProperties.ACODEC_MP3;
3309                break;
3310
3311            default:
3312                retValue = -1;
3313        }
3314        return retValue;
3315    }
3316
3317    /**
3318     * Returns the frame rate as integer
3319     *
3320     * @param fps The fps as enum
3321     *
3322     * @return The frame rate as integer
3323     */
3324    int getFrameRate(int fps) {
3325        int retValue = -1;
3326        switch (fps) {
3327            case VideoFrameRate.FR_5_FPS:
3328                retValue = 5;
3329                break;
3330            case VideoFrameRate.FR_7_5_FPS:
3331                retValue = 8;
3332                break;
3333            case VideoFrameRate.FR_10_FPS:
3334                retValue = 10;
3335                break;
3336            case VideoFrameRate.FR_12_5_FPS:
3337                retValue = 13;
3338                break;
3339            case VideoFrameRate.FR_15_FPS:
3340                retValue = 15;
3341                break;
3342            case VideoFrameRate.FR_20_FPS:
3343                retValue = 20;
3344                break;
3345            case VideoFrameRate.FR_25_FPS:
3346                retValue = 25;
3347                break;
3348            case VideoFrameRate.FR_30_FPS:
3349                retValue = 30;
3350                break;
3351
3352            default:
3353                retValue = -1;
3354        }
3355        return retValue;
3356    }
3357
3358    /**
3359     * Maps the file type used in JAVA layer
3360     * to file type used in native layer
3361     *
3362     * @param fileType The file type in JAVA layer
3363     *
3364     * @return The File type in native layer
3365     */
3366    int getMediaItemFileType(int fileType) {
3367        int retValue = -1;
3368
3369        switch (fileType) {
3370            case MediaProperties.FILE_UNSUPPORTED:
3371                retValue = FileType.UNSUPPORTED;
3372                break;
3373            case MediaProperties.FILE_3GP:
3374                retValue = FileType.THREE_GPP;
3375                break;
3376            case MediaProperties.FILE_MP4:
3377                retValue = FileType.MP4;
3378                break;
3379            case MediaProperties.FILE_JPEG:
3380                retValue = FileType.JPG;
3381                break;
3382            case MediaProperties.FILE_PNG:
3383                retValue = FileType.PNG;
3384                break;
3385            case MediaProperties.FILE_M4V:
3386                retValue = FileType.M4V;
3387                break;
3388
3389            default:
3390                retValue = -1;
3391        }
3392        return retValue;
3393
3394    }
3395
3396    /**
3397     * Maps the rendering mode used in JAVA layer
3398     * to rendering mode used in native layer
3399     *
3400     * @param renderingMode The rendering mode in JAVA layer
3401     *
3402     * @return The rendering mode in native layer
3403     */
3404    int getMediaItemRenderingMode(int renderingMode) {
3405        int retValue = -1;
3406        switch (renderingMode) {
3407            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3408                retValue = MediaRendering.BLACK_BORDERS;
3409                break;
3410            case MediaItem.RENDERING_MODE_STRETCH:
3411                retValue = MediaRendering.RESIZING;
3412                break;
3413            case MediaItem.RENDERING_MODE_CROPPING:
3414                retValue = MediaRendering.CROPPING;
3415                break;
3416
3417            default:
3418                retValue = -1;
3419        }
3420        return retValue;
3421    }
3422
3423    /**
3424     * Maps the transition behavior used in JAVA layer
3425     * to transition behavior used in native layer
3426     *
3427     * @param transitionType The transition behavior in JAVA layer
3428     *
3429     * @return The transition behavior in native layer
3430     */
3431    int getVideoTransitionBehaviour(int transitionType) {
3432        int retValue = -1;
3433        switch (transitionType) {
3434            case Transition.BEHAVIOR_SPEED_UP:
3435                retValue = TransitionBehaviour.SPEED_UP;
3436                break;
3437            case Transition.BEHAVIOR_SPEED_DOWN:
3438                retValue = TransitionBehaviour.SPEED_DOWN;
3439                break;
3440            case Transition.BEHAVIOR_LINEAR:
3441                retValue = TransitionBehaviour.LINEAR;
3442                break;
3443            case Transition.BEHAVIOR_MIDDLE_SLOW:
3444                retValue = TransitionBehaviour.SLOW_MIDDLE;
3445                break;
3446            case Transition.BEHAVIOR_MIDDLE_FAST:
3447                retValue = TransitionBehaviour.FAST_MIDDLE;
3448                break;
3449
3450            default:
3451                retValue = -1;
3452        }
3453        return retValue;
3454    }
3455
3456    /**
3457     * Maps the transition slide direction used in JAVA layer
3458     * to transition slide direction used in native layer
3459     *
3460     * @param slideDirection The transition slide direction
3461     * in JAVA layer
3462     *
3463     * @return The transition slide direction in native layer
3464     */
3465    int getSlideSettingsDirection(int slideDirection) {
3466        int retValue = -1;
3467        switch (slideDirection) {
3468            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3469                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3470                break;
3471            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3472                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3473                break;
3474            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3475                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3476                break;
3477            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3478                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3479                break;
3480
3481            default:
3482                retValue = -1;
3483        }
3484        return retValue;
3485    }
3486
3487    /**
3488     * Maps the effect color type used in JAVA layer
3489     * to effect color type used in native layer
3490     *
3491     * @param effect The EffectColor reference
3492     *
3493     * @return The color effect value from native layer
3494     */
3495    private int getEffectColorType(EffectColor effect) {
3496        int retValue = -1;
3497        switch (effect.getType()) {
3498            case EffectColor.TYPE_COLOR:
3499                if (effect.getColor() == EffectColor.GREEN) {
3500                    retValue = VideoEffect.GREEN;
3501                } else if (effect.getColor() == EffectColor.PINK) {
3502                    retValue = VideoEffect.PINK;
3503                } else if (effect.getColor() == EffectColor.GRAY) {
3504                    retValue = VideoEffect.BLACK_AND_WHITE;
3505                } else {
3506                    retValue = VideoEffect.COLORRGB16;
3507                }
3508                break;
3509            case EffectColor.TYPE_GRADIENT:
3510                retValue = VideoEffect.GRADIENT;
3511                break;
3512            case EffectColor.TYPE_SEPIA:
3513                retValue = VideoEffect.SEPIA;
3514                break;
3515            case EffectColor.TYPE_NEGATIVE:
3516                retValue = VideoEffect.NEGATIVE;
3517                break;
3518            case EffectColor.TYPE_FIFTIES:
3519                retValue = VideoEffect.FIFTIES;
3520                break;
3521
3522            default:
3523                retValue = -1;
3524        }
3525        return retValue;
3526    }
3527
3528    /**
3529     * Calculates video resolution for output clip
3530     * based on clip's height and aspect ratio of storyboard
3531     *
3532     * @param aspectRatio The aspect ratio of story board
3533     * @param height The height of clip
3534     *
3535     * @return The video resolution
3536     */
3537    private int findVideoResolution(int aspectRatio, int height) {
3538        final Pair<Integer, Integer>[] resolutions;
3539        final Pair<Integer, Integer> maxResolution;
3540        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3541        switch (aspectRatio) {
3542            case MediaProperties.ASPECT_RATIO_3_2:
3543                if (height == MediaProperties.HEIGHT_480)
3544                    retValue = VideoFrameSize.NTSC;
3545                else if (height == MediaProperties.HEIGHT_720)
3546                    retValue = VideoFrameSize.W720p;
3547                break;
3548            case MediaProperties.ASPECT_RATIO_16_9:
3549                if (height == MediaProperties.HEIGHT_480)
3550                    retValue = VideoFrameSize.WVGA16x9;
3551                else if (height == MediaProperties.HEIGHT_720)
3552                    retValue = VideoFrameSize.V720p;
3553                break;
3554            case MediaProperties.ASPECT_RATIO_4_3:
3555                if (height == MediaProperties.HEIGHT_480)
3556                    retValue = VideoFrameSize.VGA;
3557                if (height == MediaProperties.HEIGHT_720)
3558                    retValue = VideoFrameSize.S720p;
3559                break;
3560            case MediaProperties.ASPECT_RATIO_5_3:
3561                if (height == MediaProperties.HEIGHT_480)
3562                    retValue = VideoFrameSize.WVGA;
3563                break;
3564            case MediaProperties.ASPECT_RATIO_11_9:
3565                if (height == MediaProperties.HEIGHT_144)
3566                    retValue = VideoFrameSize.QCIF;
3567                break;
3568        }
3569        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3570            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3571            // Get the highest resolution
3572            maxResolution = resolutions[resolutions.length - 1];
3573            retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
3574        }
3575
3576        return retValue;
3577    }
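
    /*
     * Worked example of the mapping above (editorial addition): for a 16:9 storyboard
     * and a clip height of 720, the method returns VideoFrameSize.V720p; when no
     * direct match exists it falls back to the highest resolution supported for the
     * editor's aspect ratio.
     */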
3578
3579    /**
3580     * This method is responsible for exporting a movie
3581     *
3582     * @param filePath The output file path
3583     * @param projectDir The output project directory
3584     * @param height The height of clip
3585     * @param bitrate The bitrate at which the movie should be exported
3586     * @param mediaItemsList The media items list
3587     * @param mediaTransitionList The transitions list
3588     * @param mediaBGMList The background track list
3589     * @param listener The ExportProgressListener
3590     *
3591     */
3592    void export(String filePath, String projectDir, int height, int bitrate,
3593            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3594            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3595
3596        int outBitrate = 0;
3597        mExportFilename = filePath;
3598        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3599        mExportProgressListener = listener;
3600
3601        mProgressToApp = 0;
3602
3603        switch (bitrate) {
3604            case MediaProperties.BITRATE_28K:
3605                outBitrate = Bitrate.BR_32_KBPS;
3606                break;
3607            case MediaProperties.BITRATE_40K:
3608                outBitrate = Bitrate.BR_48_KBPS;
3609                break;
3610            case MediaProperties.BITRATE_64K:
3611                outBitrate = Bitrate.BR_64_KBPS;
3612                break;
3613            case MediaProperties.BITRATE_96K:
3614                outBitrate = Bitrate.BR_96_KBPS;
3615                break;
3616            case MediaProperties.BITRATE_128K:
3617                outBitrate = Bitrate.BR_128_KBPS;
3618                break;
3619            case MediaProperties.BITRATE_192K:
3620                outBitrate = Bitrate.BR_192_KBPS;
3621                break;
3622            case MediaProperties.BITRATE_256K:
3623                outBitrate = Bitrate.BR_256_KBPS;
3624                break;
3625            case MediaProperties.BITRATE_384K:
3626                outBitrate = Bitrate.BR_384_KBPS;
3627                break;
3628            case MediaProperties.BITRATE_512K:
3629                outBitrate = Bitrate.BR_512_KBPS;
3630                break;
3631            case MediaProperties.BITRATE_800K:
3632                outBitrate = Bitrate.BR_800_KBPS;
3633                break;
3634            case MediaProperties.BITRATE_2M:
3635                outBitrate = Bitrate.BR_2_MBPS;
3636                break;
3637
3638            case MediaProperties.BITRATE_5M:
3639                outBitrate = Bitrate.BR_5_MBPS;
3640                break;
3641            case MediaProperties.BITRATE_8M:
3642                outBitrate = Bitrate.BR_8_MBPS;
3643                break;
3644
3645            default:
3646                throw new IllegalArgumentException("Argument Bitrate incorrect");
3647        }
3648        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3649        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3650
3651        int aspectRatio = mVideoEditor.getAspectRatio();
3652        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3653        mPreviewEditSettings.videoFormat = VideoFormat.H264;
3654        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3655        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3656        mPreviewEditSettings.maxFileSize = 0;
3657        mPreviewEditSettings.audioChannels = 2;
3658        mPreviewEditSettings.videoBitrate = outBitrate;
3659        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3660
3661        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3662        for (int index = 0; index < mTotalClips - 1; index++) {
3663            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3664            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3665                VideoTransition.NONE;
3666            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3667                AudioTransition.NONE;
3668        }
3669
3670        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3671            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3672                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3673                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3674            }
3675        }
3676        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3677
3678        int err = 0;
3679        try {
3680            mProcessingState  = PROCESSING_EXPORT;
3681            mProcessingObject = null;
3682            err = generateClip(mPreviewEditSettings);
3683            mProcessingState  = PROCESSING_NONE;
3684        } catch (IllegalArgumentException ex) {
3685            Log.e(TAG, "IllegalArgument for generateClip");
3686            throw ex;
3687        } catch (IllegalStateException ex) {
3688            Log.e(TAG, "IllegalStateException for generateClip");
3689            throw ex;
3690        } catch (RuntimeException ex) {
3691            Log.e(TAG, "RuntimeException for generateClip");
3692            throw ex;
3693        }
3694
3695        if (err != 0) {
3696            Log.e(TAG, "generateClip failed with error=" + err);
3697            throw new RuntimeException("generateClip failed with error=" + err);
3698        }
3699
3700        mExportProgressListener = null;
3701    }
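
    /*
     * Illustrative usage sketch (editorial addition): exporting the current storyboard
     * as an H.264/AAC movie at 2 Mbps with a 720 line output height; "helper", the
     * item/transition/track lists and "exportListener" are assumed caller-side names:
     *
     *     helper.export("/sdcard/movie.mp4", projectDir, MediaProperties.HEIGHT_720,
     *             MediaProperties.BITRATE_2M, items, transitions, audioTracks,
     *             exportListener);
     */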
3702
3703    /**
3704     * This method takes care of stopping the export process
3705     *
3706     * @param filename The file name for which the export has to be stopped
3707     */
3708    void stop(String filename) {
3709        try {
3710            stopEncoding();
3711            new File(mExportFilename).delete();
3712        } catch (IllegalStateException ex) {
3713            Log.e(TAG, "Illegal state exception in stopEncoding");
3714            throw ex;
3715        } catch (RuntimeException ex) {
3716            Log.e(TAG, "Runtime exception in stopEncoding");
3717            throw ex;
3718        }
3719    }
3720
3721    /**
3722     * This method extracts a frame from the input file and returns it as a bitmap
3723     *
3724     * @param inputFile The input file name
3725     * @param width The width of the output frame
3726     * @param height The height of the output frame
3727     * @param timeMS The time in ms at which the frame has to be extracted
3728     * @return The extracted frame as a bitmap
3729     */
3730    Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3731        if (inputFile == null) {
3732            throw new IllegalArgumentException();
3733        }
3734
3735        int newWidth = 0;
3736        int newHeight = 0;
3737        Bitmap tempBitmap = null;
3738
3739        /* Make width and height even */
3740        newWidth = (width + 1) & 0xFFFFFFFE;
3741        newHeight = (height + 1) & 0xFFFFFFFE;
3742
3743        /* Create a temp bitmap for resized thumbnails */
3744        if ((newWidth != width) || (newHeight != height)) {
3745             tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3746        }
3747
3748        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4);
3749        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3750        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);
3751
3752        if ((newWidth == width) && (newHeight == height)) {
3753            bitmap.copyPixelsFromBuffer(rgb888);
3754        } else {
3755            /* Create a temp bitmap to be used for resize */
3756            tempBitmap.copyPixelsFromBuffer(rgb888);
3757
3758            /* Create a canvas to resize */
3759            final Canvas canvas = new Canvas(bitmap);
3760            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3761                                          new Rect(0, 0, width, height), sResizePaint);
3762        }
3763
3764        if (tempBitmap != null) {
3765            tempBitmap.recycle();
3766        }
3767        return bitmap;
3768    }
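
    /*
     * Illustrative usage sketch (editorial addition): extracting a 96x96 thumbnail of
     * the frame at 3 seconds; the input path is a placeholder:
     *
     *     Bitmap thumb = helper.getPixels("/sdcard/clip.mp4", 96, 96, 3000);
     */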
3769
3770    /**
3771     * This method extracts a list of frames from the
3772     * input file and returns the frames in a bitmap array
3773     *
3774     * @param filename The input file name
3775     * @param width The width of the output frame
3776     * @param height The height of the output frame
3777     * @param startMs The starting time in ms
3778     * @param endMs The end time in ms
3779     * @param thumbnailCount The number of frames to be extracted
3780     * from startMs to endMs
3781     *
3782     * @return The extracted frames as a bitmap array
3783     **/
3784    public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
3785            int thumbnailCount) {
3786        int[] rgb888 = null;
3787        int thumbnailSize = 0;
3788        int newWidth = 0;
3789        int newHeight = 0;
3790        Bitmap tempBitmap = null;
3791
3792        /* Make width and height even */
3793        newWidth = (width + 1) & 0xFFFFFFFE;
3794        newHeight = (height + 1) & 0xFFFFFFFE;
3795        thumbnailSize = newWidth * newHeight * 4;
3796
3797        /* Create a temp bitmap for resized thumbnails */
3798        if ((newWidth != width) || (newHeight != height)) {
3799            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3800        }
3801        int i = 0;
3802        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
3803        Bitmap[] bitmaps = null;
3804
3805        try {
3806            // This allocation may result in an OutOfMemoryError
3807            rgb888 = new int[thumbnailSize * thumbnailCount];
3808            bitmaps = new Bitmap[thumbnailCount];
3809        } catch (Throwable e) {
3810            // Fall back to allocating a smaller, fixed thumbnail count
3811            try {
3812                System.gc();
3813                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
3814                bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED];
3815                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
3816            } catch (Throwable ex) {
3817                throw new RuntimeException("Memory allocation failed, thumbnail count too large: " + thumbnailCount);
3818            }
3819        }
3820        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
3821        nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount,
3822                startMs, endMs);
3823
3824        for (; i < thumbnailCount; i++) {
3825            bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3826            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
3827            tmpBuffer.rewind();
3828
3829            if ((newWidth == width) && (newHeight == height)) {
3830                bitmaps[i].copyPixelsFromBuffer(tmpBuffer);
3831            } else {
3832                /* Copy the out rgb buffer to temp bitmap */
3833                tempBitmap.copyPixelsFromBuffer(tmpBuffer);
3834
3835                /* Create a canvas to resize */
3836                final Canvas canvas = new Canvas(bitmaps[i]);
3837                canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3838                                              new Rect(0, 0, width, height), sResizePaint);
3839            }
3840        }
3841
3842        if (tempBitmap != null) {
3843            tempBitmap.recycle();
3844        }
3845        return bitmaps;
3846    }
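
    /*
     * Illustrative usage sketch (editorial addition): extracting eight evenly spaced
     * 96x96 thumbnails between 0 and 8 seconds; note that the count may be clamped to
     * MAX_THUMBNAIL_PERMITTED if the pixel buffer cannot be allocated:
     *
     *     Bitmap[] thumbs = helper.getPixelsList("/sdcard/clip.mp4", 96, 96, 0, 8000, 8);
     */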
3847
3848    /**
3849     * This method generates the audio graph
3850     *
3851     * @param uniqueId The unique id
3852     * @param inFileName The input file name
3853     * @param OutAudiGraphFileName The output audio graph file name
3854     * @param frameDuration The duration of each frame
3855     * @param audioChannels The number of audio channels
3856     * @param samplesCount The total number of samples
3857     * @param listener The ExtractAudioWaveformProgressListener reference
3858     * @param isVideo The flag indicating whether the file is a video file
3859     *
3860     **/
3861    void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
3862            int frameDuration, int audioChannels, int samplesCount,
3863            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
3864        String tempPCMFileName;
3865
3866        mExtractAudioWaveformProgressListener = listener;
3867
3868        /**
3869         * In case of Video, first call will generate the PCM file to make the
3870         * audio graph
3871         */
3872        if (isVideo) {
3873            tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
3874        } else {
3875            tempPCMFileName = mAudioTrackPCMFilePath;
3876        }
3877
3878        /**
3879         * For Video item, generate the PCM
3880         */
3881        if (isVideo) {
3882            nativeGenerateRawAudio(inFileName, tempPCMFileName);
3883        }
3884
3885        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
3886                audioChannels, samplesCount);
3887
3888        /**
3889         * Once the audio graph file is generated, delete the pcm file
3890         */
3891        if (isVideo) {
3892            new File(tempPCMFileName).delete();
3893        }
3894    }
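
    /*
     * Illustrative usage sketch (editorial addition): generating the waveform graph
     * for a video media item; for video input a temporary PCM file is generated first
     * and deleted afterwards. The id, file names and listener are placeholders:
     *
     *     helper.generateAudioGraph("item1", "/sdcard/clip.mp4",
     *             projectDir + "/item1.graph", 20, 2, 1000, waveformListener, true);
     */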
3895
3896    void clearPreviewSurface(Surface surface) {
3897        nativeClearSurface(surface);
3898    }
3899
3900    /**
3901     * Grab the semaphore which arbitrates access to the editor
3902     *
3903     * @throws InterruptedException
3904     */
3905    void lock() throws InterruptedException {
3906        if (Log.isLoggable(TAG, Log.DEBUG)) {
3907            Log.d(TAG, "lock: grabbing semaphore", new Throwable());
3908        }
3909        mLock.acquire();
3910        if (Log.isLoggable(TAG, Log.DEBUG)) {
3911            Log.d(TAG, "lock: grabbed semaphore");
3912        }
3913    }
3914
3915    /**
3916     * Release the semaphore which arbitrates access to the editor
3917     */
3918    void unlock() {
3919        if (Log.isLoggable(TAG, Log.DEBUG)) {
3920            Log.d(TAG, "unlock: releasing semaphore");
3921        }
3922        mLock.release();
3923    }
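
    /*
     * Illustrative usage sketch (editorial addition): lock()/unlock() arbitrate access
     * to the editor; callers typically follow the pattern used by setGeneratePreview()
     * above:
     *
     *     boolean locked = false;
     *     try {
     *         helper.lock();
     *         locked = true;
     *         // ... modify editor state ...
     *     } catch (InterruptedException ex) {
     *         Log.e(TAG, "Interrupted while waiting for the editor lock");
     *     } finally {
     *         if (locked) {
     *             helper.unlock();
     *         }
     *     }
     */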
3924
3925    /**     Native Methods        */
3926    native Properties getMediaProperties(String file) throws IllegalArgumentException,
3927            IllegalStateException, RuntimeException, Exception;
3928
3929    /**
3930     * Get the version of ManualEdit.
3931     *
3932     * @return version of ManualEdit
3933     * @throws RuntimeException if an error occurred
3934     * @see Version
3935     */
3936    private static native Version getVersion() throws RuntimeException;
3937
3938    /**
3939     * Returns the video thumbnail in an array of integers. Output format is
3940     * ARGB8888.
3941     *
3942     * @param pixelArray the array that receives the pixel values
3943     * @param width width of the video thumbnail
3944     * @param height height of the video thumbnail
3945     * @param timeMS desired time of the thumbnail in ms
3946     * @return actual time in ms of the thumbnail generated
3947     * @throws IllegalStateException if the class has not been initialized
3948     * @throws IllegalArgumentException if the pixelArray is not available or
3949     *             one of the dimensions is negative or zero or the time is
3950     *             negative
3951     * @throws RuntimeException on runtime errors in native code
3952     */
3953    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
3954            long timeMS);
3955
3956    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
3957            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
3958
3959    /**
3960     * Releases the JNI and cleans up the core native module. Should be called
3961     * only after init()
3962     *
3963     * @throws IllegalStateException if the method could not be called
3964     */
3965    private native void release() throws IllegalStateException, RuntimeException;
3966
3967    /*
3968     * Clear the preview surface
3969     */
3970    private native void nativeClearSurface(Surface surface);
3971
3972    /**
3973     * Stops the encoding. This method should only be called after encoding has
3974     * started using the method <code>startEncoding</code>
3975     *
3976     * @throws IllegalStateException if the method could not be called
3977     */
3978    private native void stopEncoding() throws IllegalStateException, RuntimeException;
3979
3980
3981    private native void _init(String tempPath, String libraryPath)
3982            throws IllegalArgumentException, IllegalStateException, RuntimeException;
3983
3984    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
3985            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
3986            IllegalStateException, RuntimeException;
3987
3988    private native void nativePopulateSettings(EditSettings editSettings,
3989            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
3990    throws IllegalArgumentException, IllegalStateException, RuntimeException;
3991
3992    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
3993                                                 int surfaceWidth, int surfaceHeight)
3994                                                 throws IllegalArgumentException,
3995                                                 IllegalStateException, RuntimeException;
3996
3997    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
3998            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
3999    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4000
4001    private native void nativeStopPreview();
4002
4003    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
4004            int frameDuration, int channels, int sampleCount);
4005
4006    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
4007
4008    private native int nativeGenerateClip(EditSettings editSettings)
4009    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4010
4011}
4012