MediaArtistNativeHelper.java revision 9c077e44c05f6829dd12067947a387c132dc6eaa
1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media.videoeditor;
18
19import java.io.File;
20import java.io.IOException;
21import java.math.BigDecimal;
22import java.nio.IntBuffer;
23import java.util.ArrayList;
24import java.util.Iterator;
25import java.util.List;
26import android.graphics.Bitmap;
27import android.graphics.BitmapFactory;
28import android.graphics.Canvas;
29import android.graphics.Paint;
30import android.graphics.Rect;
31import android.media.videoeditor.VideoEditor.ExportProgressListener;
32import android.media.videoeditor.VideoEditor.PreviewProgressListener;
33import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
34import android.util.Log;
35import android.util.Pair;
36import android.view.Surface;
37
38/**
39 *This class provide Native methods to be used by MediaArtist {@hide}
40 */
41class MediaArtistNativeHelper {
42
43    static {
44        System.loadLibrary("videoeditor_jni");
45    }
46
47    private final int MAX_THUMBNAIL_PERMITTED = 8;
48
49    private final VideoEditor mVideoEditor;
50
51    public EditSettings mStoryBoardSettings;
52
53    private String mOutputFilename;
54
55    EditSettings mEditSettings = null;
56
57    PreviewClipProperties mClipProperties = null;
58
59    private EditSettings mPreviewEditSettings;
60
61    private AudioSettings mAudioSettings = null;
62
63    private AudioTrack mAudioTrack = null;
64
65    public boolean mInvalidatePreviewArray = true;
66
67    private boolean mRegenerateAudio = true;
68
69    private String mExportFilename = null;
70
71    private boolean mExportDone = false;
72
73    private int mProgressToApp;
74
75    /**
76     *  The resize paint
77     */
78    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
79
80    private String mRenderPreviewOverlayFile;
81    private int   mRenderPreviewRenderingMode;
82
83    private boolean mIsFirstProgress;
84
85    public static final int TASK_LOADING_SETTINGS = 1;
86
87    public static final int TASK_ENCODING = 2;
88
89    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
90
91    // Processing indication
92    public static final int PROCESSING_NONE          = 0;
93    public static final int PROCESSING_AUDIO_PCM     = 1;
94    public static final int PROCESSING_TRANSITION    = 2;
95    public static final int PROCESSING_KENBURNS      = 3;
96    public static final int PROCESSING_INTERMEDIATE1 = 11;
97    public static final int PROCESSING_INTERMEDIATE2 = 12;
98    public static final int PROCESSING_INTERMEDIATE3 = 13;
99    public static final int PROCESSING_EXPORT        = 20;
100
101    private int    mProcessingState;
102    private Object mProcessingObject;
103
104    private PreviewProgressListener mPreviewProgressListener;
105    private ExportProgressListener mExportProgressListener;
106    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
107    private MediaProcessingProgressListener      mMediaProcessingProgressListener;
108    private final String mProjectPath;
109
110    private long mPreviewProgress;
111
112    private String mAudioTrackPCMFilePath;
113
114    int mTotalClips = 0;
115
116    int mPreviewEffectsSize = 0;
117
118    private boolean mErrorFlagSet = false;
119
120    @SuppressWarnings("unused")
121    private int mManualEditContext;
122
123
124    List<Effect> mMediaEffectList;
125
126    List<Overlay> mMediaOverLayList;
127
128    /* Listeners */
129
130    /**
131     * Interface definition for a listener to be invoked when there is an update
132     * in a running task.
133     */
134    public interface OnProgressUpdateListener {
135        /**
136         * Called when there is an update.
137         *
138         * @param taskId id of the task reporting an update.
139         * @param progress progress of the task [0..100].
140         * @see BasicEdit#TASK_ENCODING
141         */
142        public void OnProgressUpdate(int taskId, int progress);
143    }
144
145    /** Defines the version. */
146    public final class Version {
147
148        /** Major version number */
149        public int major;
150
151        /** Minor version number */
152        public int minor;
153
154        /** Revision number */
155        public int revision;
156
157        /** VIDEOEDITOR major version number */
158        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
159
160        /** VIDEOEDITOR minor version number */
161        private static final int VIDEOEDITOR_MINOR_VERSION = 0;
162
163        /** VIDEOEDITOR revision number */
164        private static final int VIDEOEDITOR_REVISION_VERSION = 1;
165
166        /** Method which returns the current VIDEOEDITOR version */
167        public Version getVersion() {
168            Version version = new Version();
169
170            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
171            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
172            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
173
174            return version;
175        }
176    }
177
178    /**
179     * Defines output audio formats.
180     */
181    public final class AudioFormat {
182        /** No audio present in output clip. Used to generate video only clip */
183        public static final int NO_AUDIO = 0;
184
185        /** AMR Narrow Band. */
186        public static final int AMR_NB = 1;
187
188        /** Advanced Audio Coding (AAC). */
189        public static final int AAC = 2;
190
191        /** Advanced Audio Codec Plus (HE-AAC v1). */
192        public static final int AAC_PLUS = 3;
193
194        /** Advanced Audio Codec Plus (HE-AAC v2). */
195        public static final int ENHANCED_AAC_PLUS = 4;
196
197        /** MPEG layer 3 (MP3). */
198        public static final int MP3 = 5;
199
200        /** Enhanced Variable RateCodec (EVRC). */
201        public static final int EVRC = 6;
202
203        /** PCM (PCM). */
204        public static final int PCM = 7;
205
206        /** No transcoding. Output audio format is same as input audio format */
207        public static final int NULL_AUDIO = 254;
208
209        /** Unsupported audio format. */
210        public static final int UNSUPPORTED_AUDIO = 255;
211    }
212
213    /**
214     * Defines audio sampling frequencies.
215     */
216    public final class AudioSamplingFrequency {
217        /**
218         * Default sampling frequency. Uses the default frequency for a specific
219         * audio format. For AAC the only supported (and thus default) sampling
220         * frequency is 16 kHz. For this audio format the sampling frequency in
221         * the OutputParams.
222         **/
223        public static final int FREQ_DEFAULT = 0;
224
225        /** Audio sampling frequency of 8000 Hz. */
226        public static final int FREQ_8000 = 8000;
227
228        /** Audio sampling frequency of 11025 Hz. */
229        public static final int FREQ_11025 = 11025;
230
231        /** Audio sampling frequency of 12000 Hz. */
232        public static final int FREQ_12000 = 12000;
233
234        /** Audio sampling frequency of 16000 Hz. */
235        public static final int FREQ_16000 = 16000;
236
237        /** Audio sampling frequency of 22050 Hz. */
238        public static final int FREQ_22050 = 22050;
239
240        /** Audio sampling frequency of 24000 Hz. */
241        public static final int FREQ_24000 = 24000;
242
243        /** Audio sampling frequency of 32000 Hz. */
244        public static final int FREQ_32000 = 32000;
245
246        /** Audio sampling frequency of 44100 Hz. */
247        public static final int FREQ_44100 = 44100;
248
249        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
250        public static final int FREQ_48000 = 48000;
251    }
252
253    /**
254     * Defines the supported fixed audio and video bitrates. These values are
255     * for output audio video only.
256     */
257    public final class Bitrate {
258        /** Variable bitrate. Means no bitrate regulation */
259        public static final int VARIABLE = -1;
260
261        /** An undefined bitrate. */
262        public static final int UNDEFINED = 0;
263
264        /** A bitrate of 9.2 kbits/s. */
265        public static final int BR_9_2_KBPS = 9200;
266
267        /** A bitrate of 12.2 kbits/s. */
268        public static final int BR_12_2_KBPS = 12200;
269
270        /** A bitrate of 16 kbits/s. */
271        public static final int BR_16_KBPS = 16000;
272
273        /** A bitrate of 24 kbits/s. */
274        public static final int BR_24_KBPS = 24000;
275
276        /** A bitrate of 32 kbits/s. */
277        public static final int BR_32_KBPS = 32000;
278
279        /** A bitrate of 48 kbits/s. */
280        public static final int BR_48_KBPS = 48000;
281
282        /** A bitrate of 64 kbits/s. */
283        public static final int BR_64_KBPS = 64000;
284
285        /** A bitrate of 96 kbits/s. */
286        public static final int BR_96_KBPS = 96000;
287
288        /** A bitrate of 128 kbits/s. */
289        public static final int BR_128_KBPS = 128000;
290
291        /** A bitrate of 192 kbits/s. */
292        public static final int BR_192_KBPS = 192000;
293
294        /** A bitrate of 256 kbits/s. */
295        public static final int BR_256_KBPS = 256000;
296
297        /** A bitrate of 288 kbits/s. */
298        public static final int BR_288_KBPS = 288000;
299
300        /** A bitrate of 384 kbits/s. */
301        public static final int BR_384_KBPS = 384000;
302
303        /** A bitrate of 512 kbits/s. */
304        public static final int BR_512_KBPS = 512000;
305
306        /** A bitrate of 800 kbits/s. */
307        public static final int BR_800_KBPS = 800000;
308
309        /** A bitrate of 2 Mbits/s. */
310        public static final int BR_2_MBPS = 2000000;
311
312        /** A bitrate of 5 Mbits/s. */
313        public static final int BR_5_MBPS = 5000000;
314
315        /** A bitrate of 8 Mbits/s. */
316        public static final int BR_8_MBPS = 8000000;
317    }
318
319    /**
320     * Defines all supported file types.
321     */
322    public final class FileType {
323        /** 3GPP file type. */
324        public static final int THREE_GPP = 0;
325
326        /** MP4 file type. */
327        public static final int MP4 = 1;
328
329        /** AMR file type. */
330        public static final int AMR = 2;
331
332        /** MP3 audio file type. */
333        public static final int MP3 = 3;
334
335        /** PCM audio file type. */
336        public static final int PCM = 4;
337
338        /** JPEG image file type. */
339        public static final int JPG = 5;
340
341        /** GIF image file type. */
342        public static final int GIF = 6;
343
344        /** PNG image file type. */
345        public static final int PNG = 7;
346
347        /** Unsupported file type. */
348        public static final int UNSUPPORTED = 255;
349    }
350
351    /**
352     * Defines rendering types. Rendering can only be applied to files
353     * containing video streams.
354     **/
355    public final class MediaRendering {
356        /**
357         * Resize to fit the output video with changing the aspect ratio if
358         * needed.
359         */
360        public static final int RESIZING = 0;
361
362        /**
363         * Crop the input video to fit it with the output video resolution.
364         **/
365        public static final int CROPPING = 1;
366
367        /**
368         * Resize to fit the output video resolution but maintain the aspect
369         * ratio. This framing type adds black borders if needed.
370         */
371        public static final int BLACK_BORDERS = 2;
372    }
373
374    /**
375     * Defines the results.
376     */
377    public final class Result {
378        /** No error. result OK */
379        public static final int NO_ERROR = 0;
380
381        /** File not found */
382        public static final int ERR_FILE_NOT_FOUND = 1;
383
384        /**
385         * In case of UTF8 conversion, the size of the converted path will be
386         * more than the corresponding allocated buffer.
387         */
388        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
389
390        /** Invalid file type. */
391        public static final int ERR_INVALID_FILE_TYPE = 3;
392
393        /** Invalid effect kind. */
394        public static final int ERR_INVALID_EFFECT_KIND = 4;
395
396        /** Invalid video effect. */
397        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
398
399        /** Invalid audio effect. */
400        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
401
402        /** Invalid video transition. */
403        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
404
405        /** Invalid audio transition. */
406        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
407
408        /** Invalid encoding frame rate. */
409        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
410
411        /** External effect is called but this function is not set. */
412        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
413
414        /** External transition is called but this function is not set. */
415        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
416
417        /** Begin time cut is larger than the video clip duration. */
418        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
419
420        /** Begin cut time is larger or equal than end cut. */
421        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
422
423        /** Two consecutive transitions are overlapping on one clip. */
424        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
425
426        /** Internal error, type size mismatch. */
427        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
428
429        /** An input 3GPP file is invalid/corrupted. */
430        public static final int ERR_INVALID_3GPP_FILE = 16;
431
432        /** A file contains an unsupported video format. */
433        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
434
435        /** A file contains an unsupported audio format. */
436        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
437
438        /** A file format is not supported. */
439        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
440
441        /** An input clip has an unexpectedly large Video AU. */
442        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
443
444        /** An input clip has an unexpectedly large Audio AU. */
445        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
446
447        /** An input clip has a corrupted Audio AU. */
448        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
449
450        /** The video encoder encountered an Access Unit error. */
451        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
452
453        /** Unsupported video format for Video Editing. */
454        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
455
456        /** Unsupported H263 profile for Video Editing. */
457        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
458
459        /** Unsupported MPEG-4 profile for Video Editing. */
460        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
461
462        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
463        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
464
465        /** Unsupported audio format for Video Editing. */
466        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
467
468        /** File contains no supported stream. */
469        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
470
471        /** File contains no video stream or an unsupported video stream. */
472        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
473
474        /** Internal error, clip analysis version mismatch. */
475        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
476
477        /**
478         * At least one of the clip analysis has been generated on another
479         * platform (WIN32, ARM, etc.).
480         */
481        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
482
483        /** Clips don't have the same video format (H263 or MPEG4). */
484        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
485
486        /** Clips don't have the same frame size. */
487        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
488
489        /** Clips don't have the same MPEG-4 time scale. */
490        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
491
492        /** Clips don't have the same use of MPEG-4 data partitioning. */
493        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
494
495        /** MP3 clips can't be assembled. */
496        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
497
498        /**
499         * The input 3GPP file does not contain any supported audio or video
500         * track.
501         */
502        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
503
504        /**
505         * The Volume of the added audio track (AddVolume) must be strictly
506         * superior than zero.
507         */
508        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
509
510        /**
511         * The time at which an audio track is added can't be higher than the
512         * input video track duration..
513         */
514        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
515
516        /** The audio track file format setting is undefined. */
517        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
518
519        /** The added audio track stream has an unsupported format. */
520        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
521
522        /** The audio mixing feature doesn't support the audio track type. */
523        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
524
525        /** The audio mixing feature doesn't support MP3 audio tracks. */
526        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
527
528        /**
529         * An added audio track limits the available features: uiAddCts must be
530         * 0 and bRemoveOriginal must be true.
531         */
532        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
533
534        /**
535         * An added audio track limits the available features: uiAddCts must be
536         * 0 and bRemoveOriginal must be true.
537         */
538        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
539
540        /** Input audio track is not of a type that can be mixed with output. */
541        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
542
543        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
544        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
545
546        /**
547         * An added EVRC audio track limit the available features: uiAddCts must
548         * be 0 and bRemoveOriginal must be true.
549         */
550        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
551
552        /** H263 profiles other than 0 are not supported. */
553        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
554
555        /** File contains no video stream or an unsupported video stream. */
556        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
557
558        /** Transcoding of the input file(s) is necessary. */
559        public static final int WAR_TRANSCODING_NECESSARY = 53;
560
561        /**
562         * The size of the output file will exceed the maximum configured value.
563         */
564        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
565
566        /** The time scale is too big. */
567        public static final int WAR_TIMESCALE_TOO_BIG = 55;
568
569        /** The year is out of range */
570        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
571
572        /** The directory could not be opened */
573        public static final int ERR_DIR_OPEN_FAILED = 57;
574
575        /** The directory could not be read */
576        public static final int ERR_DIR_READ_FAILED = 58;
577
578        /** There are no more entries in the current directory */
579        public static final int ERR_DIR_NO_MORE_ENTRY = 59;
580
581        /** The input parameter/s has error */
582        public static final int ERR_PARAMETER = 60;
583
584        /** There is a state machine error */
585        public static final int ERR_STATE = 61;
586
587        /** Memory allocation failed */
588        public static final int ERR_ALLOC = 62;
589
590        /** Context is invalid */
591        public static final int ERR_BAD_CONTEXT = 63;
592
593        /** Context creation failed */
594        public static final int ERR_CONTEXT_FAILED = 64;
595
596        /** Invalid stream ID */
597        public static final int ERR_BAD_STREAM_ID = 65;
598
599        /** Invalid option ID */
600        public static final int ERR_BAD_OPTION_ID = 66;
601
602        /** The option is write only */
603        public static final int ERR_WRITE_ONLY = 67;
604
605        /** The option is read only */
606        public static final int ERR_READ_ONLY = 68;
607
608        /** The feature is not implemented in this version */
609        public static final int ERR_NOT_IMPLEMENTED = 69;
610
611        /** The media type is not supported */
612        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
613
614        /** No data to be encoded */
615        public static final int WAR_NO_DATA_YET = 71;
616
617        /** No data to be decoded */
618        public static final int WAR_NO_MORE_STREAM = 72;
619
620        /** Time stamp is invalid */
621        public static final int WAR_INVALID_TIME = 73;
622
623        /** No more data to be decoded */
624        public static final int WAR_NO_MORE_AU = 74;
625
626        /** Semaphore timed out */
627        public static final int WAR_TIME_OUT = 75;
628
629        /** Memory buffer is full */
630        public static final int WAR_BUFFER_FULL = 76;
631
632        /** Server has asked for redirection */
633        public static final int WAR_REDIRECT = 77;
634
635        /** Too many streams in input */
636        public static final int WAR_TOO_MUCH_STREAMS = 78;
637
638        /** The file cannot be opened/ written into as it is locked */
639        public static final int ERR_FILE_LOCKED = 79;
640
641        /** The file access mode is invalid */
642        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
643
644        /** The file pointer points to an invalid location */
645        public static final int ERR_FILE_INVALID_POSITION = 81;
646
647        /** Invalid string */
648        public static final int ERR_STR_BAD_STRING = 94;
649
650        /** The input string cannot be converted */
651        public static final int ERR_STR_CONV_FAILED = 95;
652
653        /** The string size is too large */
654        public static final int ERR_STR_OVERFLOW = 96;
655
656        /** Bad string arguments */
657        public static final int ERR_STR_BAD_ARGS = 97;
658
659        /** The string value is larger than maximum size allowed */
660        public static final int WAR_STR_OVERFLOW = 98;
661
662        /** The string value is not present in this comparison operation */
663        public static final int WAR_STR_NOT_FOUND = 99;
664
665        /** The thread is not started */
666        public static final int ERR_THREAD_NOT_STARTED = 100;
667
668        /** Trancoding done warning */
669        public static final int WAR_TRANSCODING_DONE = 101;
670
671        /** Unsupported mediatype */
672        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
673
674        /** Input file contains invalid/unsupported streams */
675        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
676
677        /** Invalid input file */
678        public static final int ERR_INVALID_INPUT_FILE = 104;
679
680        /** Invalid output video format */
681        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
682
683        /** Invalid output video frame size */
684        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
685
686        /** Invalid output video frame rate */
687        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
688
689        /** Invalid output audio format */
690        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
691
692        /** Invalid video frame size for H.263 */
693        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
694
695        /** Invalid video frame rate for H.263 */
696        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
697
698        /** invalid playback duration */
699        public static final int ERR_DURATION_IS_NULL = 111;
700
701        /** Invalid H.263 profile in file */
702        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
703
704        /** Invalid AAC sampling frequency */
705        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
706
707        /** Audio conversion failure */
708        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
709
710        /** Invalid trim start and end times */
711        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
712
713        /** End time smaller than start time for trim */
714        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
715
716        /** Output file size is small */
717        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
718
719        /** Output video bitrate is too low */
720        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
721
722        /** Output audio bitrate is too low */
723        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
724
725        /** Output video bitrate is too high */
726        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
727
728        /** Output audio bitrate is too high */
729        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
730
731        /** Output file size is too small */
732        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
733
734        /** Unknown stream type */
735        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
736
737        /** Invalid metadata in input stream */
738        public static final int WAR_READER_NO_METADATA = 124;
739
740        /** Invalid file reader info warning */
741        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
742
743        /** Warning to indicate the the writer is being stopped */
744        public static final int WAR_WRITER_STOP_REQ = 131;
745
746        /** Video decoder failed to provide frame for transcoding */
747        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
748
749        /** Video deblocking filter is not implemented */
750        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
751
752        /** H.263 decoder profile not supported */
753        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
754
755        /** The input file contains unsupported H.263 profile */
756        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
757
758        /** There is no more space to store the output file */
759        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
760
761        /** Internal error. */
762        public static final int ERR_INTERNAL = 255;
763
764    }
765
766    /**
767     * Defines output video formats.
768     */
769    public final class VideoFormat {
770        /** No video present in output clip. Used to generate audio only clip */
771        public static final int NO_VIDEO = 0;
772
773        /** H263 baseline format. */
774        public static final int H263 = 1;
775
776        /** MPEG4 video Simple Profile format. */
777        public static final int MPEG4 = 2;
778
779        /** MPEG4 video Simple Profile format with support for EMP. */
780        public static final int MPEG4_EMP = 3;
781
782        /** H264 video */
783        public static final int H264 = 4;
784
785        /** No transcoding. Output video format is same as input video format */
786        public static final int NULL_VIDEO = 254;
787
788        /** Unsupported video format. */
789        public static final int UNSUPPORTED = 255;
790    }
791
792    /** Defines video profiles and levels. */
793    public final class VideoProfile {
794        /** MPEG4, Simple Profile, Level 0. */
795        public static final int MPEG4_SP_LEVEL_0 = 0;
796
797        /** MPEG4, Simple Profile, Level 0B. */
798        public static final int MPEG4_SP_LEVEL_0B = 1;
799
800        /** MPEG4, Simple Profile, Level 1. */
801        public static final int MPEG4_SP_LEVEL_1 = 2;
802
803        /** MPEG4, Simple Profile, Level 2. */
804        public static final int MPEG4_SP_LEVEL_2 = 3;
805
806        /** MPEG4, Simple Profile, Level 3. */
807        public static final int MPEG4_SP_LEVEL_3 = 4;
808
809        /** H263, Profile 0, Level 10. */
810        public static final int H263_PROFILE_0_LEVEL_10 = 5;
811
812        /** H263, Profile 0, Level 20. */
813        public static final int H263_PROFILE_0_LEVEL_20 = 6;
814
815        /** H263, Profile 0, Level 30. */
816        public static final int H263_PROFILE_0_LEVEL_30 = 7;
817
818        /** H263, Profile 0, Level 40. */
819        public static final int H263_PROFILE_0_LEVEL_40 = 8;
820
821        /** H263, Profile 0, Level 45. */
822        public static final int H263_PROFILE_0_LEVEL_45 = 9;
823
824        /** MPEG4, Simple Profile, Level 4A. */
825        public static final int MPEG4_SP_LEVEL_4A = 10;
826
827        /** MPEG4, Simple Profile, Level 0. */
828        public static final int MPEG4_SP_LEVEL_5 = 11;
829
830        /** H264, Profile 0, Level 1. */
831        public static final int H264_PROFILE_0_LEVEL_1 = 12;
832
833        /** H264, Profile 0, Level 1b. */
834        public static final int H264_PROFILE_0_LEVEL_1b = 13;
835
836        /** H264, Profile 0, Level 1.1 */
837        public static final int H264_PROFILE_0_LEVEL_1_1 = 14;
838
839        /** H264, Profile 0, Level 1.2 */
840        public static final int H264_PROFILE_0_LEVEL_1_2 = 15;
841
842        /** H264, Profile 0, Level 1.3 */
843        public static final int H264_PROFILE_0_LEVEL_1_3 = 16;
844
845        /** H264, Profile 0, Level 2. */
846        public static final int H264_PROFILE_0_LEVEL_2 = 17;
847
848        /** H264, Profile 0, Level 2.1 */
849        public static final int H264_PROFILE_0_LEVEL_2_1 = 18;
850
851        /** H264, Profile 0, Level 2.2 */
852        public static final int H264_PROFILE_0_LEVEL_2_2 = 19;
853
854        /** H264, Profile 0, Level 3. */
855        public static final int H264_PROFILE_0_LEVEL_3 = 20;
856
857        /** H264, Profile 0, Level 3.1 */
858        public static final int H264_PROFILE_0_LEVEL_3_1 = 21;
859
860        /** H264, Profile 0, Level 3.2 */
861        public static final int H264_PROFILE_0_LEVEL_3_2 = 22;
862
863        /** H264, Profile 0, Level 4. */
864        public static final int H264_PROFILE_0_LEVEL_4 = 23;
865
866        /** H264, Profile 0, Level 4.1 */
867        public static final int H264_PROFILE_0_LEVEL_4_1 = 24;
868
869        /** H264, Profile 0, Level 4.2 */
870        public static final int H264_PROFILE_0_LEVEL_4_2 = 25;
871
872        /** H264, Profile 0, Level 5. */
873        public static final int H264_PROFILE_0_LEVEL_5 = 26;
874
875        /** H264, Profile 0, Level 5.1 */
876        public static final int H264_PROFILE_0_LEVEL_5_1 = 27;
877
878        /** Profile out of range. */
879        public static final int OUT_OF_RANGE = 255;
880    }
881
882    /** Defines video frame sizes. */
883    public final class VideoFrameSize {
884
885        public static final int SIZE_UNDEFINED = -1;
886
887        /** SQCIF 128 x 96 pixels. */
888        public static final int SQCIF = 0;
889
890        /** QQVGA 160 x 120 pixels. */
891        public static final int QQVGA = 1;
892
893        /** QCIF 176 x 144 pixels. */
894        public static final int QCIF = 2;
895
896        /** QVGA 320 x 240 pixels. */
897        public static final int QVGA = 3;
898
899        /** CIF 352 x 288 pixels. */
900        public static final int CIF = 4;
901
902        /** VGA 640 x 480 pixels. */
903        public static final int VGA = 5;
904
905        /** WVGA 800 X 480 pixels */
906        public static final int WVGA = 6;
907
908        /** NTSC 720 X 480 pixels */
909        public static final int NTSC = 7;
910
911        /** 640 x 360 */
912        public static final int nHD = 8;
913
914        /** 854 x 480 */
915        public static final int WVGA16x9 = 9;
916
917        /** 720p 1280 X 720 */
918        public static final int V720p = 10;
919
920        /** 1080 x 720 */
921        public static final int W720p = 11;
922
923        /** 1080 960 x 720 */
924        public static final int S720p = 12;
925    }
926
927    /**
928     * Defines output video frame rates.
929     */
930    public final class VideoFrameRate {
931        /** Frame rate of 5 frames per second. */
932        public static final int FR_5_FPS = 0;
933
934        /** Frame rate of 7.5 frames per second. */
935        public static final int FR_7_5_FPS = 1;
936
937        /** Frame rate of 10 frames per second. */
938        public static final int FR_10_FPS = 2;
939
940        /** Frame rate of 12.5 frames per second. */
941        public static final int FR_12_5_FPS = 3;
942
943        /** Frame rate of 15 frames per second. */
944        public static final int FR_15_FPS = 4;
945
946        /** Frame rate of 20 frames per second. */
947        public static final int FR_20_FPS = 5;
948
949        /** Frame rate of 25 frames per second. */
950        public static final int FR_25_FPS = 6;
951
952        /** Frame rate of 30 frames per second. */
953        public static final int FR_30_FPS = 7;
954    }
955
956    /**
957     * Defines Video Effect Types.
958     */
959    public static class VideoEffect {
960
961        public static final int NONE = 0;
962
963        public static final int FADE_FROM_BLACK = 8;
964
965        public static final int CURTAIN_OPENING = 9;
966
967        public static final int FADE_TO_BLACK = 16;
968
969        public static final int CURTAIN_CLOSING = 17;
970
971        public static final int EXTERNAL = 256;
972
973        public static final int BLACK_AND_WHITE = 257;
974
975        public static final int PINK = 258;
976
977        public static final int GREEN = 259;
978
979        public static final int SEPIA = 260;
980
981        public static final int NEGATIVE = 261;
982
983        public static final int FRAMING = 262;
984
985        public static final int TEXT = 263;
986
987        public static final int ZOOM_IN = 264;
988
989        public static final int ZOOM_OUT = 265;
990
991        public static final int FIFTIES = 266;
992
993        public static final int COLORRGB16 = 267;
994
995        public static final int GRADIENT = 268;
996    }
997
998    /**
999     * Defines the video transitions.
1000     */
1001    public static class VideoTransition {
1002        /** No transition */
1003        public static final int NONE = 0;
1004
1005        /** Cross fade transition */
1006        public static final int CROSS_FADE = 1;
1007
1008        /** External transition. Currently not available. */
1009        public static final int EXTERNAL = 256;
1010
1011        /** AlphaMagic transition. */
1012        public static final int ALPHA_MAGIC = 257;
1013
1014        /** Slide transition. */
1015        public static final int SLIDE_TRANSITION = 258;
1016
1017        /** Fade to black transition. */
1018        public static final int FADE_BLACK = 259;
1019    }
1020
1021    /**
1022     * Defines settings for the AlphaMagic transition
1023     */
1024    public static class AlphaMagicSettings {
1025        /** Name of the alpha file (JPEG file). */
1026        public String file;
1027
1028        /** Blending percentage [0..100] 0 = no blending. */
1029        public int blendingPercent;
1030
1031        /** Invert the default rotation direction of the AlphaMagic effect. */
1032        public boolean invertRotation;
1033
1034        public int rgbWidth;
1035        public int rgbHeight;
1036    }
1037
1038    /** Defines the direction of the Slide transition. */
1039    public static final class SlideDirection {
1040
1041        /** Right out left in. */
1042        public static final int RIGHT_OUT_LEFT_IN = 0;
1043
1044        /** Left out right in. */
1045        public static final int LEFT_OUT_RIGTH_IN = 1;
1046
1047        /** Top out bottom in. */
1048        public static final int TOP_OUT_BOTTOM_IN = 2;
1049
1050        /** Bottom out top in */
1051        public static final int BOTTOM_OUT_TOP_IN = 3;
1052    }
1053
1054    /** Defines the Slide transition settings. */
1055    public static class SlideTransitionSettings {
1056        /**
1057         * Direction of the slide transition. See {@link SlideDirection
1058         * SlideDirection} for valid values.
1059         */
1060        public int direction;
1061    }
1062
1063    /**
1064     * Defines the settings of a single clip.
1065     */
1066    public static class ClipSettings {
1067
1068        /**
1069         * The path to the clip file.
1070         * <p>
1071         * File format of the clip, it can be:
1072         * <ul>
1073         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
1074         * <li>JPG file
1075         * </ul>
1076         */
1077
1078        public String clipPath;
1079
1080        /**
1081         * The path of the decoded file. This is used only for image files.
1082         */
1083        public String clipDecodedPath;
1084
1085        /**
1086         * The path of the Original file. This is used only for image files.
1087         */
1088        public String clipOriginalPath;
1089
1090        /**
1091         * File type of the clip. See {@link FileType FileType} for valid
1092         * values.
1093         */
1094        public int fileType;
1095
1096        /** Begin of the cut in the clip in milliseconds. */
1097        public int beginCutTime;
1098
1099        /**
1100         * End of the cut in the clip in milliseconds. Set both
1101         * <code>beginCutTime</code> and <code>endCutTime</code> to
1102         * <code>0</code> to get the full length of the clip without a cut. In
1103         * case of JPG clip, this is the duration of the JPEG file.
1104         */
1105        public int endCutTime;
1106
1107        /**
1108         * Begin of the cut in the clip in percentage of the file duration.
1109         */
1110        public int beginCutPercent;
1111
1112        /**
1113         * End of the cut in the clip in percentage of the file duration. Set
1114         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
1115         * <code>0</code> to get the full length of the clip without a cut.
1116         */
1117        public int endCutPercent;
1118
1119        /** Enable panning and zooming. */
1120        public boolean panZoomEnabled;
1121
1122        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1123        public int panZoomPercentStart;
1124
1125        /** Top left X coordinate at start of clip. */
1126        public int panZoomTopLeftXStart;
1127
1128        /** Top left Y coordinate at start of clip. */
1129        public int panZoomTopLeftYStart;
1130
1131        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
1132        public int panZoomPercentEnd;
1133
1134        /** Top left X coordinate at end of clip. */
1135        public int panZoomTopLeftXEnd;
1136
1137        /** Top left Y coordinate at end of clip. */
1138        public int panZoomTopLeftYEnd;
1139
1140        /**
1141         * Set The media rendering. See {@link MediaRendering MediaRendering}
1142         * for valid values.
1143         */
1144        public int mediaRendering;
1145
1146        /**
1147         * RGB width and Height
1148         */
1149         public int rgbWidth;
1150         public int rgbHeight;
1151    }
1152
1153    /**
1154     * Defines settings for a transition.
1155     */
1156    public static class TransitionSettings {
1157
1158        /** Duration of the transition in msec. */
1159        public int duration;
1160
1161        /**
1162         * Transition type for video. See {@link VideoTransition
1163         * VideoTransition} for valid values.
1164         */
1165        public int videoTransitionType;
1166
1167        /**
1168         * Transition type for audio. See {@link AudioTransition
1169         * AudioTransition} for valid values.
1170         */
1171        public int audioTransitionType;
1172
1173        /**
1174         * Transition behaviour. See {@link TransitionBehaviour
1175         * TransitionBehaviour} for valid values.
1176         */
1177        public int transitionBehaviour;
1178
1179        /**
1180         * Settings for AlphaMagic transition. Only needs to be set if
1181         * <code>videoTransitionType</code> is set to
1182         * <code>VideoTransition.ALPHA_MAGIC</code>. See
1183         * {@link AlphaMagicSettings AlphaMagicSettings}.
1184         */
1185        public AlphaMagicSettings alphaSettings;
1186
1187        /**
1188         * Settings for the Slide transition. See
1189         * {@link SlideTransitionSettings SlideTransitionSettings}.
1190         */
1191        public SlideTransitionSettings slideSettings;
1192    }
1193
1194    public static final class AudioTransition {
1195        /** No audio transition. */
1196        public static final int NONE = 0;
1197
1198        /** Cross-fade audio transition. */
1199        public static final int CROSS_FADE = 1;
1200    }
1201
1202    /**
1203     * Defines transition behaviours.
1204     **/
1205
1206    public static final class TransitionBehaviour {
1207
1208        /** The transition uses an increasing speed. */
1209        public static final int SPEED_UP = 0;
1210
1211        /** The transition uses a linear (constant) speed. */
1212        public static final int LINEAR = 1;
1213
1214        /** The transition uses a decreasing speed. */
1215        public static final int SPEED_DOWN = 2;
1216
1217        /**
1218         * The transition uses a constant speed, but slows down in the middle
1219         * section.
1220         */
1221        public static final int SLOW_MIDDLE = 3;
1222
1223        /**
1224         * The transition uses a constant speed, but increases speed in the
1225         * middle section.
1226         */
1227        public static final int FAST_MIDDLE = 4;
1228    }
1229
1230    /** Defines settings for the background music. */
1231    public static class BackgroundMusicSettings {
1232
1233        /** Background music file. */
1234        public String file;
1235
1236        /** File type. See {@link FileType FileType} for valid values. */
1237        public int fileType;
1238
1239        /**
1240         * Insertion time in milliseconds, in the output video where the
1241         * background music must be inserted.
1242         */
1243        public long insertionTime;
1244
1245        /**
1246         * Volume, as a percentage of the background music track, to use. If
1247         * this field is set to 100, the background music will replace the audio
1248         * from the video input file(s).
1249         */
1250        public int volumePercent;
1251
1252        /**
1253         * Start time in milliseconds in the background muisc file from where
1254         * the background music should loop. Set both <code>beginLoop</code> and
1255         * <code>endLoop</code> to <code>0</code> to disable looping.
1256         */
1257        public long beginLoop;
1258
1259        /**
1260         * End time in milliseconds in the background music file to where the
1261         * background music should loop. Set both <code>beginLoop</code> and
1262         * <code>endLoop</code> to <code>0</code> to disable looping.
1263         */
1264        public long endLoop;
1265
1266        public boolean enableDucking;
1267
1268        public int duckingThreshold;
1269
1270        public int lowVolume;
1271
1272        public boolean isLooping;
1273
1274    }
1275
1276    /** Defines settings for an effect. */
1277    public static class AudioEffect {
1278        /** No audio effect. */
1279        public static final int NONE = 0;
1280
1281        /** Fade-in effect. */
1282        public static final int FADE_IN = 8;
1283
1284        /** Fade-out effect. */
1285        public static final int FADE_OUT = 16;
1286    }
1287
1288    /** Defines the effect settings. */
1289    public static class EffectSettings {
1290
1291        /** Start time of the effect in milliseconds. */
1292        public int startTime;
1293
1294        /** Duration of the effect in milliseconds. */
1295        public int duration;
1296
1297        /**
1298         * Video effect type. See {@link VideoEffect VideoEffect} for valid
1299         * values.
1300         */
1301        public int videoEffectType;
1302
1303        /**
1304         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
1305         * values.
1306         */
1307        public int audioEffectType;
1308
1309        /**
1310         * Start time of the effect in percents of the duration of the clip. A
1311         * value of 0 percent means start time is from the beginning of the
1312         * clip.
1313         */
1314        public int startPercent;
1315
1316        /**
1317         * Duration of the effect in percents of the duration of the clip.
1318         */
1319        public int durationPercent;
1320
1321        /**
1322         * Framing file.
1323         * <p>
1324         * This field is only used when the field <code>videoEffectType</code>
1325         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1326         * this field is ignored.
1327         */
1328        public String framingFile;
1329
1330        /**
1331         * Framing buffer.
1332         * <p>
1333         * This field is only used when the field <code>videoEffectType</code>
1334         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1335         * this field is ignored.
1336         */
1337        public int[] framingBuffer;
1338
1339        /**
1340         * Bitmap type Can be from RGB_565 (4), ARGB_4444 (5), ARGB_8888 (6);
1341         **/
1342
1343        public int bitmapType;
1344
1345        public int width;
1346
1347        public int height;
1348
1349        /**
1350         * Top left x coordinate. This coordinate is used to set the x
1351         * coordinate of the picture in the framing file when the framing file
1352         * is selected. The x coordinate is also used to set the location of the
1353         * text in the text effect.
1354         * <p>
1355         * This field is only used when the field <code>videoEffectType</code>
1356         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1357         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1358         * ignored.
1359         */
1360        public int topLeftX;
1361
1362        /**
1363         * Top left y coordinate. This coordinate is used to set the y
1364         * coordinate of the picture in the framing file when the framing file
1365         * is selected. The y coordinate is also used to set the location of the
1366         * text in the text effect.
1367         * <p>
1368         * This field is only used when the field <code>videoEffectType</code>
1369         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
1370         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
1371         * ignored.
1372         */
1373        public int topLeftY;
1374
1375        /**
1376         * Should the frame be resized or not. If this field is set to
1377         * <link>true</code> then the frame size is matched with the output
1378         * video size.
1379         * <p>
1380         * This field is only used when the field <code>videoEffectType</code>
1381         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
1382         * this field is ignored.
1383         */
1384        public boolean framingResize;
1385
1386        /**
1387         * Size to which the framing buffer needs to be resized to
1388         * This is valid only if framingResize is true
1389         */
1390        public int framingScaledSize;
1391        /**
1392         * Text to insert in the video.
1393         * <p>
1394         * This field is only used when the field <code>videoEffectType</code>
1395         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1396         * field is ignored.
1397         */
1398        public String text;
1399
1400        /**
1401         * Text attributes for the text to insert in the video.
1402         * <p>
1403         * This field is only used when the field <code>videoEffectType</code>
1404         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
1405         * field is ignored. For more details about this field see the
1406         * integration guide.
1407         */
1408        public String textRenderingData;
1409
1410        /** Width of the text buffer in pixels. */
1411        public int textBufferWidth;
1412
1413        /** Height of the text buffer in pixels. */
1414        public int textBufferHeight;
1415
1416        /**
1417         * Processing rate for the fifties effect. A high value (e.g. 30)
1418         * results in high effect strength.
1419         * <p>
1420         * This field is only used when the field <code>videoEffectType</code>
1421         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
1422         * this field is ignored.
1423         */
1424        public int fiftiesFrameRate;
1425
1426        /**
1427         * RGB 16 color of the RGB16 and gradient color effect.
1428         * <p>
1429         * This field is only used when the field <code>videoEffectType</code>
1430         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
1431         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
1432         * field is ignored.
1433         */
1434        public int rgb16InputColor;
1435
1436        /**
1437         * Start alpha blending percentage.
1438         * <p>
1439         * This field is only used when the field <code>videoEffectType</code>
1440         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1441         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1442         * is ignored.
1443         */
1444        public int alphaBlendingStartPercent;
1445
1446        /**
1447         * Middle alpha blending percentage.
1448         * <p>
1449         * This field is only used when the field <code>videoEffectType</code>
1450         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1451         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1452         * is ignored.
1453         */
1454        public int alphaBlendingMiddlePercent;
1455
1456        /**
1457         * End alpha blending percentage.
1458         * <p>
1459         * This field is only used when the field <code>videoEffectType</code>
1460         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1461         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1462         * is ignored.
1463         */
1464        public int alphaBlendingEndPercent;
1465
1466        /**
1467         * Duration, in percentage of effect duration of the fade-in phase.
1468         * <p>
1469         * This field is only used when the field <code>videoEffectType</code>
1470         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1471         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1472         * is ignored.
1473         */
1474        public int alphaBlendingFadeInTimePercent;
1475
1476        /**
1477         * Duration, in percentage of effect duration of the fade-out phase.
1478         * <p>
1479         * This field is only used when the field <code>videoEffectType</code>
1480         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
1481         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
1482         * is ignored.
1483         */
1484        public int alphaBlendingFadeOutTimePercent;
1485    }
1486
1487    /** Defines the clip properties for preview */
1488    public static class PreviewClips {
1489
1490        /**
1491         * The path to the clip file.
1492         * <p>
1493         * File format of the clip, it can be:
1494         * <ul>
1495         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
1496         * <li>JPG file
1497         * </ul>
1498         */
1499
1500        public String clipPath;
1501
1502        /**
1503         * File type of the clip. See {@link FileType FileType} for valid
1504         * values.
1505         */
1506        public int fileType;
1507
1508        /** Begin of the cut in the clip in milliseconds. */
1509        public long beginPlayTime;
1510
1511        public long endPlayTime;
1512
1513        /**
1514         * Set The media rendering. See {@link MediaRendering MediaRendering}
1515         * for valid values.
1516         */
1517        public int mediaRendering;
1518
1519    }
1520
1521    /** Defines the audio settings. */
1522    public static class AudioSettings {
1523
1524        String pFile;
1525
1526        /** < PCM file path */
1527        String Id;
1528
1529        boolean bRemoveOriginal;
1530
1531        /** < If true, the original audio track is not taken into account */
1532        int channels;
1533
1534        /** < Number of channels (1=mono, 2=stereo) of BGM clip */
1535        int Fs;
1536
1537        /**
1538         * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
1539         * BGM clip
1540         */
1541        int ExtendedFs;
1542
1543        /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
1544        long startMs;
1545
1546        /** < Time, in milliseconds, at which the added audio track is inserted */
1547        long beginCutTime;
1548
1549        long endCutTime;
1550
1551        int fileType;
1552
1553        int volume;
1554
1555        /** < Volume, in percentage, of the added audio track */
1556        boolean loop;
1557
1558        /** < Looping on/off > **/
1559
1560        /** Audio mix and Duck **/
1561        int ducking_threshold;
1562
1563        int ducking_lowVolume;
1564
1565        boolean bInDucking_enable;
1566
1567        String pcmFilePath;
1568
1569    }
1570
1571    /** Encapsulates preview clips and effect settings */
1572    public static class PreviewSettings {
1573
1574        public PreviewClips[] previewClipsArray;
1575
1576        /** The effect settings. */
1577        public EffectSettings[] effectSettingsArray;
1578
1579    }
1580
1581    /** Encapsulates clip properties */
1582    public static class PreviewClipProperties {
1583
1584        public Properties[] clipProperties;
1585
1586    }
1587
1588    /** Defines the editing settings. */
1589    public static class EditSettings {
1590
1591        /**
1592         * Array of clip settings. There is one <code>clipSetting</code> for
1593         * each clip.
1594         */
1595        public ClipSettings[] clipSettingsArray;
1596
1597        /**
1598         * Array of transition settings. If there are n clips (and thus n
1599         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
1600         * <code>transitionSettings</code> in
1601         * <code>transitionSettingsArray</code>.
1602         */
1603        public TransitionSettings[] transitionSettingsArray;
1604
1605        /** The effect settings. */
1606        public EffectSettings[] effectSettingsArray;
1607
1608        /**
1609         * Video frame rate of the output clip. See {@link VideoFrameRate
1610         * VideoFrameRate} for valid values.
1611         */
1612        public int videoFrameRate;
1613
1614        /** Output file name. Must be an absolute path. */
1615        public String outputFile;
1616
1617        /**
1618         * Size of the video frames in the output clip. See
1619         * {@link VideoFrameSize VideoFrameSize} for valid values.
1620         */
1621        public int videoFrameSize;
1622
1623        /**
1624         * Format of the video stream in the output clip. See
1625         * {@link VideoFormat VideoFormat} for valid values.
1626         */
1627        public int videoFormat;
1628
1629        /**
1630         * Format of the audio stream in the output clip. See
1631         * {@link AudioFormat AudioFormat} for valid values.
1632         */
1633        public int audioFormat;
1634
1635        /**
1636         * Sampling frequency of the audio stream in the output clip. See
1637         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
1638         * values.
1639         */
1640        public int audioSamplingFreq;
1641
1642        /**
1643         * Maximum file size. Setting this field limits the size of the
1644         * output clip. Set it to <code>0</code> to let the class ignore
1645         * this field.
1646         */
1647        public int maxFileSize;
1648
1649        /**
1650         * Number of audio channels in output clip. Use <code>0</code> for none,
1651         * <code>1</code> for mono or <code>2</code> for stereo. None is only
1652         * allowed when the <code>audioFormat</code> field is set to
1653         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
1654         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
1655         * allowed when the <code>audioFormat</code> field is set to
1656         * {@link AudioFormat#AAC AudioFormat.AAC}.
1657         */
1658        public int audioChannels;
1659
1660        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
1661        public int videoBitrate;
1662
1663        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
1664        public int audioBitrate;
1665
1666        /**
1667         * Background music settings. See {@link BackgroundMusicSettings
1668         * BackgroundMusicSettings} for valid values.
1669         */
1670        public BackgroundMusicSettings backgroundMusicSettings;
1671        /** Volume, in percentage, of the primary audio track. */
1672        public int primaryTrackVolume;
1673
1674    }
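    /*
     * A minimal sketch of how an EditSettings instance is populated elsewhere
     * in this class (see generateKenBurnsClip() and generateTransitionClip());
     * the local variable name and output path are illustrative only:
     *
     *     EditSettings settings = new EditSettings();
     *     settings.audioFormat = AudioFormat.AAC;
     *     settings.audioChannels = 2;
     *     settings.audioBitrate = Bitrate.BR_64_KBPS;
     *     settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
     *     settings.videoFormat = VideoFormat.H264;
     *     settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
     *     settings.videoBitrate = Bitrate.BR_5_MBPS;
     *     settings.outputFile = "/path/to/output.3gp"; // must be an absolute path
     */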
1675
1676    /**
1677     * Defines the media properties.
1678     **/
1679
1680    public static class Properties {
1681
1682        /**
1683         * Duration of the media in milliseconds.
1684         */
1685
1686        public int duration;
1687
1688        /**
1689         * File type.
1690         */
1691
1692        public int fileType;
1693
1694        /**
1695         * Video format.
1696         */
1697
1698        public int videoFormat;
1699
1700        /**
1701         * Duration of the video stream of the media in milliseconds.
1702         */
1703
1704        public int videoDuration;
1705
1706        /**
1707         * Bitrate of the video stream of the media.
1708         */
1709
1710        public int videoBitrate;
1711
1712        /**
1713         * Width of the video frames or the width of the still picture in
1714         * pixels.
1715         */
1716
1717        public int width;
1718
1719        /**
1720         * Height of the video frames or the height of the still picture in
1721         * pixels.
1722         */
1723
1724        public int height;
1725
1726        /**
1727         * Average frame rate of video in the media in frames per second.
1728         */
1729
1730        public float averageFrameRate;
1731
1732        /**
1733         * Profile and level of the video in the media.
1734         */
1735
1736        public int profileAndLevel;
1737
1738        /**
1739         * Audio format.
1740         */
1741
1742        public int audioFormat;
1743
1744        /**
1745         * Duration of the audio stream of the media in milliseconds.
1746         */
1747
1748        public int audioDuration;
1749
1750        /**
1751         * Bitrate of the audio stream of the media.
1752         */
1753
1754        public int audioBitrate;
1755
1756        /**
1757         * Number of audio channels in the media.
1758         */
1759
1760        public int audioChannels;
1761
1762        /**
1763         * Sampling frequency of the audio stream in the media in samples per
1764         * second.
1765         */
1766
1767        public int audioSamplingFrequency;
1768
1769        /**
1770         * Volume value of the audio track as percentage.
1771         */
1772        public int audioVolumeValue;
1773        /** Id of the media item or audio track described by this instance. */
1774        public String Id;
1775
1776    }
1777
1778    /**
1779     * Constructor
1780     *
1781     * @param projectPath The path where the VideoEditor stores all files
1782     *        related to the project
1783     * @param veObj The video editor reference
1784     */
1785    public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
1786        mProjectPath = projectPath;
1787        if (veObj == null) {
1788            throw new IllegalArgumentException("video editor object is null");
1789        }
1790        mVideoEditor = veObj;
1791
1792        if (mStoryBoardSettings == null) {
1793            mStoryBoardSettings = new EditSettings();
1794        }
1795
1796        mMediaEffectList = new ArrayList<Effect>();
1797        mMediaOverLayList = new ArrayList<Overlay>();
1798        _init(mProjectPath, "null");
1799        mAudioTrackPCMFilePath = null;
1800    }
1801
1802    /**
1803     * @return The project path
1804     */
1805    String getProjectPath() {
1806        return mProjectPath;
1807    }
1808
1809    /**
1810     * @return The Audio Track PCM file path
1811     */
1812    String getProjectAudioTrackPCMFilePath() {
1813        return mAudioTrackPCMFilePath;
1814    }
1815
1816    /**
1817     * Invalidates the PCM file
1818     */
1819    void invalidatePcmFile() {
1820        if (mAudioTrackPCMFilePath != null) {
1821            new File(mAudioTrackPCMFilePath).delete();
1822            mAudioTrackPCMFilePath = null;
1823        }
1824    }
1825
1826    @SuppressWarnings("unused")
1827    private void onProgressUpdate(int taskId, int progress) {
1828        if (mProcessingState == PROCESSING_EXPORT) {
1829            if (mExportProgressListener != null) {
1830                if (mProgressToApp < progress) {
1831                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
1832                    /* record previous progress */
1833                    mProgressToApp = progress;
1834                }
1835            }
1836        } else {
1838            // Adapt progress depending on current state
1839            int actualProgress = 0;
1840            int action = 0;
1841
1842            if (mProcessingState == PROCESSING_AUDIO_PCM) {
1843                action = MediaProcessingProgressListener.ACTION_DECODE;
1844            } else {
1845                action = MediaProcessingProgressListener.ACTION_ENCODE;
1846            }
1847
1848            switch (mProcessingState) {
1849                case PROCESSING_AUDIO_PCM:
1850                    actualProgress = progress;
1851                    break;
1852                case PROCESSING_TRANSITION:
1853                    actualProgress = progress;
1854                    break;
1855                case PROCESSING_KENBURNS:
1856                    actualProgress = progress;
1857                    break;
1858                case PROCESSING_INTERMEDIATE1:
1859                    if ((progress == 0) && (mProgressToApp != 0)) {
1860                        mProgressToApp = 0;
1861                    }
1862                    if ((progress != 0) || (mProgressToApp != 0)) {
1863                        actualProgress = progress/4;
1864                    }
1865                    break;
1866                case PROCESSING_INTERMEDIATE2:
1867                    if ((progress != 0) || (mProgressToApp != 0)) {
1868                        actualProgress = 25 + progress/4;
1869                    }
1870                    break;
1871                case PROCESSING_INTERMEDIATE3:
1872                    if ((progress != 0) || (mProgressToApp != 0)) {
1873                        actualProgress = 50 + progress/2;
1874                    }
1875                    break;
1876                case PROCESSING_NONE:
1877
1878                default:
1879                    Log.e("MediaArtistNativeHelper", "ERROR unexpected State=" + mProcessingState);
1880                    return;
1881            }
1882            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
1883
1884                mProgressToApp = actualProgress;
1885
1886                if (mMediaProcessingProgressListener != null) {
1887                    // Send the progress indication
1888                    mMediaProcessingProgressListener.onProgress(mProcessingObject,
1889                                                                action,
1890                                                                actualProgress);
1891                }
1892            }
1893            /* avoid 0 in next intermediate call */
1894            if (mProgressToApp == 0) {
1895                if (mMediaProcessingProgressListener != null) {
1896                    /*
1897                     *  Send the progress indication
1898                     */
1899                    mMediaProcessingProgressListener.onProgress(mProcessingObject,
1900                                                                action,
1901                                                                actualProgress);
1902                }
1903                mProgressToApp = 1;
1904            }
1905        }
1906    }
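    /*
     * The intermediate states handled above fold the native 0-100 progress of
     * each pass into a single 0-100 range reported to the application:
     *
     *     PROCESSING_INTERMEDIATE1: actualProgress = progress / 4        (0..25)
     *     PROCESSING_INTERMEDIATE2: actualProgress = 25 + progress / 4   (25..50)
     *     PROCESSING_INTERMEDIATE3: actualProgress = 50 + progress / 2   (50..100)
     *
     * For example, a native progress of 60 received while in
     * PROCESSING_INTERMEDIATE2 is reported as 25 + 60 / 4 = 40.
     */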
1907
1908    @SuppressWarnings("unused")
1909    private void onPreviewProgressUpdate(int progress, boolean isFinished,
1910                  boolean updateOverlay, String filename, int renderingMode) {
1911        if (mPreviewProgressListener != null) {
1912            if (mIsFirstProgress) {
1913                mPreviewProgressListener.onStart(mVideoEditor);
1914                mIsFirstProgress = false;
1915            }
1916
1917            final VideoEditor.OverlayData overlayData;
1918            if (updateOverlay) {
1919                overlayData = new VideoEditor.OverlayData();
1920                if (filename != null) {
1921                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
1922                } else {
1923                    overlayData.setClear();
1924                }
1925            } else {
1926                overlayData = null;
1927            }
1928
1929            mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
1930
1931            if (progress != 0) {
1932                mPreviewProgress = progress;
1933            }
1934
1935            if (isFinished) {
1936                mPreviewProgressListener.onStop(mVideoEditor);
1937            }
1938        }
1939    }
1940
1941    /**
1942     * Release the native helper object
1943     */
1944    public void releaseNativeHelper() {
1945        try {
1946            release();
1947        } catch (IllegalStateException ex) {
1948            Log.e("MediaArtistNativeHelper",
1949            "Illegal State exception caught in releaseNativeHelper");
1950            throw ex;
1951        } catch (RuntimeException ex) {
1952            Log.e("MediaArtistNativeHelper", "Runtime exception caught in releaseNativeHelper");
1953            throw ex;
1954        }
1955    }
1956
1957    /**
1958     * Called by the native layer to report audio waveform extraction progress
1959     */
1960    @SuppressWarnings("unused")
1961    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
1962
1963        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
1965            mExtractAudioWaveformProgressListener.onProgress(progress);
1966        }
1967    }
1968
1969    /**
1970     * Populates an EffectSettings instance from the given EffectColor
1971     * effect
1972     *
1973     * @param effects The EffectColor reference
1974     *
1975     * @return The populated EffectSettings instance
1976     */
1977    EffectSettings getEffectSettings(EffectColor effects) {
1978        EffectSettings effectSettings = new EffectSettings();
1979        effectSettings.startTime = (int)effects.getStartTime();
1980        effectSettings.duration = (int)effects.getDuration();
1981        effectSettings.videoEffectType = getEffectColorType(effects);
1982        effectSettings.audioEffectType = 0;
1983        effectSettings.startPercent = 0;
1984        effectSettings.durationPercent = 0;
1985        effectSettings.framingFile = null;
1986        effectSettings.topLeftX = 0;
1987        effectSettings.topLeftY = 0;
1988        effectSettings.framingResize = false;
1989        effectSettings.text = null;
1990        effectSettings.textRenderingData = null;
1991        effectSettings.textBufferWidth = 0;
1992        effectSettings.textBufferHeight = 0;
1993        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
1994            effectSettings.fiftiesFrameRate = 15;
1995        } else {
1996            effectSettings.fiftiesFrameRate = 0;
1997        }
1998
1999        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
2000                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
2001            effectSettings.rgb16InputColor = effects.getColor();
2002        }
2003
2004        effectSettings.alphaBlendingStartPercent = 0;
2005        effectSettings.alphaBlendingMiddlePercent = 0;
2006        effectSettings.alphaBlendingEndPercent = 0;
2007        effectSettings.alphaBlendingFadeInTimePercent = 0;
2008        effectSettings.alphaBlendingFadeOutTimePercent = 0;
2009        return effectSettings;
2010    }
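    /*
     * For example, an EffectColor of type EffectColor.TYPE_FIFTIES produces an
     * EffectSettings with fiftiesFrameRate = 15, while COLORRGB16 and GRADIENT
     * effects carry their color in rgb16InputColor; all alpha-blending fields
     * are left at 0 because they only apply to TEXT and FRAMING effects.
     */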
2011
2012    /**
2013     * Populates an EffectSettings instance from the given OverlayFrame
2014     * overlay
2015     *
2016     * @param overlay The OverlayFrame reference
2017     *
2018     * @return The populated EffectSettings instance
2019     */
2020    EffectSettings getOverlaySettings(OverlayFrame overlay) {
2021        EffectSettings effectSettings = new EffectSettings();
2022        Bitmap bitmap = null;
2023
2024        effectSettings.startTime = (int)overlay.getStartTime();
2025        effectSettings.duration = (int)overlay.getDuration();
2026        effectSettings.videoEffectType = VideoEffect.FRAMING;
2027        effectSettings.audioEffectType = 0;
2028        effectSettings.startPercent = 0;
2029        effectSettings.durationPercent = 0;
2030        effectSettings.framingFile = null;
2031
2032        if ((bitmap = overlay.getBitmap()) != null) {
2033            effectSettings.framingFile = overlay.getFilename();
2034
2035            if (effectSettings.framingFile == null) {
2036                try {
2037                    (overlay).save(mProjectPath);
2038                } catch (IOException e) {
2039                    Log.e("MediaArtistNativeHelper","getOverlaySettings : failed to save overlay file");
2040                }
2041                effectSettings.framingFile = overlay.getFilename();
2042            }
2043            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
2044                effectSettings.bitmapType = 6;
2045            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
2046                effectSettings.bitmapType = 5;
2047            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
2048                effectSettings.bitmapType = 4;
2049            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
2050                throw new RuntimeException("Bitmap config not supported");
2051
2052            effectSettings.width = bitmap.getWidth();
2053            effectSettings.height = bitmap.getHeight();
2054            effectSettings.framingBuffer = new int[effectSettings.width];
2055            int tmp = 0;
2056            short maxAlpha = 0;
2057            short minAlpha = (short)0xFF;
2058            short alpha = 0;
2059            while (tmp < effectSettings.height) {
2060                bitmap.getPixels(effectSettings.framingBuffer, 0,
2061                                 effectSettings.width, 0, tmp,
2062                                 effectSettings.width, 1);
2063                for (int i = 0; i < effectSettings.width; i++) {
2064                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
2065                    if (alpha > maxAlpha) {
2066                        maxAlpha = alpha;
2067                    }
2068                    if (alpha < minAlpha) {
2069                        minAlpha = alpha;
2070                    }
2071                }
2072                tmp += 1;
2073            }
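            /*
             * Derive a single alpha-blending percentage for the overlay from
             * the bitmap scanned above: the midpoint of the minimum and
             * maximum alpha values is converted from the 0..255 range to a
             * percentage (for example, minAlpha = 0 and maxAlpha = 255 give a
             * midpoint of 127, i.e. about 49%).
             */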
2074            alpha = (short)((maxAlpha + minAlpha) / 2);
2075            alpha = (short)((alpha * 100) / 256);
2076            effectSettings.alphaBlendingEndPercent = alpha;
2077            effectSettings.alphaBlendingMiddlePercent = alpha;
2078            effectSettings.alphaBlendingStartPercent = alpha;
2079            effectSettings.alphaBlendingFadeInTimePercent = 100;
2080            effectSettings.alphaBlendingFadeOutTimePercent = 100;
2081            effectSettings.framingBuffer = null;
2082        }
2083
2084        effectSettings.topLeftX = 0;
2085        effectSettings.topLeftY = 0;
2086
2087        effectSettings.framingResize = true;
2088        effectSettings.text = null;
2089        effectSettings.textRenderingData = null;
2090        effectSettings.textBufferWidth = 0;
2091        effectSettings.textBufferHeight = 0;
2092        effectSettings.fiftiesFrameRate = 0;
2093        effectSettings.rgb16InputColor = 0;
2094        int mediaItemHeight;
2095        int aspectRatio;
2096        if (overlay.getMediaItem() instanceof MediaImageItem) {
2097            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
2098                //Kenburns was applied
2099                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
2100                aspectRatio = getAspectRatio(
2101                    ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
2102                    , mediaItemHeight);
2103            }
2104            else {
2105                //For image get the scaled height. Aspect ratio would remain the same
2106                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
2107                aspectRatio = overlay.getMediaItem().getAspectRatio();
2108                effectSettings.framingResize = false; //since the image can be of odd size.
2109            }
2110        } else {
2111            aspectRatio = overlay.getMediaItem().getAspectRatio();
2112            mediaItemHeight = overlay.getMediaItem().getHeight();
2113        }
2114        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
2115        return effectSettings;
2116    }
2117
2118    /**
2119     * Sets the audio regenerate flag. If the audio track PCM file does
2120     * not exist, the flag is forced to <code>true</code>.
2121     *
2122     * @param flag The boolean to set the audio regenerate flag
2123     */
2124    void setAudioflag(boolean flag) {
2125        //check if the file exists.
2126        if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
2127            flag = true;
2128        }
2129        mRegenerateAudio = flag;
2130    }
2131
2132    /**
2133     * Gets the audio regenerate flag
2134     *
2135     * @return The audio regenerate flag
2136     *
2137     */
2138    boolean getAudioflag() {
2139        return mRegenerateAudio;
2140    }
2141
2142    /**
2143     * Maps the average frame rate to one of the defined enum values
2144     *
2145     * @param averageFrameRate The average frame rate of video item
2146     *
2147     * @return The frame rate as one of the defined enum values, or -1 if below 5 fps
2148     */
2149    public int GetClosestVideoFrameRate(int averageFrameRate) {
2150        if (averageFrameRate >= 25) {
2151            return VideoFrameRate.FR_30_FPS;
2152        } else if (averageFrameRate >= 20) {
2153            return VideoFrameRate.FR_25_FPS;
2154        } else if (averageFrameRate >= 15) {
2155            return VideoFrameRate.FR_20_FPS;
2156        } else if (averageFrameRate >= 12) {
2157            return VideoFrameRate.FR_15_FPS;
2158        } else if (averageFrameRate >= 10) {
2159            return VideoFrameRate.FR_12_5_FPS;
2160        } else if (averageFrameRate >= 7) {
2161            return VideoFrameRate.FR_10_FPS;
2162        } else if (averageFrameRate >= 5) {
2163            return VideoFrameRate.FR_7_5_FPS;
2164        } else {
2165            return -1;
2166        }
2167    }
2168
2169    /**
2170     * Helper function to adjust the effect or overlay start time and
2171     * duration depending on the begin and end boundary times of the media item
2172     */
2173    public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect,
2174                                                  int beginCutTime,
2175                                                  int endCutTime) {
2176
2177        int effectStartTime = 0;
2178        int effectDuration = 0;
2179
2180        /**
2181         * cbct -> clip begin cut time
2182         * cect -> clip end cut time
2183         ****************************************
2184         *  |                                 |
2185         *  |         cbct        cect        |
2186         *  | <-1-->   |           |          |
2187         *  |       <--|-2->       |          |
2188         *  |          | <---3---> |          |
2189         *  |          |        <--|-4--->    |
2190         *  |          |           | <--5-->  |
2191         *  |      <---|------6----|---->     |
2192         *  |                                 |
2193         *  < : effectStart
2194         *  > : effectStart + effectDuration
2195         ****************************************
2196         **/
2197
2198        /** 1 & 5 */
2199        /**
2200         * Effect falls outside the trim duration. In such a case the effect
2201         * is not applied.
2202         */
2203        if ((lEffect.startTime > endCutTime)
2204                || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
2205
2206            effectStartTime = 0;
2207            effectDuration = 0;
2208
2209            lEffect.startTime = effectStartTime;
2210            lEffect.duration = effectDuration;
2211            return;
2212        }
2213
2214        /** 2 */
2215        if ((lEffect.startTime < beginCutTime)
2216                && ((lEffect.startTime + lEffect.duration) > beginCutTime)
2217                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2218            effectStartTime = 0;
2219            effectDuration = lEffect.duration;
2220
2221            effectDuration -= (beginCutTime - lEffect.startTime);
2222            lEffect.startTime = effectStartTime;
2223            lEffect.duration = effectDuration;
2224            return;
2225        }
2226
2227        /** 3 */
2228        if ((lEffect.startTime >= beginCutTime)
2229                && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
2230            effectStartTime = lEffect.startTime - beginCutTime;
2231            lEffect.startTime = effectStartTime;
2232            /* lEffect.duration is unchanged in this case */
2233            return;
2234        }
2235
2236        /** 4 */
2237        if ((lEffect.startTime >= beginCutTime)
2238                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2239            effectStartTime = lEffect.startTime - beginCutTime;
2240            effectDuration = endCutTime - lEffect.startTime;
2241            lEffect.startTime = effectStartTime;
2242            lEffect.duration = effectDuration;
2243            return;
2244        }
2245
2246        /** 6 */
2247        if ((lEffect.startTime < beginCutTime)
2248                && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
2249            effectStartTime = 0;
2250            effectDuration = endCutTime - beginCutTime;
2251            lEffect.startTime = effectStartTime;
2252            lEffect.duration = effectDuration;
2253            return;
2254        }
2255
2256    }
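    /*
     * Worked example for case 4 above: with beginCutTime = 1000 ms,
     * endCutTime = 5000 ms and an effect starting at 4000 ms with a duration
     * of 3000 ms, the effect is re-anchored to the trimmed clip as
     * startTime = 4000 - 1000 = 3000 ms and clipped to
     * duration = 5000 - 4000 = 1000 ms.
     */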
2257
2258    /**
2259     * Generates the clip for preview or export
2260     *
2261     * @param editSettings The EditSettings reference for generating
2262     * a clip for preview or export
2263     *
2264     * @return <code>0</code> on success, or an error code on failure
2265     */
2266    public int generateClip(EditSettings editSettings) {
2267        int err = 0;
2268
2269        try {
2270            err = nativeGenerateClip(editSettings);
2271        } catch (IllegalArgumentException ex) {
2272            Log.e("MediaArtistNativeHelper","Illegal Argument exception in generateClip");
2273            return -1;
2274        } catch (IllegalStateException ex) {
2275            Log.e("MediaArtistNativeHelper","Illegal state exception in generateClip");
2276            return -1;
2277        } catch (RuntimeException ex) {
2278            Log.e("MediaArtistNativeHelper", "Runtime exception in generateClip");
2279            return -1;
2280        }
2281        return err;
2282    }
2283
2284    /**
2285     * Initialises the given ClipSettings reference to
2286     * default values
2287     *
2288     * @param lclipSettings The ClipSettings reference
2289     */
2290    void initClipSettings(ClipSettings lclipSettings) {
2291        lclipSettings.clipPath = null;
2292        lclipSettings.clipDecodedPath = null;
2293        lclipSettings.clipOriginalPath = null;
2294        lclipSettings.fileType = 0;
2295        lclipSettings.endCutTime = 0;
2296        lclipSettings.beginCutTime = 0;
2297        lclipSettings.beginCutPercent = 0;
2298        lclipSettings.endCutPercent = 0;
2299        lclipSettings.panZoomEnabled = false;
2300        lclipSettings.panZoomPercentStart = 0;
2301        lclipSettings.panZoomTopLeftXStart = 0;
2302        lclipSettings.panZoomTopLeftYStart = 0;
2303        lclipSettings.panZoomPercentEnd = 0;
2304        lclipSettings.panZoomTopLeftXEnd = 0;
2305        lclipSettings.panZoomTopLeftYEnd = 0;
2306        lclipSettings.mediaRendering = 0;
2307    }
2308
2309
2310    /**
2311     * Populates the settings for generating an effect clip
2312     *
2313     * @param lMediaItem The media item for which the effect clip
2314     * needs to be generated
2315     * @param lclipSettings The ClipSettings reference containing
2316     * clips data
2317     * @param e The EditSettings reference containing effect specific data
2318     * @param uniqueId The unique id used in the name of the output clip
2319     * @param clipNo The clip index (1 or 2) used to select the intermediate processing state
2320     *
2321     * @return The name and path of generated clip
2322     */
2323    String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
2324            EditSettings e,String uniqueId,int clipNo) {
2325        int err = 0;
2326        EditSettings editSettings = null;
2327        String EffectClipPath = null;
2328
2329        editSettings = new EditSettings();
2330
2331        editSettings.clipSettingsArray = new ClipSettings[1];
2332        editSettings.clipSettingsArray[0] = lclipSettings;
2333
2334        editSettings.backgroundMusicSettings = null;
2335        editSettings.transitionSettingsArray = null;
2336        editSettings.effectSettingsArray = e.effectSettingsArray;
2337
2338        EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
2339                + lMediaItem.getId() + uniqueId + ".3gp");
2340
2341        File tmpFile = new File(EffectClipPath);
2342        if (tmpFile.exists()) {
2343            tmpFile.delete();
2344        }
2345
2346        if (lMediaItem instanceof MediaVideoItem) {
2347            MediaVideoItem m = (MediaVideoItem)lMediaItem;
2348
2349            editSettings.audioFormat = AudioFormat.AAC;
2350            editSettings.audioChannels = 2;
2351            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2352            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2353
2354            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2356            editSettings.videoFormat = VideoFormat.H264;
2357            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2358            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
2359                    .getHeight());
2360
2361        } else {
2362            MediaImageItem m = (MediaImageItem)lMediaItem;
2363            editSettings.audioBitrate = Bitrate.BR_64_KBPS;
2364            editSettings.audioChannels = 2;
2365            editSettings.audioFormat = AudioFormat.AAC;
2366            editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2367
2368            editSettings.videoBitrate = Bitrate.BR_5_MBPS;
2369            editSettings.videoFormat = VideoFormat.H264;
2370            editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
2371            editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), m
2372                    .getScaledHeight());
2373        }
2374
2375        editSettings.outputFile = EffectClipPath;
2376
2377        if (clipNo == 1) {
2378            mProcessingState  = PROCESSING_INTERMEDIATE1;
2379        } else if (clipNo == 2) {
2380            mProcessingState  = PROCESSING_INTERMEDIATE2;
2381        }
2382        mProcessingObject = lMediaItem;
2383        err = generateClip(editSettings);
2384        mProcessingState  = PROCESSING_NONE;
2385
2386        if (err == 0) {
2387            lclipSettings.clipPath = EffectClipPath;
2388            lclipSettings.fileType = FileType.THREE_GPP;
2389            return EffectClipPath;
2390        } else {
2391            throw new RuntimeException("preview generation cannot be completed");
2392        }
2393    }
2394
2395
2396    /**
2397     * Populates the settings for generating a Ken Burn effect clip
2398     *
2399     * @param m The media image item for which the Ken Burn effect clip
2400     * needs to be generated
2401     * @param e The EditSettings reference containing clip specific data
2402     *
2403     * @return The name and path of generated clip
2404     */
2405    String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
2406        String output = null;
2407        int err = 0;
2408
2409        e.backgroundMusicSettings = null;
2410        e.transitionSettingsArray = null;
2411        e.effectSettingsArray = null;
2412        output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
2413
2414        File tmpFile = new File(output);
2415        if (tmpFile.exists()) {
2416            tmpFile.delete();
2417        }
2418
2419        e.outputFile = output;
2420        e.audioBitrate = Bitrate.BR_64_KBPS;
2421        e.audioChannels = 2;
2422        e.audioFormat = AudioFormat.AAC;
2423        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2424
2425        e.videoBitrate = Bitrate.BR_5_MBPS;
2426        e.videoFormat = VideoFormat.H264;
2427        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2428        e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2429                                                           m.getScaledHeight());
2430        mProcessingState  = PROCESSING_KENBURNS;
2431        mProcessingObject = m;
2432        err = generateClip(e);
2433        // Reset the processing state and check for errors
2434        mProcessingState  = PROCESSING_NONE;
2435        if (err != 0) {
2436            throw new RuntimeException("preview generation cannot be completed");
2437        }
2438        return output;
2439    }
2440
2441
2442    /**
2443     * Calculates the output resolution for a transition clip
2444     *
2445     * @param m1 First media item associated with transition
2446     * @param m2 Second media item associated with transition
2447     *
2448     * @return The transition resolution
2449     */
2450    private int getTransitionResolution(MediaItem m1, MediaItem m2) {
2451        int clip1Height = 0;
2452        int clip2Height = 0;
2453        int videoSize = 0;
2454
2455        if (m1 != null && m2 != null) {
2456            if (m1 instanceof MediaVideoItem) {
2457                clip1Height = m1.getHeight();
2458            } else if (m1 instanceof MediaImageItem) {
2459                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2460            }
2461            if (m2 instanceof MediaVideoItem) {
2462                clip2Height = m2.getHeight();
2463            } else if (m2 instanceof MediaImageItem) {
2464                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2465            }
2466            if (clip1Height > clip2Height) {
2467                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2468                                                                   clip1Height);
2469            } else {
2470                videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2471                                                                   clip2Height);
2472            }
2473        } else if (m1 == null && m2 != null) {
2474            if (m2 instanceof MediaVideoItem) {
2475                clip2Height = m2.getHeight();
2476            } else if (m2 instanceof MediaImageItem) {
2477                clip2Height = ((MediaImageItem)m2).getScaledHeight();
2478            }
2479            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2480                                                                   clip2Height);
2481        } else if (m1 != null && m2 == null) {
2482            if (m1 instanceof MediaVideoItem) {
2483                clip1Height = m1.getHeight();
2484            } else if (m1 instanceof MediaImageItem) {
2485                clip1Height = ((MediaImageItem)m1).getScaledHeight();
2486            }
2487            videoSize = findVideoResolution(mVideoEditor.getAspectRatio(),
2488                                                                   clip1Height);
2489        }
2490        return videoSize;
2491    }
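    /*
     * For example, a 720-pixel-high video next to an image scaled to 480
     * pixels yields the resolution computed from the larger height (720) and
     * the storyboard aspect ratio, so the transition is rendered at the size
     * of the larger of the two clips.
     */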
2492
2493    /**
2494     * Populates the settings for generating a transition clip
2495     *
2496     * @param m1 First media item associated with transition
2497     * @param m2 Second media item associated with transition
2498     * @param e The EditSettings reference containing
2499     * clip specific data
2500     * @param uniqueId The unique id used in the name of the output clip
2501     * @param t The Transition specific data
2502     *
2503     * @return The name and path of generated clip
2504     */
2505    String generateTransitionClip(EditSettings e, String uniqueId,
2506            MediaItem m1, MediaItem m2,Transition t) {
2507        String outputFilename = null;
2508        int err = 0;
2509
2510        outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
2511        e.outputFile = outputFilename;
2512        e.audioBitrate = Bitrate.BR_64_KBPS;
2513        e.audioChannels = 2;
2514        e.audioFormat = AudioFormat.AAC;
2515        e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
2516
2517        e.videoBitrate = Bitrate.BR_5_MBPS;
2518        e.videoFormat = VideoFormat.H264;
2519        e.videoFrameRate = VideoFrameRate.FR_30_FPS;
2520        e.videoFrameSize = getTransitionResolution(m1, m2);
2521
2522        if (new File(outputFilename).exists()) {
2523            new File(outputFilename).delete();
2524        }
2525        mProcessingState  = PROCESSING_INTERMEDIATE3;
2526        mProcessingObject = t;
2527        err = generateClip(e);
2528        // Reset the processing state and check for errors
2529        mProcessingState  = PROCESSING_NONE;
2530        if (err != 0) {
2531            throw new RuntimeException("preview generation cannot be completed");
2532        }
2533        return outputFilename;
2534    }
2535
2536    /**
2537     * Populates the effects and overlays of a media item in the
2538     * EffectSettings array and adjusts their start time and duration
2539     * with respect to the total storyboard time
2540     *
2541     * @param m The media item associated with the effects and overlays
2542     * @param effectSettings The EffectSettings array to populate
2543     * @param i The current index in the EffectSettings array
2544     * @param beginCutTime The begin cut time of the clip associated with the effects
2545     * @param endCutTime The end cut time of the clip associated with the effects
2546     * @param storyBoardTime The current storyboard time
2547     *
2548     * @return The updated index
2549     */
2550    private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
2551            int beginCutTime, int endCutTime, int storyBoardTime) {
2552        List<Effect> effects = m.getAllEffects();
2553        List<Overlay> overlays = m.getAllOverlays();
2554
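        /*
         * Narrow the begin/end cut window by the durations of any begin and
         * end transitions so that effects and overlays are only applied to
         * the portion of the clip that is not covered by a transition.
         */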
2555        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2556                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2557            beginCutTime += m.getBeginTransition().getDuration();
2558            endCutTime -= m.getEndTransition().getDuration();
2559        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2560                && m.getEndTransition().getDuration() > 0) {
2561            endCutTime -= m.getEndTransition().getDuration();
2562        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2563                && m.getBeginTransition().getDuration() > 0) {
2564            beginCutTime += m.getBeginTransition().getDuration();
2565        }
2566
2567        for (Effect effect : effects) {
2568            if (effect instanceof EffectColor) {
2569                effectSettings[i] = getEffectSettings((EffectColor)effect);
2570                adjustEffectsStartTimeAndDuration(effectSettings[i],
2571                                                      beginCutTime, endCutTime);
2572                effectSettings[i].startTime += storyBoardTime;
2573                i++;
2574            }
2575        }
2576        for (Overlay overlay : overlays) {
2577            effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
2578            adjustEffectsStartTimeAndDuration(effectSettings[i],
2579                                                      beginCutTime, endCutTime);
2580            effectSettings[i].startTime += storyBoardTime;
2581            i++;
2582        }
2583        return i;
2584    }
2585
2586    /**
2587     * Adjusts the media item boundaries for use in export or preview
2588     *
2589     * @param clipSettings The ClipSettings reference
2590     * @param clipProperties The Properties reference
2591     * @param m The media item
2592     */
2593    private void adjustMediaItemBoundary(ClipSettings clipSettings,
2594                                         Properties clipProperties, MediaItem m) {
2595        if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
2596                && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
2597
2598            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2599            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2600
2601        } else if (m.getBeginTransition() == null && m.getEndTransition() != null
2602                && m.getEndTransition().getDuration() > 0) {
2603
2604            clipSettings.endCutTime -= m.getEndTransition().getDuration();
2605
2606        } else if (m.getEndTransition() == null && m.getBeginTransition() != null
2607                && m.getBeginTransition().getDuration() > 0) {
2608
2609            clipSettings.beginCutTime += m.getBeginTransition().getDuration();
2610        }
2611        clipProperties.duration = clipSettings.endCutTime -
2612                                                      clipSettings.beginCutTime;
2613
2614        if (clipProperties.videoDuration != 0) {
2615            clipProperties.videoDuration = clipSettings.endCutTime -
2616                                                      clipSettings.beginCutTime;
2617        }
2618
2619        if (clipProperties.audioDuration != 0) {
2620            clipProperties.audioDuration = clipSettings.endCutTime -
2621                                                      clipSettings.beginCutTime;
2622        }
2623    }
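    /*
     * For example, a clip cut to [0 ms, 10000 ms] with a 2000 ms begin
     * transition and a 1500 ms end transition ends up with
     * beginCutTime = 2000 ms, endCutTime = 8500 ms and therefore a duration
     * of 6500 ms.
     */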
2624
2625    /**
2626     * Generates the transition clip if a transition is present
2627     * and is in an invalidated state
2628     *
2629     * @param transition The Transition reference
2630     * @param editSettings The EditSettings reference
2631     * @param clipPropertiesArray The clip Properties array
2632     * @param index The index in the clip Properties array for the current clip
2633     */
2634    private void generateTransition(Transition transition, EditSettings editSettings,
2635            PreviewClipProperties clipPropertiesArray, int index) {
2636        if (!(transition.isGenerated())) {
2637            transition.generate();
2638        }
2639        editSettings.clipSettingsArray[index] = new ClipSettings();
2640        editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
2641        editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
2642        editSettings.clipSettingsArray[index].beginCutTime = 0;
2643        editSettings.clipSettingsArray[index].endCutTime =
2644                                                  (int)transition.getDuration();
2645        editSettings.clipSettingsArray[index].mediaRendering =
2646                                                   MediaRendering.BLACK_BORDERS;
2647        try {
2648            clipPropertiesArray.clipProperties[index] =
2649                                   getMediaProperties(transition.getFilename());
2650        } catch (Exception e) {
2651            throw new IllegalArgumentException("Unsupported file or file not found");
2652        }
2653        clipPropertiesArray.clipProperties[index].Id = null;
2654        clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
2655        clipPropertiesArray.clipProperties[index].duration =
2656                                                  (int)transition.getDuration();
2657        if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
2658            clipPropertiesArray.clipProperties[index].videoDuration =
2659                                                  (int)transition.getDuration();
2660        }
2661        if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
2662            clipPropertiesArray.clipProperties[index].audioDuration =
2663                                                  (int)transition.getDuration();
2664        }
2665    }
2666
2667    /**
2668     * Sets the volume for current media item in clip properties array
2669     *
2670     * @param m The media item
2671     * @param clipProperties The clip properties array reference
2672     * @param index The index in the clip Properties array for the current clip
2673     */
2674    private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
2675                              int index) {
2676        if (m instanceof MediaVideoItem) {
2677            boolean videoMuted = ((MediaVideoItem)m).isMuted();
2678            if (videoMuted == false) {
2679                mClipProperties.clipProperties[index].audioVolumeValue = ((MediaVideoItem)m)
2680                .getVolume();
2681            } else {
2682                mClipProperties.clipProperties[index].audioVolumeValue = 0;
2683            }
2684        } else if (m instanceof MediaImageItem) {
2685            mClipProperties.clipProperties[index].audioVolumeValue = 0;
2686        }
2687    }
2688
2689    /**
2690     * Checks for odd image width and height and rounds them down to even values
2691     *
2692     * @param m The media item
2693     * @param clipProperties The clip properties array reference
2694     * @param index The index in the clip Properties array for the current clip
2695     */
2696    private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
2697        if (m instanceof MediaImageItem) {
2698            int width = mClipProperties.clipProperties[index].width;
2699            int height = mClipProperties.clipProperties[index].height;
2700
2701            if ((width % 2) != 0) {
2702                width -= 1;
2703            }
2704            if ((height % 2) != 0) {
2705                height -= 1;
2706            }
2707            mClipProperties.clipProperties[index].width = width;
2708            mClipProperties.clipProperties[index].height = height;
2709        }
2710    }
2711
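    /*
     * For example, a 641x481 image is recorded as 640x480 so that only even
     * clip dimensions are used further down the pipeline.
     */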
2712    /**
2713     * Populates the media item properties and calculates the maximum
2714     * height among all the clips
2715     *
2716     * @param m The media item
2717     * @param index The index in the clip settings array for the current clip
2718     * @param maxHeight The maximum height seen so far
2719     *
2720     * @return The updated maximum height if the current clip's height is
2721     * greater than the heights of all previous clips
2722     */
2723    private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
2724        mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
2725        if (m instanceof MediaVideoItem) {
2726            mPreviewEditSettings.clipSettingsArray[index] = ((MediaVideoItem)m)
2727            .getVideoClipProperties();
2728            if (((MediaVideoItem)m).getHeight() > maxHeight) {
2729                maxHeight = ((MediaVideoItem)m).getHeight();
2730            }
2731        } else if (m instanceof MediaImageItem) {
2732            mPreviewEditSettings.clipSettingsArray[index] = ((MediaImageItem)m)
2733            .getImageClipProperties();
2734            if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
2735                maxHeight = ((MediaImageItem)m).getScaledHeight();
2736            }
2737        }
2738        /* Handle the image files here */
2739        if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
2740            mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = ((MediaImageItem)m)
2741            .getDecodedImageFileName();
2742
2743            mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
2744                         mPreviewEditSettings.clipSettingsArray[index].clipPath;
2745        }
2746        return maxHeight;
2747    }
2748
2749    /**
2750     * Populates the background music track properties
2751     *
2752     * @param mediaBGMList The background music list
2753     *
2754     */
2755    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
2756
2757        if (mediaBGMList.size() == 1) {
2758            mAudioTrack = mediaBGMList.get(0);
2759        } else {
2761            mAudioTrack = null;
2762        }
2763
2764        if (mAudioTrack != null) {
2765            mAudioSettings = new AudioSettings();
2766            Properties mAudioProperties = new Properties();
2767            mAudioSettings.pFile = null;
2768            mAudioSettings.Id = mAudioTrack.getId();
2769            try {
2770                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
2771            } catch (Exception e) {
2772               throw new IllegalArgumentException("Unsupported file or file not found");
2773            }
2774            mAudioSettings.bRemoveOriginal = false;
2775            mAudioSettings.channels = mAudioProperties.audioChannels;
2776            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
2777            mAudioSettings.loop = mAudioTrack.isLooping();
2778            mAudioSettings.ExtendedFs = 0;
2779            mAudioSettings.pFile = mAudioTrack.getFilename();
2780            mAudioSettings.startMs = mAudioTrack.getStartTime();
2781            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
2782            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
2783            if (mAudioTrack.isMuted()) {
2784                mAudioSettings.volume = 0;
2785            } else {
2786                mAudioSettings.volume = mAudioTrack.getVolume();
2787            }
2788            mAudioSettings.fileType = mAudioProperties.fileType;
2789            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
2790            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
2791            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
2792            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
2794            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
2795
2796            mPreviewEditSettings.backgroundMusicSettings =
2797                                                  new BackgroundMusicSettings();
2798            mPreviewEditSettings.backgroundMusicSettings.file =
2799                                                         mAudioTrackPCMFilePath;
2800            mPreviewEditSettings.backgroundMusicSettings.fileType =
2801                                                      mAudioProperties.fileType;
2802            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
2803                                                     mAudioTrack.getStartTime();
2804            mPreviewEditSettings.backgroundMusicSettings.volumePercent =
2805                                                        mAudioTrack.getVolume();
2806            mPreviewEditSettings.backgroundMusicSettings.beginLoop = mAudioTrack
2807            .getBoundaryBeginTime();
2808            mPreviewEditSettings.backgroundMusicSettings.endLoop =
2809                                               mAudioTrack.getBoundaryEndTime();
2810            mPreviewEditSettings.backgroundMusicSettings.enableDucking = mAudioTrack
2811            .isDuckingEnabled();
2812            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold = mAudioTrack
2813            .getDuckingThreshhold();
2814            mPreviewEditSettings.backgroundMusicSettings.lowVolume = mAudioTrack
2815            .getDuckedTrackVolume();
2816            mPreviewEditSettings.backgroundMusicSettings.isLooping =
2817                                                        mAudioTrack.isLooping();
2818            mPreviewEditSettings.primaryTrackVolume = 100;
2819            mProcessingState  = PROCESSING_AUDIO_PCM;
2820            mProcessingObject = mAudioTrack;
2821        } else {
2822            if (mAudioSettings != null) {
2823                mAudioSettings = null;
2824            }
2825            if (mPreviewEditSettings.backgroundMusicSettings != null) {
2826                mPreviewEditSettings.backgroundMusicSettings = null;
2827            }
2828            mAudioTrackPCMFilePath = null;
2829        }
2830    }
2831
2832    /**
2833     * Calculates all the effects in all the media items
2834     * in media items list
2835     *
2836     * @param mediaItemsList The media item list
2837     *
2838     * @return The total number of effects
2839     *
2840     */
2841    private int getTotalEffects(List<MediaItem> mediaItemsList) {
2842        int totalEffects = 0;
2843        final Iterator<MediaItem> it = mediaItemsList.iterator();
2844        while (it.hasNext()) {
2845            final MediaItem t = it.next();
2846            totalEffects += t.getAllEffects().size();
2847            totalEffects += t.getAllOverlays().size();
2848            final Iterator<Effect> ef = t.getAllEffects().iterator();
2849            while (ef.hasNext()) {
2850                final Effect e = ef.next();
2851                if (e instanceof EffectKenBurns)
2852                    totalEffects--;
2853            }
2854        }
2855        return totalEffects;
2856    }
2857
2858    /**
2859     * Forms the clip settings array and clip properties array,
2860     * including transition clips and effect settings, for preview
2861     * or export.
2862     *
2864     * @param mediaItemsList The media item list
2865     * @param mediaTransitionList The transitions list
2866     * @param mediaBGMList The background music list
2867     * @param listener The MediaProcessingProgressListener
2868     *
2869     */
2870    public void previewStoryBoard(List<MediaItem> mediaItemsList,
2871            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
2872            MediaProcessingProgressListener listener) {
2873        if (mInvalidatePreviewArray) {
2874            int previewIndex = 0;
2875            int totalEffects = 0;
2876            int storyBoardTime = 0;
2877            int maxHeight = 0;
2878            int beginCutTime = 0;
2879            int endCutTime = 0;
2880            int effectIndex = 0;
2881            Transition lTransition = null;
2882            MediaItem lMediaItem = null;
2883            mPreviewEditSettings = new EditSettings();
2884            mClipProperties = new PreviewClipProperties();
2885            mTotalClips = 0;
2886
2887            mTotalClips = mediaItemsList.size();
2888            for (Transition transition : mediaTransitionList) {
2889                if (transition.getDuration() > 0)
2890                    mTotalClips++;
2891            }
2892
2893            totalEffects = getTotalEffects(mediaItemsList);
2894
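            /*
             * mTotalClips counts the media items plus every transition with a
             * non-zero duration, so the clip settings and clip properties
             * arrays below have one slot per rendered clip, while the effect
             * settings array has one slot per effect or overlay (Ken Burns
             * effects excluded, as they are rendered as separate clips).
             */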
2895            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
2896            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
2897            mClipProperties.clipProperties = new Properties[mTotalClips];
2898
2899            /* Record the callback progress listener */
2900            mMediaProcessingProgressListener = listener;
2901            mProgressToApp = 0;
2902
2903            if (mediaItemsList.size() > 0) {
2904                for (int i = 0; i < mediaItemsList.size(); i++) {
2905                    /* Get the Media Item from the list */
2906                    lMediaItem = mediaItemsList.get(i);
2907                    if (lMediaItem instanceof MediaVideoItem) {
2908                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
2909                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
2910                    } else if (lMediaItem instanceof MediaImageItem) {
2911                        beginCutTime = 0;
2912                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
2913                    }
2914                    /* Get the transition associated with Media Item */
2915                    lTransition = lMediaItem.getBeginTransition();
2916                    if (lTransition != null && (lTransition.getDuration() > 0)) {
2917                        /* generate transition clip */
2918                        generateTransition(lTransition, mPreviewEditSettings,
2919                                           mClipProperties, previewIndex);
2920                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2921                        previewIndex++;
2922                    }
2923                    /* Populate media item properties */
2924                    maxHeight = populateMediaItemProperties(lMediaItem,
2925                                                            previewIndex,
2926                                                            maxHeight);
2927                    /* Get the clip properties of the media item. */
2928                    if (lMediaItem instanceof MediaImageItem) {
2930                        int tmpCnt = 0;
2931                        boolean bEffectKbPresent = false;
2932                        List<Effect> effectList = lMediaItem.getAllEffects();
2933                        /*
2934                         * Check whether a Ken Burns effect is present
2935                         */
2936                        while ( tmpCnt < effectList.size()) {
2937                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
2938                                bEffectKbPresent = true;
2939                                break;
2940                            }
2941                            tmpCnt++;
2942                        }
2943
2944                        if (bEffectKbPresent) {
2945                            try {
2946                                mClipProperties.clipProperties[previewIndex]
2947                                    = getMediaProperties(((MediaImageItem)lMediaItem).getGeneratedImageClip());
2948                            } catch (Exception e) {
2949                                throw new IllegalArgumentException("Unsupported file or file not found");
2950                            }
2951                        } else {
2952                            try {
2953                                mClipProperties.clipProperties[previewIndex]
2954                                    = getMediaProperties(((MediaImageItem)lMediaItem).getScaledImageFileName());
2955                            } catch (Exception e) {
2956                                throw new IllegalArgumentException("Unsupported file or file not found");
2957                            }
2958                            mClipProperties.clipProperties[previewIndex].width = ((MediaImageItem)lMediaItem).getScaledWidth();
2959                            mClipProperties.clipProperties[previewIndex].height = ((MediaImageItem)lMediaItem).getScaledHeight();
2960                        }
2961
2962                    }else
2963                    {
2964                        try {
2965                            mClipProperties.clipProperties[previewIndex]
2966                                 = getMediaProperties(lMediaItem.getFilename());
2967                        } catch (Exception e) {
2968                            throw new IllegalArgumentException("Unsupported file or file not found");
2969                        }
2970                    }
2971                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
2972                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
2973                    adjustVolume(lMediaItem, mClipProperties, previewIndex);
2974
2975                    /*
2976                     * Adjust the media item start and end times w.r.t. the begin
2977                     * and end transitions associated with the media item
2978                     */
2979
2980                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
2981                            mClipProperties.clipProperties[previewIndex], lMediaItem);
2982
2983                    /*
2984                     * Get all the effects and overlays for that media item and
2985                     * adjust start time and duration of effects
2986                     */
2987
2988                    effectIndex = populateEffects(lMediaItem,
2989                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
2990                            endCutTime, storyBoardTime);
2991                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
2992                    previewIndex++;
2993
2994                    /* Check if there is an end transition on the last media item */
2995
2996                    if (i == (mediaItemsList.size() - 1)) {
2997                        lTransition = lMediaItem.getEndTransition();
2998                        if (lTransition != null && (lTransition.getDuration() > 0)) {
2999                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
3000                                    previewIndex);
3001                            break;
3002                        }
3003                    }
3004                }
3005            }
3006            if (!mErrorFlagSet) {
3007                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
3008                        .getAspectRatio(), maxHeight);
3009                /*if (mediaBGMList.size() == 1) //for remove Audio check */ {
3010                    populateBackgroundMusicProperties(mediaBGMList);
3011                }
3012                /* Call native populate settings */
3013                try {
3014                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3015                } catch (IllegalArgumentException ex) {
3016                    Log.e("MediaArtistNativeHelper",
3017                    "Illegal argument exception in nativePopulateSettings");
3018                    throw ex;
3019                } catch (IllegalStateException ex) {
3020                    Log.e("MediaArtistNativeHelper",
3021                    "Illegal state exception in nativePopulateSettings");
3022                    throw ex;
3023                } catch (RuntimeException ex) {
3024                    Log.e("MediaArtistNativeHelper", "Runtime exception in nativePopulateSettings");
3025                    throw ex;
3026                }
3027                mInvalidatePreviewArray = false;
3028                mProcessingState  = PROCESSING_NONE;
3029            }
3030            if (mErrorFlagSet) {
3031                mErrorFlagSet = false;
3032                throw new RuntimeException("preview generation cannot be completed");
3033            }
3034        }
3035    } /* END of previewStoryBoard */
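    /*
     * Illustrative usage sketch, not part of the original source: the preview
     * pipeline expects previewStoryBoard() to run before any preview or render
     * call, since it builds mPreviewEditSettings and mClipProperties. Assuming
     * a caller that already holds the timeline lists from the VideoEditor
     * (all names below are placeholders):
     *
     *   helper.previewStoryBoard(mediaItems, transitions, audioTracks, progressListener);
     *   helper.doPreview(surface, 0, durationMs, false, 4, previewListener);
     *   long stoppedAtMs = helper.stopPreview();
     */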
3036
3037    /**
3038     * This function is responsible for starting the preview
3039     *
3040     *
3041     * @param surface The surface on which preview has to be displayed
3042     * @param fromMs The time in ms from which preview has to be started
3043     * @param toMs The time in ms until which preview has to be played
3044     * @param loop Whether to loop the preview or not
3045     * @param callbackAfterFrameCount Indicates after how many frames
3046     * the callback is needed
3047     * @param listener The PreviewProgressListener
3048     */
3049    public void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
3050            int callbackAfterFrameCount, PreviewProgressListener listener) {
3051        mPreviewProgress = fromMs;
3052        mIsFirstProgress = true;
3053        mPreviewProgressListener = listener;
3054
3055        if (!mInvalidatePreviewArray) {
3056            try {
3057                /* Modify the image file names to point to the decoded RGB image files. */
3058                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3059                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3060                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3061                    }
3062                }
3063                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3064                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
3065            } catch (IllegalArgumentException ex) {
3066                Log.e("MediaArtistNativeHelper",
3067                "Illegal argument exception in nativeStartPreview");
3068                throw ex;
3069            } catch (IllegalStateException ex) {
3070                Log.e("MediaArtistNativeHelper", "Illegal state exception in nativeStartPreview");
3071                throw ex;
3072            } catch (RuntimeException ex) {
3073                Log.e("MediaArtistNativeHelper", "Runtime exception in nativeStartPreview");
3074                throw ex;
3075            }
3076        }
3077    }
3078
3079    /**
3080     * This function is responsible for stopping the preview
3081     */
3082    public long stopPreview() {
3083        nativeStopPreview();
3084        return mPreviewProgress;
3085    }
3086
3087    /**
3088     * This function is responsible for rendering a single frame
3089     * from the complete story board on the surface
3090     *
3091     * @param surface The surface on which frame has to be rendered
3092     * @param time The time in ms at which the frame has to be rendered
3093     * @param surfaceWidth The surface width
3094     * @param surfaceHeight The surface height
3095     * @param overlayData The overlay data
3096     *
3097     * @return The actual time from the story board at which the frame was extracted
3098     * and rendered
3099     */
3100    public long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
3101            int surfaceHeight, VideoEditor.OverlayData overlayData) {
3102        long timeMs = 0;
3103        if (!mInvalidatePreviewArray) {
3104            try {
3105                for (int clipCnt = 0;
3106                      clipCnt < mPreviewEditSettings.clipSettingsArray.length;
3107                      clipCnt++) {
3108
3109                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3110                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3111                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
3112                    }
3113                }
3114
3115                // Reset the render preview frame params that shall be set by native.
3116                mRenderPreviewOverlayFile = null;
3117                mRenderPreviewRenderingMode = MediaRendering.RESIZING;
3118                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3119                timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
3120
3121                if (mRenderPreviewOverlayFile != null) {
3122                    overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile), mRenderPreviewRenderingMode);
3123                } else {
3124                    overlayData.setClear();
3125                }
3126            } catch (IllegalArgumentException ex) {
3127                Log.e("MediaArtistNativeHelper",
3128                "Illegal Argument exception in nativeRenderPreviewFrame");
3129                throw ex;
3130            } catch (IllegalStateException ex) {
3131                Log.e("MediaArtistNativeHelper",
3132                "Illegal state exception in nativeRenderPreviewFrame");
3133                throw ex;
3134            } catch (RuntimeException ex) {
3135                Log.e("MediaArtistNativeHelper", "Runtime exception in nativeRenderPreviewFrame");
3136                throw ex;
3137            }
3138            return timeMs;
3139        } else {
3140            throw new RuntimeException("Call generate preview first");
3141        }
3142    }
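    /*
     * Illustrative sketch, not part of the original source: rendering a single
     * storyboard frame requires a previously generated preview, otherwise the
     * method throws. Given an OverlayData instance (overlay) obtained from the
     * VideoEditor, and placeholder names elsewhere:
     *
     *   long renderedAtMs = helper.renderPreviewFrame(surface, 5000,
     *                               surfaceWidth, surfaceHeight, overlay);
     *
     * The returned value is the storyboard time actually rendered, which may
     * differ slightly from the requested 5000 ms.
     */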
3143
3144    private void previewFrameEditInfo(String filename, int renderingMode) {
3145        mRenderPreviewOverlayFile = filename;
3146        mRenderPreviewRenderingMode = renderingMode;
3147    }
3148
3149
3150    /**
3151     * This function is responsible for rendering a single frame
3152     * from a single media item on the surface
3153     *
3154     * @param surface The surface on which frame has to be rendered
3155     * @param filepath The file path for which the frame needs to be displayed
3156     * @param time The time in ms at which the frame has to be rendered
3157     * @param framewidth The frame width
3158     * @param frameheight The frame height
3159     *
3160     * @return The actual time from the media item at which the frame was extracted
3161     * and rendered
3162     */
3163    public long renderMediaItemPreviewFrame(Surface surface, String filepath,
3164                                            long time, int framewidth,
3165                                            int frameheight) {
3166        long timeMs = 0;
3167        try {
3168
3169            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
3170                    frameheight, 0, 0, time);
3171        } catch (IllegalArgumentException ex) {
3172            Log.e("MediaArtistNativeHelper",
3173            "Illegal Argument exception in renderMediaItemPreviewFrame");
3174            throw ex;
3175        } catch (IllegalStateException ex) {
3176            Log.e("MediaArtistNativeHelper",
3177            "Illegal state exception in renderMediaItemPreviewFrame");
3178            throw ex;
3179        } catch (RuntimeException ex) {
3180            Log.e("MediaArtistNativeHelper", "Runtime exception in renderMediaItemPreviewFrame");
3181            throw ex;
3182        }
3183
3184        return timeMs;
3185    }
3186
3187    /**
3188     * This function sets the flag that invalidates the preview array
3189     * so that the preview is generated again
3190     */
3191    void setGeneratePreview(boolean isRequired) {
3192        mInvalidatePreviewArray = isRequired;
3193    }
3194
3195    /**
3196     * @return The current status of the preview invalidation
3197     * flag
3198     */
3199    boolean getGeneratePreview() {
3200        return mInvalidatePreviewArray;
3201    }
3202
3203    /**
3204     * Calculates the aspect ratio from width and height
3205     *
3206     * @param w The width of media item
3207     * @param h The height of media item
3208     *
3209     * @return The calculated aspect ratio
3210     */
3211    public int getAspectRatio(int w, int h) {
3212        double apRatio = (double)(w) / (double)(h);
3213        BigDecimal bd = new BigDecimal(apRatio);
3214        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
3215        apRatio = bd.doubleValue();
3216        int var = MediaProperties.ASPECT_RATIO_16_9;
3217        if (apRatio >= 1.7) {
3218            var = MediaProperties.ASPECT_RATIO_16_9;
3219        } else if (apRatio >= 1.6) {
3220            var = MediaProperties.ASPECT_RATIO_5_3;
3221        } else if (apRatio >= 1.5) {
3222            var = MediaProperties.ASPECT_RATIO_3_2;
3223        } else if (apRatio > 1.3) {
3224            var = MediaProperties.ASPECT_RATIO_4_3;
3225        } else if (apRatio >= 1.2) {
3226            var = MediaProperties.ASPECT_RATIO_11_9;
3227        }
3228        return var;
3229    }
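    /*
     * Worked example, illustrative and not from the original source: for a
     * 1280x720 clip the ratio is 1280/720 = 1.778, which is >= 1.7, so
     * getAspectRatio(1280, 720) returns ASPECT_RATIO_16_9; for a 640x480 clip
     * the ratio is 1.333, which falls in the (1.3, 1.5) band and maps to
     * ASPECT_RATIO_4_3.
     */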
3230
3231    /**
3232     * Maps the file type used in native layer
3233     * to file type used in JAVA layer
3234     *
3235     * @param fileType The file type in native layer
3236     *
3237     * @return The File type in JAVA layer
3238     */
3239    public int getFileType(int fileType) {
3240        int retValue = -1;
3241        switch (fileType) {
3242            case FileType.UNSUPPORTED:
3243                retValue = MediaProperties.FILE_UNSUPPORTED;
3244                break;
3245            case FileType.THREE_GPP:
3246                retValue = MediaProperties.FILE_3GP;
3247                break;
3248            case FileType.MP4:
3249                retValue = MediaProperties.FILE_MP4;
3250                break;
3251            case FileType.JPG:
3252                retValue = MediaProperties.FILE_JPEG;
3253                break;
3254            case FileType.PNG:
3255                retValue = MediaProperties.FILE_PNG;
3256                break;
3257            case FileType.MP3:
3258                retValue = MediaProperties.FILE_MP3;
3259                break;
3260
3261            default:
3262                retValue = -1;
3263        }
3264        return retValue;
3265    }
3266
3267    /**
3268     * Maps the video codec type used in native layer
3269     * to video codec type used in JAVA layer
3270     *
3271     * @param codecType The video codec type in native layer
3272     *
3273     * @return The video codec type in JAVA layer
3274     */
3275    public int getVideoCodecType(int codecType) {
3276        int retValue = -1;
3277        switch (codecType) {
3278            case VideoFormat.H263:
3279                retValue = MediaProperties.VCODEC_H263;
3280                break;
3281            case VideoFormat.H264:
3282                retValue = MediaProperties.VCODEC_H264BP;
3283                break;
3284            case VideoFormat.MPEG4:
3285                retValue = MediaProperties.VCODEC_MPEG4;
3286                break;
3287            case VideoFormat.UNSUPPORTED:
3288
3289            default:
3290                retValue = -1;
3291        }
3292        return retValue;
3293    }
3294
3295    /**
3296     * Maps the audio codec type used in native layer
3297     * to audio codec type used in JAVA layer
3298     *
3299     * @param codecType The audio codec type in native layer
3300     *
3301     * @return The audio codec type in JAVA layer
3302     */
3303    public int getAudioCodecType(int codecType) {
3304        int retValue = -1;
3305        switch (codecType) {
3306            case AudioFormat.AMR_NB:
3307                retValue = MediaProperties.ACODEC_AMRNB;
3308                break;
3309            case AudioFormat.AAC:
3310                retValue = MediaProperties.ACODEC_AAC_LC;
3311                break;
3312            case AudioFormat.MP3:
3313                retValue = MediaProperties.ACODEC_MP3;
3314                break;
3315
3316            default:
3317                retValue = -1;
3318        }
3319        return retValue;
3320    }
3321
3322    /**
3323     * Returns the frame rate as integer
3324     *
3325     * @param fps The fps as enum
3326     *
3327     * @return The frame rate as integer
3328     */
3329    public int getFrameRate(int fps) {
3330        int retValue = -1;
3331        switch (fps) {
3332            case VideoFrameRate.FR_5_FPS:
3333                retValue = 5;
3334                break;
3335            case VideoFrameRate.FR_7_5_FPS:
3336                retValue = 8;
3337                break;
3338            case VideoFrameRate.FR_10_FPS:
3339                retValue = 10;
3340                break;
3341            case VideoFrameRate.FR_12_5_FPS:
3342                retValue = 13;
3343                break;
3344            case VideoFrameRate.FR_15_FPS:
3345                retValue = 15;
3346                break;
3347            case VideoFrameRate.FR_20_FPS:
3348                retValue = 20;
3349                break;
3350            case VideoFrameRate.FR_25_FPS:
3351                retValue = 25;
3352                break;
3353            case VideoFrameRate.FR_30_FPS:
3354                retValue = 30;
3355                break;
3356
3357            default:
3358                retValue = -1;
3359        }
3360        return retValue;
3361    }
3362
3363    /**
3364     * Maps the file type used in JAVA layer
3365     * to file type used in native layer
3366     *
3367     * @param fileType The file type in JAVA layer
3368     *
3369     * @return The File type in native layer
3370     */
3371    int getMediaItemFileType(int fileType) {
3372        int retValue = -1;
3373
3374        switch (fileType) {
3375            case MediaProperties.FILE_UNSUPPORTED:
3376                retValue = FileType.UNSUPPORTED;
3377                break;
3378            case MediaProperties.FILE_3GP:
3379                retValue = FileType.THREE_GPP;
3380                break;
3381            case MediaProperties.FILE_MP4:
3382                retValue = FileType.MP4;
3383                break;
3384            case MediaProperties.FILE_JPEG:
3385                retValue = FileType.JPG;
3386                break;
3387            case MediaProperties.FILE_PNG:
3388                retValue = FileType.PNG;
3389                break;
3390
3391            default:
3392                retValue = -1;
3393        }
3394        return retValue;
3395
3396    }
3397
3398    /**
3399     * Maps the rendering mode used in JAVA layer
3400     * to rendering mode used in native layer
3401     *
3402     * @param renderingMode The rendering mode in JAVA layer
3403     *
3404     * @return The rendering mode in native layer
3405     */
3406    int getMediaItemRenderingMode(int renderingMode) {
3407        int retValue = -1;
3408        switch (renderingMode) {
3409            case MediaItem.RENDERING_MODE_BLACK_BORDER:
3410                retValue = MediaRendering.BLACK_BORDERS;
3411                break;
3412            case MediaItem.RENDERING_MODE_STRETCH:
3413                retValue = MediaRendering.RESIZING;
3414                break;
3415            case MediaItem.RENDERING_MODE_CROPPING:
3416                retValue = MediaRendering.CROPPING;
3417                break;
3418
3419            default:
3420                retValue = -1;
3421        }
3422        return retValue;
3423    }
3424
3425    /**
3426     * Maps the transition behavior used in JAVA layer
3427     * to transition behavior used in native layer
3428     *
3429     * @param transitionType The transition behavior in JAVA layer
3430     *
3431     * @return The transition behavior in native layer
3432     */
3433    int getVideoTransitionBehaviour(int transitionType) {
3434        int retValue = -1;
3435        switch (transitionType) {
3436            case Transition.BEHAVIOR_SPEED_UP:
3437                retValue = TransitionBehaviour.SPEED_UP;
3438                break;
3439            case Transition.BEHAVIOR_SPEED_DOWN:
3440                retValue = TransitionBehaviour.SPEED_DOWN;
3441                break;
3442            case Transition.BEHAVIOR_LINEAR:
3443                retValue = TransitionBehaviour.LINEAR;
3444                break;
3445            case Transition.BEHAVIOR_MIDDLE_SLOW:
3446                retValue = TransitionBehaviour.SLOW_MIDDLE;
3447                break;
3448            case Transition.BEHAVIOR_MIDDLE_FAST:
3449                retValue = TransitionBehaviour.FAST_MIDDLE;
3450                break;
3451
3452            default:
3453                retValue = -1;
3454        }
3455        return retValue;
3456    }
3457
3458    /**
3459     * Maps the transition slide direction used in JAVA layer
3460     * to transition slide direction used in native layer
3461     *
3462     * @param slideDirection The transition slide direction
3463     * in JAVA layer
3464     *
3465     * @return The transition slide direction in native layer
3466     */
3467    int getSlideSettingsDirection(int slideDirection) {
3468        int retValue = -1;
3469        switch (slideDirection) {
3470            case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
3471                retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
3472                break;
3473            case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
3474                retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
3475                break;
3476            case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
3477                retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
3478                break;
3479            case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
3480                retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
3481                break;
3482
3483            default:
3484                retValue = -1;
3485        }
3486        return retValue;
3487    }
3488
3489    /**
3490     * Maps the effect color type used in JAVA layer
3491     * to effect color type used in native layer
3492     *
3493     * @param effect The EffectColor reference
3494     *
3495     * @return The color effect value from native layer
3496     */
3497    private int getEffectColorType(EffectColor effect) {
3498        int retValue = -1;
3499        switch (effect.getType()) {
3500            case EffectColor.TYPE_COLOR:
3501                if (effect.getColor() == EffectColor.GREEN) {
3502                    retValue = VideoEffect.GREEN;
3503                } else if (effect.getColor() == EffectColor.PINK) {
3504                    retValue = VideoEffect.PINK;
3505                } else if (effect.getColor() == EffectColor.GRAY) {
3506                    retValue = VideoEffect.BLACK_AND_WHITE;
3507                } else {
3508                    retValue = VideoEffect.COLORRGB16;
3509                }
3510                break;
3511            case EffectColor.TYPE_GRADIENT:
3512                retValue = VideoEffect.GRADIENT;
3513                break;
3514            case EffectColor.TYPE_SEPIA:
3515                retValue = VideoEffect.SEPIA;
3516                break;
3517            case EffectColor.TYPE_NEGATIVE:
3518                retValue = VideoEffect.NEGATIVE;
3519                break;
3520            case EffectColor.TYPE_FIFTIES:
3521                retValue = VideoEffect.FIFTIES;
3522                break;
3523
3524            default:
3525                retValue = -1;
3526        }
3527        return retValue;
3528    }
3529
3530    /**
3531     * Calculates the video resolution for the output clip
3532     * based on the clip's height and the aspect ratio of the storyboard
3533     *
3534     * @param aspectRatio The aspect ratio of story board
3535     * @param height The height of clip
3536     *
3537     * @return The video resolution
3538     */
3539    private int findVideoResolution(int aspectRatio, int height) {
3540        final Pair<Integer, Integer>[] resolutions;
3541        final Pair<Integer, Integer> maxResolution;
3542        int retValue = VideoFrameSize.SIZE_UNDEFINED;
3543        switch (aspectRatio) {
3544            case MediaProperties.ASPECT_RATIO_3_2:
3545                if (height == MediaProperties.HEIGHT_480)
3546                    retValue = VideoFrameSize.NTSC;
3547                else if (height == MediaProperties.HEIGHT_720)
3548                    retValue = VideoFrameSize.W720p;
3549                break;
3550            case MediaProperties.ASPECT_RATIO_16_9:
3551                if (height == MediaProperties.HEIGHT_480)
3552                    retValue = VideoFrameSize.WVGA16x9;
3553                else if (height == MediaProperties.HEIGHT_720)
3554                    retValue = VideoFrameSize.V720p;
3555                break;
3556            case MediaProperties.ASPECT_RATIO_4_3:
3557                if (height == MediaProperties.HEIGHT_480)
3558                    retValue = VideoFrameSize.VGA;
3559                if (height == MediaProperties.HEIGHT_720)
3560                    retValue = VideoFrameSize.S720p;
3561                break;
3562            case MediaProperties.ASPECT_RATIO_5_3:
3563                if (height == MediaProperties.HEIGHT_480)
3564                    retValue = VideoFrameSize.WVGA;
3565                break;
3566            case MediaProperties.ASPECT_RATIO_11_9:
3567                if (height == MediaProperties.HEIGHT_144)
3568                    retValue = VideoFrameSize.QCIF;
3569                break;
3570        }
3571        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
3572            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
3573            // Get the highest resolution
3574            maxResolution = resolutions[resolutions.length - 1];
3575            retValue = findVideoResolution(mVideoEditor.getAspectRatio(),
3576                                           maxResolution.second);
3577        }
3578
3579        return retValue;
3580    }
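    /*
     * Worked example, illustrative and not from the original source: with a
     * storyboard aspect ratio of ASPECT_RATIO_16_9 and a clip height of 720
     * this returns VideoFrameSize.V720p. A combination with no direct match
     * (for example 16:9 with height 144) yields SIZE_UNDEFINED on the first
     * pass and is then resolved recursively using the highest resolution
     * supported for the editor's aspect ratio.
     */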
3581
3582    /**
3583     * This method is responsible for exporting a movie
3584     *
3585     * @param filePath The output file path
3586     * @param projectDir The output project directory
3587     * @param height The height of clip
3588     * @param bitrate The bitrate at which the movie should be exported
3589     * @param mediaItemsList The media items list
3590     * @param mediaTransitionList The transitions list
3591     * @param mediaBGMList The background track list
3592     * @param listener The ExportProgressListener
3593     *
3594     */
3595    public void export(String filePath, String projectDir, int height, int bitrate,
3596            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
3597            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
3598
3599        int outBitrate = 0;
3600        mExportFilename = filePath;
3601        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3602        mExportProgressListener = listener;
3603
3604        mProgressToApp = 0;
3605
3606        switch (bitrate) {
3607            case MediaProperties.BITRATE_28K:
3608                outBitrate = Bitrate.BR_32_KBPS;
3609                break;
3610            case MediaProperties.BITRATE_40K:
3611                outBitrate = Bitrate.BR_48_KBPS;
3612                break;
3613            case MediaProperties.BITRATE_64K:
3614                outBitrate = Bitrate.BR_64_KBPS;
3615                break;
3616            case MediaProperties.BITRATE_96K:
3617                outBitrate = Bitrate.BR_96_KBPS;
3618                break;
3619            case MediaProperties.BITRATE_128K:
3620                outBitrate = Bitrate.BR_128_KBPS;
3621                break;
3622            case MediaProperties.BITRATE_192K:
3623                outBitrate = Bitrate.BR_192_KBPS;
3624                break;
3625            case MediaProperties.BITRATE_256K:
3626                outBitrate = Bitrate.BR_256_KBPS;
3627                break;
3628            case MediaProperties.BITRATE_384K:
3629                outBitrate = Bitrate.BR_384_KBPS;
3630                break;
3631            case MediaProperties.BITRATE_512K:
3632                outBitrate = Bitrate.BR_512_KBPS;
3633                break;
3634            case MediaProperties.BITRATE_800K:
3635                outBitrate = Bitrate.BR_800_KBPS;
3636                break;
3637            case MediaProperties.BITRATE_2M:
3638                outBitrate = Bitrate.BR_2_MBPS;
3639                break;
3640
3641            case MediaProperties.BITRATE_5M:
3642                outBitrate = Bitrate.BR_5_MBPS;
3643                break;
3644            case MediaProperties.BITRATE_8M:
3645                outBitrate = Bitrate.BR_8_MBPS;
3646                break;
3647
3648            default:
3649                throw new IllegalArgumentException("Argument Bitrate incorrect");
3650        }
3651        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3652        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3653
3654        int aspectRatio = mVideoEditor.getAspectRatio();
3655        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3656        mPreviewEditSettings.videoFormat = VideoFormat.H264;
3657        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3658        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3659        mPreviewEditSettings.maxFileSize = 0;
3660        mPreviewEditSettings.audioChannels = 2;
3661        mPreviewEditSettings.videoBitrate = outBitrate;
3662        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3663
3664        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
3665        for (int index = 0; index < mTotalClips - 1; index++) {
3666            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
3667            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType = VideoTransition.NONE;
3668            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType = AudioTransition.NONE;
3669        }
3670        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3671            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3672                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3673                mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3674            }
3675        }
3676        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3677
3678        int err = 0;
3679        try {
3680            mProcessingState  = PROCESSING_EXPORT;
3681            mProcessingObject = null;
3682            err = generateClip(mPreviewEditSettings);
3683            mProcessingState  = PROCESSING_NONE;
3684        } catch (IllegalArgumentException ex) {
3685            Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
3686            throw ex;
3687        } catch (IllegalStateException ex) {
3688            Log.e("MediaArtistNativeHelper", "IllegalStateException for generateClip");
3689            throw ex;
3690        } catch (RuntimeException ex) {
3691            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3692            throw ex;
3693        }
3694
3695        if (err != 0) {
3696            Log.e("MediaArtistNativeHelper", "generateClip failed with error=" + err);
3697            throw new RuntimeException("generateClip failed with error=" + err);
3698        }
3699
3700        mExportDone = true;
3701        setGeneratePreview(true);
3702        mExportProgressListener = null;
3703    }
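    /*
     * Illustrative usage sketch, not part of the original source: export reuses
     * the preview pipeline, so the same timeline lists are passed in together
     * with the target height and one of the MediaProperties.BITRATE_* constants.
     * With placeholder names and an example output path:
     *
     *   helper.export("/sdcard/output.mp4", projectDir, MediaProperties.HEIGHT_720,
     *                 MediaProperties.BITRATE_2M, mediaItems, transitions,
     *                 audioTracks, exportListener);
     *
     * An unrecognized bitrate constant is rejected with IllegalArgumentException
     * before any native call is made.
     */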
3704
3705    /**
3706     * This method is responsible for exporting a movie
3707     *
3708     * @param filePath The output file path
3709     * @param projectDir The output project directory
3710     * @param height The height of clip
3711     * @param bitrate The bitrate at which the movie should be exported
3712     * @param audioCodec The audio codec to use
3713     * @param videoCodec The video codec to use
3714     * @param mediaItemsList The media items list
3715     * @param mediaTransitionList The transitions list
3716     * @param mediaBGMList The background track list
3717     * @param listener The ExportProgressListener
3718     *
3719     */
3720    public void export(String filePath, String projectDir,int height,int bitrate,
3721            int audioCodec,int videoCodec,List<MediaItem> mediaItemsList,
3722            List<Transition> mediaTransitionList,List<AudioTrack> mediaBGMList,
3723            ExportProgressListener listener) {
3724
3725        int outBitrate = 0;
3726        mExportFilename = filePath;
3727        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
3728        mExportProgressListener = listener;
3729
3730        mProgressToApp = 0;
3731
3732        switch (bitrate) {
3733            case MediaProperties.BITRATE_28K:
3734                outBitrate = Bitrate.BR_32_KBPS;
3735                break;
3736            case MediaProperties.BITRATE_40K:
3737                outBitrate = Bitrate.BR_48_KBPS;
3738                break;
3739            case MediaProperties.BITRATE_64K:
3740                outBitrate = Bitrate.BR_64_KBPS;
3741                break;
3742            case MediaProperties.BITRATE_96K:
3743                outBitrate = Bitrate.BR_96_KBPS;
3744                break;
3745            case MediaProperties.BITRATE_128K:
3746                outBitrate = Bitrate.BR_128_KBPS;
3747                break;
3748            case MediaProperties.BITRATE_192K:
3749                outBitrate = Bitrate.BR_192_KBPS;
3750                break;
3751            case MediaProperties.BITRATE_256K:
3752                outBitrate = Bitrate.BR_256_KBPS;
3753                break;
3754            case MediaProperties.BITRATE_384K:
3755                outBitrate = Bitrate.BR_384_KBPS;
3756                break;
3757            case MediaProperties.BITRATE_512K:
3758                outBitrate = Bitrate.BR_512_KBPS;
3759                break;
3760            case MediaProperties.BITRATE_800K:
3761                outBitrate = Bitrate.BR_800_KBPS;
3762                break;
3763            case MediaProperties.BITRATE_2M:
3764                outBitrate = Bitrate.BR_2_MBPS;
3765                break;
3766            case MediaProperties.BITRATE_5M:
3767                outBitrate = Bitrate.BR_5_MBPS;
3768                break;
3769            case MediaProperties.BITRATE_8M:
3770                outBitrate = Bitrate.BR_8_MBPS;
3771                break;
3772
3773            default:
3774                throw new IllegalArgumentException("Argument Bitrate incorrect");
3775        }
3776        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
3777        mPreviewEditSettings.outputFile = mOutputFilename = filePath;
3778
3779        int aspectRatio = mVideoEditor.getAspectRatio();
3780        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
3781        switch (audioCodec) {
3782            case MediaProperties.ACODEC_AAC_LC:
3783                mPreviewEditSettings.audioFormat = AudioFormat.AAC;
3784                break;
3785            case MediaProperties.ACODEC_AMRNB:
3786                mPreviewEditSettings.audioFormat = AudioFormat.AMR_NB;
3787                break;
3788        }
3789
3790        switch (videoCodec) {
3791            case MediaProperties.VCODEC_H263:
3792                mPreviewEditSettings.videoFormat = VideoFormat.H263;
3793                break;
3794            case MediaProperties.VCODEC_H264BP:
3795                mPreviewEditSettings.videoFormat = VideoFormat.H264;
3796                break;
3797            case MediaProperties.VCODEC_MPEG4:
3798                mPreviewEditSettings.videoFormat = VideoFormat.MPEG4;
3799                break;
3800        }
3801
3802        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
3803        mPreviewEditSettings.maxFileSize = 0;
3804        mPreviewEditSettings.audioChannels = 2;
3805        mPreviewEditSettings.videoBitrate = outBitrate;
3806        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
3807
3808        mPreviewEditSettings.transitionSettingsArray =
3809                                        new TransitionSettings[mTotalClips - 1];
3810        for (int index = 0; index < mTotalClips - 1; index++) {
3811            mPreviewEditSettings.transitionSettingsArray[index] =
3812                                                       new TransitionSettings();
3813            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
3814                                                                      VideoTransition.NONE;
3815            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
3816                                                                      AudioTransition.NONE;
3817        }
3818        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
3819            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
3820                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
3821                  mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
3822            }
3823        }
3824        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
3825
3826        int err = 0;
3827        try {
3828            mProcessingState  = PROCESSING_EXPORT;
3829            mProcessingObject = null;
3830            err = generateClip(mPreviewEditSettings);
3831            mProcessingState  = PROCESSING_NONE;
3832        } catch (IllegalArgumentException ex) {
3833            Log.e("MediaArtistNativeHelper", "IllegalArgument for generateClip");
3834            throw ex;
3835        } catch (IllegalStateException ex) {
3836            Log.e("MediaArtistNativeHelper", "IllegalStateException for generateClip");
3837            throw ex;
3838        } catch (RuntimeException ex) {
3839            Log.e("MediaArtistNativeHelper", "RuntimeException for generateClip");
3840            throw ex;
3841        }
3842
3843        if (err != 0) {
3844            Log.e("MediaArtistNativeHelper", "generateClip failed with error=" + err);
3845            throw new RuntimeException("generateClip failed with error=" + err);
3846        }
3847
3848        mExportDone = true;
3849        setGeneratePreview(true);
3850        mExportProgressListener = null;
3851    }
3852
3853
3854    /**
3855     * This method takes care of stopping the export process
3856     *
3857     * @param filename The name of the file for which export has to be stopped
3858     */
3859    public void stop(String filename) {
3860        if (!mExportDone) {
3861            try {
3862                stopEncoding();
3863            } catch (IllegalStateException ex) {
3864                Log.e("MediaArtistNativeHelper", "Illegal state exception in unload settings");
3865                throw ex;
3866            } catch (RuntimeException ex) {
3867                Log.e("MediaArtistNativeHelper", "Runtime exception in unload settings");
3868                throw ex;
3869            }
3870
3871            new File(mExportFilename).delete();
3872        }
3873    }
3874
3875    /**
3876     * This method extracts a frame from the input file
3877     * and returns the frame as a bitmap
3878     *
3879     * @param inputFile The inputFile
3880     * @param width The width of the output frame
3881     * @param height The height of the output frame
3882     * @param timeMS The time in ms at which the frame has to be extracted
3883     */
3884    public Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
3885        if (inputFile == null) {
3886            throw new IllegalArgumentException();
3887        }
3888
3889        int newWidth = 0;
3890        int newHeight = 0;
3891        Bitmap tempBitmap = null;
3892
3893        /* Round width and height up to even values */
3894        newWidth = (width + 1) & 0xFFFFFFFE;
3895        newHeight = (height + 1) & 0xFFFFFFFE;
3896
3897        /* Create a temp bitmap used for resizing the frame */
3898        if ((newWidth != width) || (newHeight != height)) {
3899             tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3900        }
3901
3902        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4);
3903        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3904        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);
3905
3906        if ((newWidth == width) && (newHeight == height)) {
3907            bitmap.copyPixelsFromBuffer(rgb888);
3908        } else {
3909            /* Create a temp bitmap to be used for resize */
3910            tempBitmap.copyPixelsFromBuffer(rgb888);
3911
3912            /* Create a canvas to resize */
3913            final Canvas canvas = new Canvas(bitmap);
3914            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3915                                          new Rect(0, 0, width, height),
3916                                          sResizePaint);
3917        }
3918
3919        if (tempBitmap != null) {
3920            tempBitmap.recycle();
3921        }
3922        return bitmap;
3923    }
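    /*
     * Illustrative sketch, not part of the original source: a single frame at
     * 2 s into a clip could be fetched with placeholder values such as:
     *
     *   Bitmap frame = helper.getPixels("/sdcard/clip.3gp", 320, 240, 2000);
     *
     * Width and height are rounded up to even values internally; if they were
     * already even the decoded pixels are copied straight into the returned
     * bitmap, otherwise they are rescaled through a temporary bitmap.
     */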
3924
3925    /**
3926     * This method extracts a list of frames from the
3927     * input file and returns the frames in a bitmap array
3928     *
3929     * @param filename The inputFile
3930     * @param width The width of the output frame
3931     * @param height The height of the output frame
3932     * @param startMs The starting time in ms
3933     * @param endMs The end time in ms
3934     * @param thumbnailCount The number of frames to be extracted
3935     * from startMs to endMs
3936     *
3937     * @return The frames as bitmaps in bitmap array
3938     **/
3939    public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
3940            int thumbnailCount) {
3941        int[] rgb888 = null;
3942        int thumbnailSize = 0;
3943        int newWidth = 0;
3944        int newHeight = 0;
3945        Bitmap tempBitmap = null;
3946
3947        /* Round width and height up to even values */
3948        newWidth = (width + 1) & 0xFFFFFFFE;
3949        newHeight = (height + 1) & 0xFFFFFFFE;
3950        thumbnailSize = newWidth * newHeight * 4;
3951
3952        /* Create a temp bitmap for resized thumbnails */
3953        if ((newWidth != width) || (newHeight != height)) {
3954            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
3955        }
3956        int i = 0;
3957        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
3958        Bitmap[] bitmap = null;
3959
3960        try {
3961            // This may result in an OutOfMemoryError
3962            rgb888 = new int[thumbnailSize * thumbnailCount];
3963            bitmap = new Bitmap[thumbnailCount];
3964        } catch (Throwable e) {
3965            // Fall back to an allocation with a fixed thumbnail count
3966            try {
3967                System.gc();
3968                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
3969                bitmap = new Bitmap[MAX_THUMBNAIL_PERMITTED];
3970                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
3971            } catch (Throwable ex) {
3972                throw new RuntimeException("Memory allocation failed, thumbnail count too large: " + thumbnailCount);
3973            }
3974        }
3975        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
3976        nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount, startMs,
3977                endMs);
3978
3979        for (; i < thumbnailCount; i++) {
3980            bitmap[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
3981            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
3982            tmpBuffer.rewind();
3983
3984            if ((newWidth == width) && (newHeight == height)) {
3985                bitmap[i].copyPixelsFromBuffer(tmpBuffer);
3986            } else {
3987                /* Copy the out rgb buffer to temp bitmap */
3988                tempBitmap.copyPixelsFromBuffer(tmpBuffer);
3989
3990                /* Create a canvas to resize */
3991                final Canvas canvas = new Canvas(bitmap[i]);
3992                canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
3993                                              new Rect(0, 0, width, height),
3994                                              sResizePaint);
3995            }
3996        }
3997
3998        if (tempBitmap != null) {
3999            tempBitmap.recycle();
4000        }
4001        return bitmap;
4002    }
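    /*
     * Note, illustrative and not from the original source: thumbnails are
     * spaced deltaTime = (endMs - startMs) / thumbnailCount apart, and if the
     * pixel buffer for the requested count cannot be allocated the count is
     * capped at MAX_THUMBNAIL_PERMITTED before extraction.
     */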
4003
4004    /**
4005     * This method generates the audio graph
4006     *
4007     * @param uniqueId The unique id
4008     * @param inFileName The inputFile
4009     * @param inFileName The input file name
4010     * @param OutAudiGraphFileName The output audio graph file name
4011     * @param frameDuration The duration of each frame
4012     * @param audioChannels The number of audio channels
4013     * @param samplesCount The total number of samples
4014     * @param listener The ExtractAudioWaveformProgressListener reference
4015     * @param isVideo The flag to indicate whether the input file is a video file
4016     **/
4017    public void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
4018            int frameDuration, int audioChannels, int samplesCount,
4019            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
4020        String tempPCMFileName;
4021
4022        mExtractAudioWaveformProgressListener = listener;
4023
4024        /**
4025         * In the case of video, the first step generates the PCM file from
4026         * which the audio graph is built
4027         */
4028        if (isVideo) {
4029            tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
4030        } else {
4031            tempPCMFileName = mAudioTrackPCMFilePath;
4032        }
4033        /**
4034         * For Video item, generate the PCM
4035         */
4036        if (isVideo) {
4037            nativeGenerateRawAudio(inFileName, tempPCMFileName);
4038        }
4039
4040        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
4041                audioChannels, samplesCount);
4042
4043        /* Once the audio graph file is generated, delete the PCM file */
4044        if (isVideo) {
4045            new File(tempPCMFileName).delete();
4046        }
4047    }
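    /*
     * Illustrative usage sketch, not part of the original source: for a video
     * media item the helper first decodes the audio track to a temporary PCM
     * file in the project directory, builds the graph from it, and then deletes
     * the PCM file; for an audio track the PCM at mAudioTrackPCMFilePath is
     * used directly. With placeholder values:
     *
     *   helper.generateAudioGraph(item.getId(), item.getFilename(), graphPath,
     *                             frameDurationMs, 2, samplesCount, listener, true);
     */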
4048
4049    public void clearPreviewSurface(Surface surface) {
4050       nativeClearSurface(surface);
4051    }
4052
4053    /**     Native Methods        */
4054    native Properties getMediaProperties(String file) throws IllegalArgumentException,
4055    IllegalStateException, RuntimeException, Exception;
4056
4057    /**
4058     * Get the version of ManualEdit.
4059     *
4060     * @return version of ManualEdit
4061     * @throws RuntimeException if an error occurred
4062     * @see Version
4063     */
4064    private static native Version getVersion() throws RuntimeException;
4065
4066    /**
4067     * Returns the video thumbnail in an array of integers. Output format is
4068     * ARGB8888.
4069     *
4070     * @param pixelArray the array that receives the pixel values
4071     * @param width width of the video thumbnail
4072     * @param height height of the video thumbnail
4073     * @param timeMS desired time of the thumbnail in ms
4074     * @return actual time in ms of the thumbnail generated
4075     * @throws IllegalStateException if the class has not been initialized
4076     * @throws IllegalArgumentException if the pixelArray is not available or
4077     *             one of the dimensions is negative or zero or the time is
4078     *             negative
4079     * @throws RuntimeException on runtime errors in native code
4080     */
4081    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
4082            long timeMS);
4083
4084    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
4085            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);
4086
4087    /**
4088     * Releases the JNI and cleans up the core native module. Should be called
4089     * only after init()
4090     *
4091     * @throws IllegalStateException if the method could not be called
4092     */
4093    private native void release() throws IllegalStateException, RuntimeException;
4094
4095    /*
4096     * Clear the preview surface
4097     */
4098    private native void nativeClearSurface(Surface surface);
4099
4100
4101    /**
4102     * Stops the encoding. This method should only be called after encoding has
4103     * started using method <code> startEncoding</code>
4104     *
4105     * @throws IllegalStateException if the method could not be called
4106     */
4107    private native void stopEncoding() throws IllegalStateException, RuntimeException;
4108
4109
4110
4111    private native void _init(String tempPath, String libraryPath)
4112            throws IllegalArgumentException, IllegalStateException, RuntimeException;
4113
4114    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
4115            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
4116            IllegalStateException, RuntimeException;
4117
4118    private native void nativePopulateSettings(EditSettings mEditSettings,
4119            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
4120    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4121
4122    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
4123                                                 int surfaceWidth, int surfaceHeight)
4124                                                 throws IllegalArgumentException,
4125                                                 IllegalStateException, RuntimeException;
4126
4127    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
4128            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
4129    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4130
4131    private native void nativeStopPreview();
4132
4133    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
4134            int frameDuration, int channels, int sampleCount);
4135
4136    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
4137
4138    private native int nativeGenerateClip(EditSettings editSettings)
4139    throws IllegalArgumentException, IllegalStateException, RuntimeException;
4140
4141}
4142