// MediaArtistNativeHelper.java revision ff90212a3c00e89b25399eb90cdce471cc8c8cc1
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.media.videoeditor; 18 19import java.io.File; 20import java.io.IOException; 21import java.math.BigDecimal; 22import java.nio.IntBuffer; 23import java.util.Iterator; 24import java.util.List; 25import java.util.concurrent.Semaphore; 26 27import android.graphics.Bitmap; 28import android.graphics.BitmapFactory; 29import android.graphics.Canvas; 30import android.graphics.Paint; 31import android.graphics.Rect; 32import android.media.videoeditor.VideoEditor.ExportProgressListener; 33import android.media.videoeditor.VideoEditor.PreviewProgressListener; 34import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener; 35import android.util.Log; 36import android.util.Pair; 37import android.view.Surface; 38 39/** 40 *This class provide Native methods to be used by MediaArtist {@hide} 41 */ 42class MediaArtistNativeHelper { 43 private static final String TAG = "MediaArtistNativeHelper"; 44 45 static { 46 System.loadLibrary("videoeditor_jni"); 47 } 48 49 private static final int MAX_THUMBNAIL_PERMITTED = 8; 50 51 public static final int TASK_LOADING_SETTINGS = 1; 52 public static final int TASK_ENCODING = 2; 53 54 /** 55 * The resize paint 56 */ 57 private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG); 58 59 private final VideoEditor mVideoEditor; 60 /* 61 * Semaphore to control preview calls 62 
*/ 63 private final Semaphore mLock; 64 65 private EditSettings mStoryBoardSettings; 66 67 private String mOutputFilename; 68 69 private PreviewClipProperties mClipProperties = null; 70 71 private EditSettings mPreviewEditSettings; 72 73 private AudioSettings mAudioSettings = null; 74 75 private AudioTrack mAudioTrack = null; 76 77 private boolean mInvalidatePreviewArray = true; 78 79 private boolean mRegenerateAudio = true; 80 81 private String mExportFilename = null; 82 83 private int mProgressToApp; 84 85 private String mRenderPreviewOverlayFile; 86 private int mRenderPreviewRenderingMode; 87 88 private boolean mIsFirstProgress; 89 90 private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm"; 91 92 // Processing indication 93 public static final int PROCESSING_NONE = 0; 94 public static final int PROCESSING_AUDIO_PCM = 1; 95 public static final int PROCESSING_TRANSITION = 2; 96 public static final int PROCESSING_KENBURNS = 3; 97 public static final int PROCESSING_INTERMEDIATE1 = 11; 98 public static final int PROCESSING_INTERMEDIATE2 = 12; 99 public static final int PROCESSING_INTERMEDIATE3 = 13; 100 public static final int PROCESSING_EXPORT = 20; 101 102 private int mProcessingState; 103 private Object mProcessingObject; 104 private PreviewProgressListener mPreviewProgressListener; 105 private ExportProgressListener mExportProgressListener; 106 private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener; 107 private MediaProcessingProgressListener mMediaProcessingProgressListener; 108 private final String mProjectPath; 109 110 private long mPreviewProgress; 111 112 private String mAudioTrackPCMFilePath; 113 114 private int mTotalClips = 0; 115 116 private boolean mErrorFlagSet = false; 117 118 @SuppressWarnings("unused") 119 private int mManualEditContext; 120 121 /* Listeners */ 122 123 /** 124 * Interface definition for a listener to be invoked when there is an update 125 * in a running task. 
126 */ 127 public interface OnProgressUpdateListener { 128 /** 129 * Called when there is an update. 130 * 131 * @param taskId id of the task reporting an update. 132 * @param progress progress of the task [0..100]. 133 * @see BasicEdit#TASK_ENCODING 134 */ 135 public void OnProgressUpdate(int taskId, int progress); 136 } 137 138 /** Defines the version. */ 139 public final class Version { 140 141 /** Major version number */ 142 public int major; 143 144 /** Minor version number */ 145 public int minor; 146 147 /** Revision number */ 148 public int revision; 149 150 /** VIDEOEDITOR major version number */ 151 private static final int VIDEOEDITOR_MAJOR_VERSION = 0; 152 153 /** VIDEOEDITOR minor version number */ 154 private static final int VIDEOEDITOR_MINOR_VERSION = 0; 155 156 /** VIDEOEDITOR revision number */ 157 private static final int VIDEOEDITOR_REVISION_VERSION = 1; 158 159 /** Method which returns the current VIDEOEDITOR version */ 160 public Version getVersion() { 161 Version version = new Version(); 162 163 version.major = Version.VIDEOEDITOR_MAJOR_VERSION; 164 version.minor = Version.VIDEOEDITOR_MINOR_VERSION; 165 version.revision = Version.VIDEOEDITOR_REVISION_VERSION; 166 167 return version; 168 } 169 } 170 171 /** 172 * Defines output audio formats. 173 */ 174 public final class AudioFormat { 175 /** No audio present in output clip. Used to generate video only clip */ 176 public static final int NO_AUDIO = 0; 177 178 /** AMR Narrow Band. */ 179 public static final int AMR_NB = 1; 180 181 /** Advanced Audio Coding (AAC). */ 182 public static final int AAC = 2; 183 184 /** Advanced Audio Codec Plus (HE-AAC v1). */ 185 public static final int AAC_PLUS = 3; 186 187 /** Advanced Audio Codec Plus (HE-AAC v2). */ 188 public static final int ENHANCED_AAC_PLUS = 4; 189 190 /** MPEG layer 3 (MP3). */ 191 public static final int MP3 = 5; 192 193 /** Enhanced Variable RateCodec (EVRC). */ 194 public static final int EVRC = 6; 195 196 /** PCM (PCM). 
*/ 197 public static final int PCM = 7; 198 199 /** No transcoding. Output audio format is same as input audio format */ 200 public static final int NULL_AUDIO = 254; 201 202 /** Unsupported audio format. */ 203 public static final int UNSUPPORTED_AUDIO = 255; 204 } 205 206 /** 207 * Defines audio sampling frequencies. 208 */ 209 public final class AudioSamplingFrequency { 210 /** 211 * Default sampling frequency. Uses the default frequency for a specific 212 * audio format. For AAC the only supported (and thus default) sampling 213 * frequency is 16 kHz. For this audio format the sampling frequency in 214 * the OutputParams. 215 **/ 216 public static final int FREQ_DEFAULT = 0; 217 218 /** Audio sampling frequency of 8000 Hz. */ 219 public static final int FREQ_8000 = 8000; 220 221 /** Audio sampling frequency of 11025 Hz. */ 222 public static final int FREQ_11025 = 11025; 223 224 /** Audio sampling frequency of 12000 Hz. */ 225 public static final int FREQ_12000 = 12000; 226 227 /** Audio sampling frequency of 16000 Hz. */ 228 public static final int FREQ_16000 = 16000; 229 230 /** Audio sampling frequency of 22050 Hz. */ 231 public static final int FREQ_22050 = 22050; 232 233 /** Audio sampling frequency of 24000 Hz. */ 234 public static final int FREQ_24000 = 24000; 235 236 /** Audio sampling frequency of 32000 Hz. */ 237 public static final int FREQ_32000 = 32000; 238 239 /** Audio sampling frequency of 44100 Hz. */ 240 public static final int FREQ_44100 = 44100; 241 242 /** Audio sampling frequency of 48000 Hz. Not available for output file. */ 243 public static final int FREQ_48000 = 48000; 244 } 245 246 /** 247 * Defines the supported fixed audio and video bitrates. These values are 248 * for output audio video only. 249 */ 250 public final class Bitrate { 251 /** Variable bitrate. Means no bitrate regulation */ 252 public static final int VARIABLE = -1; 253 254 /** An undefined bitrate. 
*/ 255 public static final int UNDEFINED = 0; 256 257 /** A bitrate of 9.2 kbits/s. */ 258 public static final int BR_9_2_KBPS = 9200; 259 260 /** A bitrate of 12.2 kbits/s. */ 261 public static final int BR_12_2_KBPS = 12200; 262 263 /** A bitrate of 16 kbits/s. */ 264 public static final int BR_16_KBPS = 16000; 265 266 /** A bitrate of 24 kbits/s. */ 267 public static final int BR_24_KBPS = 24000; 268 269 /** A bitrate of 32 kbits/s. */ 270 public static final int BR_32_KBPS = 32000; 271 272 /** A bitrate of 48 kbits/s. */ 273 public static final int BR_48_KBPS = 48000; 274 275 /** A bitrate of 64 kbits/s. */ 276 public static final int BR_64_KBPS = 64000; 277 278 /** A bitrate of 96 kbits/s. */ 279 public static final int BR_96_KBPS = 96000; 280 281 /** A bitrate of 128 kbits/s. */ 282 public static final int BR_128_KBPS = 128000; 283 284 /** A bitrate of 192 kbits/s. */ 285 public static final int BR_192_KBPS = 192000; 286 287 /** A bitrate of 256 kbits/s. */ 288 public static final int BR_256_KBPS = 256000; 289 290 /** A bitrate of 288 kbits/s. */ 291 public static final int BR_288_KBPS = 288000; 292 293 /** A bitrate of 384 kbits/s. */ 294 public static final int BR_384_KBPS = 384000; 295 296 /** A bitrate of 512 kbits/s. */ 297 public static final int BR_512_KBPS = 512000; 298 299 /** A bitrate of 800 kbits/s. */ 300 public static final int BR_800_KBPS = 800000; 301 302 /** A bitrate of 2 Mbits/s. */ 303 public static final int BR_2_MBPS = 2000000; 304 305 /** A bitrate of 5 Mbits/s. */ 306 public static final int BR_5_MBPS = 5000000; 307 308 /** A bitrate of 8 Mbits/s. */ 309 public static final int BR_8_MBPS = 8000000; 310 } 311 312 /** 313 * Defines all supported file types. 314 */ 315 public final class FileType { 316 /** 3GPP file type. */ 317 public static final int THREE_GPP = 0; 318 319 /** MP4 file type. */ 320 public static final int MP4 = 1; 321 322 /** AMR file type. */ 323 public static final int AMR = 2; 324 325 /** MP3 audio file type. 
*/ 326 public static final int MP3 = 3; 327 328 /** PCM audio file type. */ 329 public static final int PCM = 4; 330 331 /** JPEG image file type. */ 332 public static final int JPG = 5; 333 334 /** GIF image file type. */ 335 public static final int GIF = 7; 336 337 /** PNG image file type. */ 338 public static final int PNG = 8; 339 340 /** M4V file type. */ 341 public static final int M4V = 10; 342 343 /** Unsupported file type. */ 344 public static final int UNSUPPORTED = 255; 345 } 346 347 /** 348 * Defines rendering types. Rendering can only be applied to files 349 * containing video streams. 350 **/ 351 public final class MediaRendering { 352 /** 353 * Resize to fit the output video with changing the aspect ratio if 354 * needed. 355 */ 356 public static final int RESIZING = 0; 357 358 /** 359 * Crop the input video to fit it with the output video resolution. 360 **/ 361 public static final int CROPPING = 1; 362 363 /** 364 * Resize to fit the output video resolution but maintain the aspect 365 * ratio. This framing type adds black borders if needed. 366 */ 367 public static final int BLACK_BORDERS = 2; 368 } 369 370 /** 371 * Defines the results. 372 */ 373 public final class Result { 374 /** No error. result OK */ 375 public static final int NO_ERROR = 0; 376 377 /** File not found */ 378 public static final int ERR_FILE_NOT_FOUND = 1; 379 380 /** 381 * In case of UTF8 conversion, the size of the converted path will be 382 * more than the corresponding allocated buffer. 383 */ 384 public static final int ERR_BUFFER_OUT_TOO_SMALL = 2; 385 386 /** Invalid file type. */ 387 public static final int ERR_INVALID_FILE_TYPE = 3; 388 389 /** Invalid effect kind. */ 390 public static final int ERR_INVALID_EFFECT_KIND = 4; 391 392 /** Invalid video effect. */ 393 public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5; 394 395 /** Invalid audio effect. */ 396 public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6; 397 398 /** Invalid video transition. 
*/ 399 public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7; 400 401 /** Invalid audio transition. */ 402 public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8; 403 404 /** Invalid encoding frame rate. */ 405 public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9; 406 407 /** External effect is called but this function is not set. */ 408 public static final int ERR_EXTERNAL_EFFECT_NULL = 10; 409 410 /** External transition is called but this function is not set. */ 411 public static final int ERR_EXTERNAL_TRANSITION_NULL = 11; 412 413 /** Begin time cut is larger than the video clip duration. */ 414 public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12; 415 416 /** Begin cut time is larger or equal than end cut. */ 417 public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13; 418 419 /** Two consecutive transitions are overlapping on one clip. */ 420 public static final int ERR_OVERLAPPING_TRANSITIONS = 14; 421 422 /** Internal error, type size mismatch. */ 423 public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15; 424 425 /** An input 3GPP file is invalid/corrupted. */ 426 public static final int ERR_INVALID_3GPP_FILE = 16; 427 428 /** A file contains an unsupported video format. */ 429 public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17; 430 431 /** A file contains an unsupported audio format. */ 432 public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18; 433 434 /** A file format is not supported. */ 435 public static final int ERR_AMR_EDITING_UNSUPPORTED = 19; 436 437 /** An input clip has an unexpectedly large Video AU. */ 438 public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20; 439 440 /** An input clip has an unexpectedly large Audio AU. */ 441 public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21; 442 443 /** An input clip has a corrupted Audio AU. */ 444 public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22; 445 446 /** The video encoder encountered an Access Unit error. 
*/ 447 public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23; 448 449 /** Unsupported video format for Video Editing. */ 450 public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24; 451 452 /** Unsupported H263 profile for Video Editing. */ 453 public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25; 454 455 /** Unsupported MPEG-4 profile for Video Editing. */ 456 public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26; 457 458 /** Unsupported MPEG-4 RVLC tool for Video Editing. */ 459 public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27; 460 461 /** Unsupported audio format for Video Editing. */ 462 public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28; 463 464 /** File contains no supported stream. */ 465 public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29; 466 467 /** File contains no video stream or an unsupported video stream. */ 468 public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30; 469 470 /** Internal error, clip analysis version mismatch. */ 471 public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31; 472 473 /** 474 * At least one of the clip analysis has been generated on another 475 * platform (WIN32, ARM, etc.). 476 */ 477 public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32; 478 479 /** Clips don't have the same video format (H263 or MPEG4). */ 480 public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33; 481 482 /** Clips don't have the same frame size. */ 483 public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34; 484 485 /** Clips don't have the same MPEG-4 time scale. */ 486 public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35; 487 488 /** Clips don't have the same use of MPEG-4 data partitioning. */ 489 public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36; 490 491 /** MP3 clips can't be assembled. 
*/ 492 public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37; 493 494 /** 495 * The input 3GPP file does not contain any supported audio or video 496 * track. 497 */ 498 public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38; 499 500 /** 501 * The Volume of the added audio track (AddVolume) must be strictly 502 * superior than zero. 503 */ 504 public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39; 505 506 /** 507 * The time at which an audio track is added can't be higher than the 508 * input video track duration.. 509 */ 510 public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40; 511 512 /** The audio track file format setting is undefined. */ 513 public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41; 514 515 /** The added audio track stream has an unsupported format. */ 516 public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42; 517 518 /** The audio mixing feature doesn't support the audio track type. */ 519 public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43; 520 521 /** The audio mixing feature doesn't support MP3 audio tracks. */ 522 public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44; 523 524 /** 525 * An added audio track limits the available features: uiAddCts must be 526 * 0 and bRemoveOriginal must be true. 527 */ 528 public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45; 529 530 /** 531 * An added audio track limits the available features: uiAddCts must be 532 * 0 and bRemoveOriginal must be true. 533 */ 534 public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46; 535 536 /** Input audio track is not of a type that can be mixed with output. */ 537 public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47; 538 539 /** Input audio track is not AMR-NB, so it can't be mixed with output. 
*/ 540 public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48; 541 542 /** 543 * An added EVRC audio track limit the available features: uiAddCts must 544 * be 0 and bRemoveOriginal must be true. 545 */ 546 public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49; 547 548 /** H263 profiles other than 0 are not supported. */ 549 public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51; 550 551 /** File contains no video stream or an unsupported video stream. */ 552 public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52; 553 554 /** Transcoding of the input file(s) is necessary. */ 555 public static final int WAR_TRANSCODING_NECESSARY = 53; 556 557 /** 558 * The size of the output file will exceed the maximum configured value. 559 */ 560 public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54; 561 562 /** The time scale is too big. */ 563 public static final int WAR_TIMESCALE_TOO_BIG = 55; 564 565 /** The year is out of range */ 566 public static final int ERR_CLOCK_BAD_REF_YEAR = 56; 567 568 /** The directory could not be opened */ 569 public static final int ERR_DIR_OPEN_FAILED = 57; 570 571 /** The directory could not be read */ 572 public static final int ERR_DIR_READ_FAILED = 58; 573 574 /** There are no more entries in the current directory */ 575 public static final int ERR_DIR_NO_MORE_ENTRY = 59; 576 577 /** The input parameter/s has error */ 578 public static final int ERR_PARAMETER = 60; 579 580 /** There is a state machine error */ 581 public static final int ERR_STATE = 61; 582 583 /** Memory allocation failed */ 584 public static final int ERR_ALLOC = 62; 585 586 /** Context is invalid */ 587 public static final int ERR_BAD_CONTEXT = 63; 588 589 /** Context creation failed */ 590 public static final int ERR_CONTEXT_FAILED = 64; 591 592 /** Invalid stream ID */ 593 public static final int ERR_BAD_STREAM_ID = 65; 594 595 /** Invalid option ID */ 596 public static final int ERR_BAD_OPTION_ID = 66; 597 598 /** The option is 
write only */ 599 public static final int ERR_WRITE_ONLY = 67; 600 601 /** The option is read only */ 602 public static final int ERR_READ_ONLY = 68; 603 604 /** The feature is not implemented in this version */ 605 public static final int ERR_NOT_IMPLEMENTED = 69; 606 607 /** The media type is not supported */ 608 public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70; 609 610 /** No data to be encoded */ 611 public static final int WAR_NO_DATA_YET = 71; 612 613 /** No data to be decoded */ 614 public static final int WAR_NO_MORE_STREAM = 72; 615 616 /** Time stamp is invalid */ 617 public static final int WAR_INVALID_TIME = 73; 618 619 /** No more data to be decoded */ 620 public static final int WAR_NO_MORE_AU = 74; 621 622 /** Semaphore timed out */ 623 public static final int WAR_TIME_OUT = 75; 624 625 /** Memory buffer is full */ 626 public static final int WAR_BUFFER_FULL = 76; 627 628 /** Server has asked for redirection */ 629 public static final int WAR_REDIRECT = 77; 630 631 /** Too many streams in input */ 632 public static final int WAR_TOO_MUCH_STREAMS = 78; 633 634 /** The file cannot be opened/ written into as it is locked */ 635 public static final int ERR_FILE_LOCKED = 79; 636 637 /** The file access mode is invalid */ 638 public static final int ERR_FILE_BAD_MODE_ACCESS = 80; 639 640 /** The file pointer points to an invalid location */ 641 public static final int ERR_FILE_INVALID_POSITION = 81; 642 643 /** Invalid string */ 644 public static final int ERR_STR_BAD_STRING = 94; 645 646 /** The input string cannot be converted */ 647 public static final int ERR_STR_CONV_FAILED = 95; 648 649 /** The string size is too large */ 650 public static final int ERR_STR_OVERFLOW = 96; 651 652 /** Bad string arguments */ 653 public static final int ERR_STR_BAD_ARGS = 97; 654 655 /** The string value is larger than maximum size allowed */ 656 public static final int WAR_STR_OVERFLOW = 98; 657 658 /** The string value is not present in this comparison 
operation */ 659 public static final int WAR_STR_NOT_FOUND = 99; 660 661 /** The thread is not started */ 662 public static final int ERR_THREAD_NOT_STARTED = 100; 663 664 /** Trancoding done warning */ 665 public static final int WAR_TRANSCODING_DONE = 101; 666 667 /** Unsupported mediatype */ 668 public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102; 669 670 /** Input file contains invalid/unsupported streams */ 671 public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103; 672 673 /** Invalid input file */ 674 public static final int ERR_INVALID_INPUT_FILE = 104; 675 676 /** Invalid output video format */ 677 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105; 678 679 /** Invalid output video frame size */ 680 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106; 681 682 /** Invalid output video frame rate */ 683 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107; 684 685 /** Invalid output audio format */ 686 public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108; 687 688 /** Invalid video frame size for H.263 */ 689 public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109; 690 691 /** Invalid video frame rate for H.263 */ 692 public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110; 693 694 /** invalid playback duration */ 695 public static final int ERR_DURATION_IS_NULL = 111; 696 697 /** Invalid H.263 profile in file */ 698 public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112; 699 700 /** Invalid AAC sampling frequency */ 701 public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113; 702 703 /** Audio conversion failure */ 704 public static final int ERR_AUDIO_CONVERSION_FAILED = 114; 705 706 /** Invalid trim start and end times */ 707 public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115; 708 709 /** End time smaller than start time for trim */ 710 public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116; 711 712 /** Output file size 
is small */ 713 public static final int ERR_MAXFILESIZE_TOO_SMALL = 117; 714 715 /** Output video bitrate is too low */ 716 public static final int ERR_VIDEOBITRATE_TOO_LOW = 118; 717 718 /** Output audio bitrate is too low */ 719 public static final int ERR_AUDIOBITRATE_TOO_LOW = 119; 720 721 /** Output video bitrate is too high */ 722 public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120; 723 724 /** Output audio bitrate is too high */ 725 public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121; 726 727 /** Output file size is too small */ 728 public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122; 729 730 /** Unknown stream type */ 731 public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123; 732 733 /** Invalid metadata in input stream */ 734 public static final int WAR_READER_NO_METADATA = 124; 735 736 /** Invalid file reader info warning */ 737 public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125; 738 739 /** Warning to indicate the the writer is being stopped */ 740 public static final int WAR_WRITER_STOP_REQ = 131; 741 742 /** Video decoder failed to provide frame for transcoding */ 743 public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132; 744 745 /** Video deblocking filter is not implemented */ 746 public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133; 747 748 /** H.263 decoder profile not supported */ 749 public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134; 750 751 /** The input file contains unsupported H.263 profile */ 752 public static final int ERR_DECODER_H263_NOT_BASELINE = 135; 753 754 /** There is no more space to store the output file */ 755 public static final int ERR_NOMORE_SPACE_FOR_FILE = 136; 756 757 /** Internal error. */ 758 public static final int ERR_INTERNAL = 255; 759 } 760 761 /** 762 * Defines output video formats. 763 */ 764 public final class VideoFormat { 765 /** No video present in output clip. 
Used to generate audio only clip */ 766 public static final int NO_VIDEO = 0; 767 768 /** H263 baseline format. */ 769 public static final int H263 = 1; 770 771 /** MPEG4 video Simple Profile format. */ 772 public static final int MPEG4 = 2; 773 774 /** MPEG4 video Simple Profile format with support for EMP. */ 775 public static final int MPEG4_EMP = 3; 776 777 /** H264 video */ 778 public static final int H264 = 4; 779 780 /** No transcoding. Output video format is same as input video format */ 781 public static final int NULL_VIDEO = 254; 782 783 /** Unsupported video format. */ 784 public static final int UNSUPPORTED = 255; 785 } 786 787 /** Defines video profiles and levels. */ 788 public final class VideoProfile { 789 /** H263, Profile 0, Level 10. */ 790 public static final int H263_PROFILE_0_LEVEL_10 = MediaProperties.H263_PROFILE_0_LEVEL_10; 791 792 /** H263, Profile 0, Level 20. */ 793 public static final int H263_PROFILE_0_LEVEL_20 = MediaProperties.H263_PROFILE_0_LEVEL_20; 794 795 /** H263, Profile 0, Level 30. */ 796 public static final int H263_PROFILE_0_LEVEL_30 = MediaProperties.H263_PROFILE_0_LEVEL_30; 797 798 /** H263, Profile 0, Level 40. */ 799 public static final int H263_PROFILE_0_LEVEL_40 = MediaProperties.H263_PROFILE_0_LEVEL_40; 800 801 /** H263, Profile 0, Level 45. */ 802 public static final int H263_PROFILE_0_LEVEL_45 = MediaProperties.H263_PROFILE_0_LEVEL_45; 803 804 /** MPEG4, Simple Profile, Level 0. */ 805 public static final int MPEG4_SP_LEVEL_0 = MediaProperties.MPEG4_SP_LEVEL_0; 806 807 /** MPEG4, Simple Profile, Level 0B. */ 808 public static final int MPEG4_SP_LEVEL_0B = MediaProperties.MPEG4_SP_LEVEL_0B; 809 810 /** MPEG4, Simple Profile, Level 1. */ 811 public static final int MPEG4_SP_LEVEL_1 = MediaProperties.MPEG4_SP_LEVEL_1; 812 813 /** MPEG4, Simple Profile, Level 2. */ 814 public static final int MPEG4_SP_LEVEL_2 = MediaProperties.MPEG4_SP_LEVEL_2; 815 816 /** MPEG4, Simple Profile, Level 3. 
*/ 817 public static final int MPEG4_SP_LEVEL_3 = MediaProperties.MPEG4_SP_LEVEL_3; 818 819 /** MPEG4, Simple Profile, Level 4A. */ 820 public static final int MPEG4_SP_LEVEL_4A = MediaProperties.MPEG4_SP_LEVEL_4A; 821 822 /** MPEG4, Simple Profile, Level 0. */ 823 public static final int MPEG4_SP_LEVEL_5 = MediaProperties.MPEG4_SP_LEVEL_5; 824 825 /** H264, Profile 0, Level 1. */ 826 public static final int H264_PROFILE_0_LEVEL_1 = MediaProperties.H264_PROFILE_0_LEVEL_1; 827 828 /** H264, Profile 0, Level 1b. */ 829 public static final int H264_PROFILE_0_LEVEL_1b = MediaProperties.H264_PROFILE_0_LEVEL_1B; 830 831 /** H264, Profile 0, Level 1.1 */ 832 public static final int H264_PROFILE_0_LEVEL_1_1 = MediaProperties.H264_PROFILE_0_LEVEL_1_1; 833 834 /** H264, Profile 0, Level 1.2 */ 835 public static final int H264_PROFILE_0_LEVEL_1_2 = MediaProperties.H264_PROFILE_0_LEVEL_1_2; 836 837 /** H264, Profile 0, Level 1.3 */ 838 public static final int H264_PROFILE_0_LEVEL_1_3 = MediaProperties.H264_PROFILE_0_LEVEL_1_3; 839 840 /** H264, Profile 0, Level 2. */ 841 public static final int H264_PROFILE_0_LEVEL_2 = MediaProperties.H264_PROFILE_0_LEVEL_2; 842 843 /** H264, Profile 0, Level 2.1 */ 844 public static final int H264_PROFILE_0_LEVEL_2_1 = MediaProperties.H264_PROFILE_0_LEVEL_2_1; 845 846 /** H264, Profile 0, Level 2.2 */ 847 public static final int H264_PROFILE_0_LEVEL_2_2 = MediaProperties.H264_PROFILE_0_LEVEL_2_2; 848 849 /** H264, Profile 0, Level 3. */ 850 public static final int H264_PROFILE_0_LEVEL_3 = MediaProperties.H264_PROFILE_0_LEVEL_3; 851 852 /** H264, Profile 0, Level 3.1 */ 853 public static final int H264_PROFILE_0_LEVEL_3_1 = MediaProperties.H264_PROFILE_0_LEVEL_3_1; 854 855 /** H264, Profile 0, Level 3.2 */ 856 public static final int H264_PROFILE_0_LEVEL_3_2 = MediaProperties.H264_PROFILE_0_LEVEL_3_2; 857 858 /** H264, Profile 0, Level 4. 
*/ 859 public static final int H264_PROFILE_0_LEVEL_4 = MediaProperties.H264_PROFILE_0_LEVEL_4; 860 861 /** H264, Profile 0, Level 4.1 */ 862 public static final int H264_PROFILE_0_LEVEL_4_1 = MediaProperties.H264_PROFILE_0_LEVEL_4_1; 863 864 /** H264, Profile 0, Level 4.2 */ 865 public static final int H264_PROFILE_0_LEVEL_4_2 = MediaProperties.H264_PROFILE_0_LEVEL_4_2; 866 867 /** H264, Profile 0, Level 5. */ 868 public static final int H264_PROFILE_0_LEVEL_5 = MediaProperties.H264_PROFILE_0_LEVEL_5; 869 870 /** H264, Profile 0, Level 5.1 */ 871 public static final int H264_PROFILE_0_LEVEL_5_1 = MediaProperties.H264_PROFILE_0_LEVEL_5_1; 872 873 /** Profile out of range. */ 874 public static final int OUT_OF_RANGE = MediaProperties.UNSUPPORTED_PROFILE_LEVEL; 875 } 876 877 /** Defines video frame sizes. */ 878 public final class VideoFrameSize { 879 880 public static final int SIZE_UNDEFINED = -1; 881 882 /** SQCIF 128 x 96 pixels. */ 883 public static final int SQCIF = 0; 884 885 /** QQVGA 160 x 120 pixels. */ 886 public static final int QQVGA = 1; 887 888 /** QCIF 176 x 144 pixels. */ 889 public static final int QCIF = 2; 890 891 /** QVGA 320 x 240 pixels. */ 892 public static final int QVGA = 3; 893 894 /** CIF 352 x 288 pixels. */ 895 public static final int CIF = 4; 896 897 /** VGA 640 x 480 pixels. */ 898 public static final int VGA = 5; 899 900 /** WVGA 800 X 480 pixels */ 901 public static final int WVGA = 6; 902 903 /** NTSC 720 X 480 pixels */ 904 public static final int NTSC = 7; 905 906 /** 640 x 360 */ 907 public static final int nHD = 8; 908 909 /** 854 x 480 */ 910 public static final int WVGA16x9 = 9; 911 912 /** 720p 1280 X 720 */ 913 public static final int V720p = 10; 914 915 /** 1080 x 720 */ 916 public static final int W720p = 11; 917 918 /** 1080 960 x 720 */ 919 public static final int S720p = 12; 920 } 921 922 /** 923 * Defines output video frame rates. 924 */ 925 public final class VideoFrameRate { 926 /** Frame rate of 5 frames per second. 
         */
        public static final int FR_5_FPS = 0;

        /** Frame rate of 7.5 frames per second. */
        public static final int FR_7_5_FPS = 1;

        /** Frame rate of 10 frames per second. */
        public static final int FR_10_FPS = 2;

        /** Frame rate of 12.5 frames per second. */
        public static final int FR_12_5_FPS = 3;

        /** Frame rate of 15 frames per second. */
        public static final int FR_15_FPS = 4;

        /** Frame rate of 20 frames per second. */
        public static final int FR_20_FPS = 5;

        /** Frame rate of 25 frames per second. */
        public static final int FR_25_FPS = 6;

        /** Frame rate of 30 frames per second. */
        public static final int FR_30_FPS = 7;
    }

    /**
     * Defines the video effect types.
     */
    public static class VideoEffect {

        /** No video effect. */
        public static final int NONE = 0;

        /** Fade from black. */
        public static final int FADE_FROM_BLACK = 8;

        /** Fade to black. */
        public static final int FADE_TO_BLACK = 16;

        /** External effect. */
        public static final int EXTERNAL = 256;

        /** Black and white effect. */
        public static final int BLACK_AND_WHITE = 257;

        /** Pink tint effect. */
        public static final int PINK = 258;

        /** Green tint effect. */
        public static final int GREEN = 259;

        /** Sepia tint effect. */
        public static final int SEPIA = 260;

        /** Negative effect. */
        public static final int NEGATIVE = 261;

        /** Framing (overlay) effect. */
        public static final int FRAMING = 262;

        /** Text overlay effect. */
        public static final int TEXT = 263;

        /** Zoom-in effect. */
        public static final int ZOOM_IN = 264;

        /** Zoom-out effect. */
        public static final int ZOOM_OUT = 265;

        /** "Fifties" (old movie) effect. */
        public static final int FIFTIES = 266;

        /** Single RGB16 color effect. */
        public static final int COLORRGB16 = 267;

        /** Gradient color effect. */
        public static final int GRADIENT = 268;
    }

    /**
     * Defines the video transitions.
     */
    public static class VideoTransition {
        /** No transition */
        public static final int NONE = 0;

        /** Cross fade transition */
        public static final int CROSS_FADE = 1;

        /** External transition. Currently not available. */
        public static final int EXTERNAL = 256;

        /** AlphaMagic transition. */
        public static final int ALPHA_MAGIC = 257;

        /** Slide transition. */
        public static final int SLIDE_TRANSITION = 258;

        /** Fade to black transition. */
        public static final int FADE_BLACK = 259;
    }

    /**
     * Defines settings for the AlphaMagic transition
     */
    public static class AlphaMagicSettings {
        /** Name of the alpha file (JPEG file). */
        public String file;

        /** Blending percentage [0..100]. 0 = no blending. */
        public int blendingPercent;

        /** Invert the default rotation direction of the AlphaMagic effect. */
        public boolean invertRotation;

        /** RGB width of the alpha frame. */
        public int rgbWidth;

        /** RGB height of the alpha frame. */
        public int rgbHeight;
    }

    /** Defines the direction of the Slide transition. */
    public static final class SlideDirection {

        /** Right out, left in. */
        public static final int RIGHT_OUT_LEFT_IN = 0;

        /** Left out, right in. (Note: constant name typo is part of the public API.) */
        public static final int LEFT_OUT_RIGTH_IN = 1;

        /** Top out, bottom in. */
        public static final int TOP_OUT_BOTTOM_IN = 2;

        /** Bottom out, top in. */
        public static final int BOTTOM_OUT_TOP_IN = 3;
    }

    /** Defines the Slide transition settings. */
    public static class SlideTransitionSettings {
        /**
         * Direction of the slide transition. See {@link SlideDirection
         * SlideDirection} for valid values.
         */
        public int direction;
    }

    /**
     * Defines the settings of a single clip.
     */
    public static class ClipSettings {

        /**
         * The path to the clip file.
         * <p>
         * File format of the clip, it can be:
         * <ul>
         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
         * <li>JPG file
         * </ul>
         */
        public String clipPath;

        /**
         * The path of the decoded file. This is used only for image files.
         */
        public String clipDecodedPath;

        /**
         * The path of the Original file.
         * This is used only for image files.
         */
        public String clipOriginalPath;

        /**
         * File type of the clip. See {@link FileType FileType} for valid
         * values.
         */
        public int fileType;

        /** Begin of the cut in the clip in milliseconds. */
        public int beginCutTime;

        /**
         * End of the cut in the clip in milliseconds. Set both
         * <code>beginCutTime</code> and <code>endCutTime</code> to
         * <code>0</code> to get the full length of the clip without a cut. In
         * case of JPG clip, this is the duration of the JPEG file.
         */
        public int endCutTime;

        /**
         * Begin of the cut in the clip in percentage of the file duration.
         */
        public int beginCutPercent;

        /**
         * End of the cut in the clip in percentage of the file duration. Set
         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
         * <code>0</code> to get the full length of the clip without a cut.
         */
        public int endCutPercent;

        /** Enable panning and zooming. */
        public boolean panZoomEnabled;

        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
        public int panZoomPercentStart;

        /** Top left X coordinate at start of clip. */
        public int panZoomTopLeftXStart;

        /** Top left Y coordinate at start of clip. */
        public int panZoomTopLeftYStart;

        /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom */
        public int panZoomPercentEnd;

        /** Top left X coordinate at end of clip. */
        public int panZoomTopLeftXEnd;

        /** Top left Y coordinate at end of clip. */
        public int panZoomTopLeftYEnd;

        /**
         * Set The media rendering. See {@link MediaRendering MediaRendering}
         * for valid values.
         */
        public int mediaRendering;

        /**
         * RGB width and height of the decoded frame.
         */
        public int rgbWidth;
        public int rgbHeight;
    }

    /**
     * Defines settings for a transition.
     */
    public static class TransitionSettings {

        /** Duration of the transition in msec. */
        public int duration;

        /**
         * Transition type for video. See {@link VideoTransition
         * VideoTransition} for valid values.
         */
        public int videoTransitionType;

        /**
         * Transition type for audio. See {@link AudioTransition
         * AudioTransition} for valid values.
         */
        public int audioTransitionType;

        /**
         * Transition behaviour. See {@link TransitionBehaviour
         * TransitionBehaviour} for valid values.
         */
        public int transitionBehaviour;

        /**
         * Settings for AlphaMagic transition. Only needs to be set if
         * <code>videoTransitionType</code> is set to
         * <code>VideoTransition.ALPHA_MAGIC</code>. See
         * {@link AlphaMagicSettings AlphaMagicSettings}.
         */
        public AlphaMagicSettings alphaSettings;

        /**
         * Settings for the Slide transition. See
         * {@link SlideTransitionSettings SlideTransitionSettings}.
         */
        public SlideTransitionSettings slideSettings;
    }

    /** Defines the audio transition types. */
    public static final class AudioTransition {
        /** No audio transition. */
        public static final int NONE = 0;

        /** Cross-fade audio transition. */
        public static final int CROSS_FADE = 1;
    }

    /**
     * Defines transition behaviors.
     */
    public static final class TransitionBehaviour {

        /** The transition uses an increasing speed. */
        public static final int SPEED_UP = 0;

        /** The transition uses a linear (constant) speed. */
        public static final int LINEAR = 1;

        /** The transition uses a decreasing speed.
         */
        public static final int SPEED_DOWN = 2;

        /**
         * The transition uses a constant speed, but slows down in the middle
         * section.
         */
        public static final int SLOW_MIDDLE = 3;

        /**
         * The transition uses a constant speed, but increases speed in the
         * middle section.
         */
        public static final int FAST_MIDDLE = 4;
    }

    /**
     * Defines settings for the background music.
     */
    public static class BackgroundMusicSettings {

        /** Background music file. */
        public String file;

        /** File type. See {@link FileType FileType} for valid values. */
        public int fileType;

        /**
         * Insertion time in milliseconds, in the output video where the
         * background music must be inserted.
         */
        public long insertionTime;

        /**
         * Volume, as a percentage of the background music track, to use. If
         * this field is set to 100, the background music will replace the audio
         * from the video input file(s).
         */
        public int volumePercent;

        /**
         * Start time in milliseconds in the background music file from where
         * the background music should loop. Set both <code>beginLoop</code> and
         * <code>endLoop</code> to <code>0</code> to disable looping.
         */
        public long beginLoop;

        /**
         * End time in milliseconds in the background music file to where the
         * background music should loop. Set both <code>beginLoop</code> and
         * <code>endLoop</code> to <code>0</code> to disable looping.
         */
        public long endLoop;

        /** Enables ducking of the background music under the primary audio. */
        public boolean enableDucking;

        /** Threshold level that triggers ducking. */
        public int duckingThreshold;

        /** Background music volume while ducking is active. */
        public int lowVolume;

        /** Whether the background music loops. */
        public boolean isLooping;
    }

    /** Defines settings for an effect. */
    public static class AudioEffect {
        /** No audio effect. */
        public static final int NONE = 0;

        /** Fade-in effect.
         */
        public static final int FADE_IN = 8;

        /** Fade-out effect. */
        public static final int FADE_OUT = 16;
    }

    /** Defines the effect settings. */
    public static class EffectSettings {

        /** Start time of the effect in milliseconds. */
        public int startTime;

        /** Duration of the effect in milliseconds. */
        public int duration;

        /**
         * Video effect type. See {@link VideoEffect VideoEffect} for valid
         * values.
         */
        public int videoEffectType;

        /**
         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
         * values.
         */
        public int audioEffectType;

        /**
         * Start time of the effect in percents of the duration of the clip. A
         * value of 0 percent means start time is from the beginning of the
         * clip.
         */
        public int startPercent;

        /**
         * Duration of the effect in percents of the duration of the clip.
         */
        public int durationPercent;

        /**
         * Framing file.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public String framingFile;

        /**
         * Framing buffer.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int[] framingBuffer;

        /**
         * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5) or ARGB_8888 (6).
         */
        public int bitmapType;

        /** Width in pixels. */
        public int width;

        /** Height in pixels. */
        public int height;

        /**
         * Top left x coordinate. This coordinate is used to set the x
         * coordinate of the picture in the framing file when the framing file
         * is selected. The x coordinate is also used to set the location of the
         * text in the text effect.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise ignored.
         */
        public int topLeftX;

        /**
         * Top left y coordinate. This coordinate is used to set the y
         * coordinate of the picture in the framing file when the framing file
         * is selected. The y coordinate is also used to set the location of the
         * text in the text effect.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise ignored.
         */
        public int topLeftY;

        /**
         * Should the frame be resized or not. If this field is set to
         * <code>true</code> then the frame size is matched with the output
         * video size.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public boolean framingResize;

        /**
         * Size to which the framing buffer needs to be resized to.
         * This is valid only if <code>framingResize</code> is true.
         */
        public int framingScaledSize;

        /**
         * Text to insert in the video.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise ignored.
         */
        public String text;

        /**
         * Text attributes for the text to insert in the video.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise ignored. For
         * more details about this field see the integration guide.
         */
        public String textRenderingData;

        /** Width of the text buffer in pixels. */
        public int textBufferWidth;

        /** Height of the text buffer in pixels. */
        public int textBufferHeight;

        /**
         * Processing rate for the fifties effect. A high value (e.g. 30)
         * results in high effect strength.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise ignored.
         */
        public int fiftiesFrameRate;

        /**
         * RGB 16 color of the RGB16 and gradient color effect.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise
         * ignored.
         */
        public int rgb16InputColor;

        /**
         * Start alpha blending percentage.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int alphaBlendingStartPercent;

        /**
         * Middle alpha blending percentage.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int alphaBlendingMiddlePercent;

        /**
         * End alpha blending percentage.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int alphaBlendingEndPercent;

        /**
         * Duration, in percentage of effect duration, of the fade-in phase.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int alphaBlendingFadeInTimePercent;

        /**
         * Duration, in percentage of effect duration, of the fade-out phase.
         * <p>
         * Only used when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise ignored.
         */
        public int alphaBlendingFadeOutTimePercent;
    }

    /** Defines the clip properties for preview */
    public static class PreviewClips {

        /**
         * The path to the clip file.
         * <p>
         * File format of the clip, it can be:
         * <ul>
         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
         * <li>JPG file
         * </ul>
         */
        public String clipPath;

        /**
         * File type of the clip. See {@link FileType FileType} for valid
         * values.
         */
        public int fileType;

        /** Begin playback time of the clip in milliseconds. */
        public long beginPlayTime;

        /** End playback time of the clip in milliseconds. */
        public long endPlayTime;

        /**
         * Set The media rendering. See {@link MediaRendering MediaRendering}
         * for valid values.
         */
        public int mediaRendering;

    }

    /** Defines the audio settings.
     */
    public static class AudioSettings {

        /** PCM file path. */
        String pFile;

        /** Identifier of the audio track. */
        String Id;

        /** If true, the original audio track is not taken into account. */
        boolean bRemoveOriginal;

        /** Number of channels (1=mono, 2=stereo) of BGM clip. */
        int channels;

        /**
         * Sampling audio frequency (8000 for amr, 16000 or more for aac) of
         * BGM clip.
         */
        int Fs;

        /** Extended frequency for AAC+, eAAC+ streams of BGM clip. */
        int ExtendedFs;

        /** Time, in milliseconds, at which the added audio track is inserted. */
        long startMs;

        /** Begin cut time of the added audio track, in milliseconds. */
        long beginCutTime;

        /** End cut time of the added audio track, in milliseconds. */
        long endCutTime;

        /** File type of the added audio track. */
        int fileType;

        /** Volume, in percentage, of the added audio track. */
        int volume;

        /** Looping on/off. */
        boolean loop;

        /* Audio mix and duck settings. */

        /** Threshold level that triggers ducking. */
        int ducking_threshold;

        /** Volume of the audio track while ducked. */
        int ducking_lowVolume;

        /** Enables/disables ducking. */
        boolean bInDucking_enable;

        /** Path of the decoded PCM file. */
        String pcmFilePath;
    }

    /** Encapsulates preview clips and effect settings */
    public static class PreviewSettings {

        /** The preview clips. */
        public PreviewClips[] previewClipsArray;

        /** The effect settings. */
        public EffectSettings[] effectSettingsArray;

    }

    /** Encapsulates clip properties */
    public static class PreviewClipProperties {

        /** Properties of each clip in the preview. */
        public Properties[] clipProperties;

    }

    /** Defines the editing settings. */
    public static class EditSettings {

        /**
         * Array of clip settings. There is one <code>clipSetting</code> for
         * each clip.
         */
        public ClipSettings[] clipSettingsArray;

        /**
         * Array of transition settings. If there are n clips (and thus n
         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
         * <code>transitionSettings</code> in
         * <code>transitionSettingsArray</code>.
         */
        public TransitionSettings[] transitionSettingsArray;

        /** The effect settings. */
        public EffectSettings[] effectSettingsArray;

        /**
         * Video frame rate of the output clip. See {@link VideoFrameRate
         * VideoFrameRate} for valid values.
         */
        public int videoFrameRate;

        /** Output file name. Must be an absolute path. */
        public String outputFile;

        /**
         * Size of the video frames in the output clip. See
         * {@link VideoFrameSize VideoFrameSize} for valid values.
         */
        public int videoFrameSize;

        /**
         * Format of the video stream in the output clip. See
         * {@link VideoFormat VideoFormat} for valid values.
         */
        public int videoFormat;

        /**
         * Format of the audio stream in the output clip. See
         * {@link AudioFormat AudioFormat} for valid values.
         */
        public int audioFormat;

        /**
         * Sampling frequency of the audio stream in the output clip. See
         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
         * values.
         */
        public int audioSamplingFreq;

        /**
         * Maximum file size. By setting this you can set the maximum size of
         * the output clip. Set it to <code>0</code> to let the class ignore
         * this field.
         */
        public int maxFileSize;

        /**
         * Number of audio channels in output clip. Use <code>0</code> for none,
         * <code>1</code> for mono or <code>2</code> for stereo. None is only
         * allowed when the <code>audioFormat</code> field is set to
         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
         * allowed when the <code>audioFormat</code> field is set to
         * {@link AudioFormat#AAC AudioFormat.AAC}.
         */
        public int audioChannels;

        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
        public int videoBitrate;

        /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
        public int audioBitrate;

        /**
         * Background music settings. See {@link BackgroundMusicSettings
         * BackgroundMusicSettings} for valid values.
         */
        public BackgroundMusicSettings backgroundMusicSettings;

        /** Volume, in percent, of the primary audio track. */
        public int primaryTrackVolume;

    }

    /**
     * Defines the media properties.
     **/
    public static class Properties {

        /** Duration of the media in milliseconds. */
        public int duration;

        /** File type. */
        public int fileType;

        /** Video format. */
        public int videoFormat;

        /** Duration of the video stream of the media in milliseconds. */
        public int videoDuration;

        /** Bitrate of the video stream of the media. */
        public int videoBitrate;

        /**
         * Width of the video frames or the width of the still picture in
         * pixels.
         */
        public int width;

        /**
         * Height of the video frames or the height of the still picture in
         * pixels.
         */
        public int height;

        /** Average frame rate of video in the media in frames per second. */
        public float averageFrameRate;

        /** Profile and level of the video in the media. */
        public int profileAndLevel;

        /** Audio format. */
        public int audioFormat;

        /** Duration of the audio stream of the media in milliseconds. */
        public int audioDuration;

        /** Bitrate of the audio stream of the media. */
        public int audioBitrate;

        /** Number of audio channels in the media. */
        public int audioChannels;

        /**
         * Sampling frequency of the audio stream in the media in samples per
         * second.
         */
        public int audioSamplingFrequency;

        /** Volume value of the audio track as percentage. */
        public int audioVolumeValue;

        /** Identifier of the media these properties describe. */
        public String Id;
    }

    /**
     * Constructor
     *
     * @param projectPath The path where the VideoEditor stores all files
     *            related to the project
     * @param lock The semaphore
     * @param veObj The video editor reference
     *
     * @throws IllegalArgumentException if <code>veObj</code> is null
     */
    public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) {
        mProjectPath = projectPath;
        if (veObj != null) {
            mVideoEditor = veObj;
        } else {
            // The final field must still be definitely assigned before throwing.
            mVideoEditor = null;
            throw new IllegalArgumentException("video editor object is null");
        }
        if (mStoryBoardSettings == null) {
            mStoryBoardSettings = new EditSettings();
        }

        mLock = lock;

        _init(mProjectPath, "null");
        mAudioTrackPCMFilePath = null;
    }

    /**
     * @return The project path
     */
    String getProjectPath() {
        return mProjectPath;
    }

    /**
     * @return The Audio Track PCM file path
     */
    String getProjectAudioTrackPCMFilePath() {
        return mAudioTrackPCMFilePath;
    }

    /**
     * Invalidates the PCM file by deleting it and clearing the cached path.
     */
    void invalidatePcmFile() {
        if (mAudioTrackPCMFilePath != null) {
            new File(mAudioTrackPCMFilePath).delete();
            mAudioTrackPCMFilePath = null;
        }
    }

    /**
     * Progress callback (presumably invoked from native code, hence the
     * "unused" suppression -- TODO confirm). Forwards export progress to the
     * export listener, or adapts processing progress to an overall percentage
     * for the media processing listener.
     */
    @SuppressWarnings("unused")
    private void onProgressUpdate(int taskId, int progress) {
        if (mProcessingState == PROCESSING_EXPORT) {
            if (mExportProgressListener != null) {
                // Only report monotonically increasing progress.
                if (mProgressToApp < progress) {
                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
                    /* record previous progress */
                    mProgressToApp = progress;
                }
            }
        }
        else {
            // Adapt progress depending on current state
            int actualProgress = 0;
            int action = 0;

            if (mProcessingState ==
                PROCESSING_AUDIO_PCM) {
                action = MediaProcessingProgressListener.ACTION_DECODE;
            } else {
                action = MediaProcessingProgressListener.ACTION_ENCODE;
            }

            switch (mProcessingState) {
                case PROCESSING_AUDIO_PCM:
                    actualProgress = progress;
                    break;
                case PROCESSING_TRANSITION:
                    actualProgress = progress;
                    break;
                case PROCESSING_KENBURNS:
                    actualProgress = progress;
                    break;
                case PROCESSING_INTERMEDIATE1:
                    // First intermediate pass maps to the [0..25) range of the
                    // overall progress.
                    if ((progress == 0) && (mProgressToApp != 0)) {
                        mProgressToApp = 0;
                    }
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = progress/4;
                    }
                    break;
                case PROCESSING_INTERMEDIATE2:
                    // Second intermediate pass maps to [25..50).
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = 25 + progress/4;
                    }
                    break;
                case PROCESSING_INTERMEDIATE3:
                    // Third intermediate pass maps to [50..100].
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = 50 + progress/2;
                    }
                    break;
                case PROCESSING_NONE:

                default:
                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
                    return;
            }
            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {

                mProgressToApp = actualProgress;

                if (mMediaProcessingProgressListener != null) {
                    // Send the progress indication
                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
                            actualProgress);
                }
            }
            /* avoid 0 in next intermediate call */
            if (mProgressToApp == 0) {
                if (mMediaProcessingProgressListener != null) {
                    /*
                     * Send the progress indication
                     */
                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
                            actualProgress);
                }
                mProgressToApp = 1;
            }
        }
    }

    /**
     * Preview progress callback. Notifies the preview progress listener of
     * start, per-frame progress (with optional overlay update) and stop.
     */
    @SuppressWarnings("unused")
    private void onPreviewProgressUpdate(int progress, boolean isFinished,
            boolean updateOverlay, String filename, int renderingMode) {
        if (mPreviewProgressListener != null) {
            // Fire onStart exactly once per preview session.
            if (mIsFirstProgress) {
                mPreviewProgressListener.onStart(mVideoEditor);
                mIsFirstProgress = false;
            }

            final VideoEditor.OverlayData overlayData;
            if (updateOverlay) {
                overlayData = new VideoEditor.OverlayData();
                if (filename != null) {
                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
                } else {
                    overlayData.setClear();
                }
            } else {
                overlayData = null;
            }

            if (progress != 0) {
                mPreviewProgress = progress;
            }

            if (isFinished) {
                mPreviewProgressListener.onStop(mVideoEditor);
            } else {
                mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
            }
        }
    }

    /**
     * Release the native helper object
     */
    void releaseNativeHelper() throws InterruptedException {
        release();
    }

    /**
     * Audio graph extraction progress callback. Forwards progress to the
     * waveform extraction listener (positive progress values only).
     */
    @SuppressWarnings("unused")
    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
            mExtractAudioWaveformProgressListener.onProgress(progress);
        }
    }

    /**
     * Populates the Effect Settings in EffectSettings
     *
     * @param effects The reference of EffectColor
     *
     * @return The populated effect settings in EffectSettings reference
     */
    EffectSettings getEffectSettings(EffectColor effects) {
        EffectSettings effectSettings = new EffectSettings();
        effectSettings.startTime = (int)effects.getStartTime();
        effectSettings.duration = (int)effects.getDuration();
        effectSettings.videoEffectType = getEffectColorType(effects);
        effectSettings.audioEffectType = 0;
        effectSettings.startPercent = 0;
        effectSettings.durationPercent = 0;
        effectSettings.framingFile = null;
        effectSettings.topLeftX = 0;
        effectSettings.topLeftY = 0;
        effectSettings.framingResize = false;
        effectSettings.text = null;
        effectSettings.textRenderingData = null;
        effectSettings.textBufferWidth = 0;
        effectSettings.textBufferHeight = 0;
        // The fifties effect always runs at a fixed 15 fps; no other color
        // effect uses this field.
        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
            effectSettings.fiftiesFrameRate = 15;
        } else {
            effectSettings.fiftiesFrameRate = 0;
        }

        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
            effectSettings.rgb16InputColor = effects.getColor();
        }

        effectSettings.alphaBlendingStartPercent = 0;
        effectSettings.alphaBlendingMiddlePercent = 0;
        effectSettings.alphaBlendingEndPercent = 0;
        effectSettings.alphaBlendingFadeInTimePercent = 0;
        effectSettings.alphaBlendingFadeOutTimePercent = 0;
        return effectSettings;
    }

    /**
     * Populates the Overlay Settings in EffectSettings
     *
     * @param overlay The reference of OverlayFrame
     *
     * @return The populated overlay settings in EffectSettings reference
     */
    EffectSettings getOverlaySettings(OverlayFrame overlay) {
        EffectSettings effectSettings = new EffectSettings();
        Bitmap bitmap = null;

        effectSettings.startTime = (int)overlay.getStartTime();
        effectSettings.duration = (int)overlay.getDuration();
        effectSettings.videoEffectType = VideoEffect.FRAMING;
        effectSettings.audioEffectType = 0;
        effectSettings.startPercent = 0;
        effectSettings.durationPercent = 0;
        effectSettings.framingFile = null;

        if ((bitmap = overlay.getBitmap()) != null) {
            effectSettings.framingFile = overlay.getFilename();

            // Lazily persist the overlay bitmap to disk if it has not been
            // saved yet.
            if (effectSettings.framingFile == null) {
                try {
                    (overlay).save(mProjectPath);
                } catch (IOException e) {
                    Log.e(TAG, "getOverlaySettings : File not found");
                }
                effectSettings.framingFile = overlay.getFilename();
            }
            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
                effectSettings.bitmapType = 6;
            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
                effectSettings.bitmapType = 5;
            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
                effectSettings.bitmapType = 4;
            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
                throw new RuntimeException("Bitmap config not supported");

            effectSettings.width = bitmap.getWidth();
            effectSettings.height = bitmap.getHeight();
            effectSettings.framingBuffer = new int[effectSettings.width];
            // Scan the bitmap one row at a time to find the minimum and
            // maximum alpha values across all pixels.
            int tmp = 0;
            short maxAlpha = 0;
            short minAlpha = (short)0xFF;
            short alpha = 0;
            while (tmp < effectSettings.height) {
                bitmap.getPixels(effectSettings.framingBuffer, 0,
                        effectSettings.width, 0, tmp,
                        effectSettings.width, 1);
                for (int i = 0; i < effectSettings.width; i++) {
                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
                    if (alpha > maxAlpha) {
                        maxAlpha = alpha;
                    }
                    if (alpha < minAlpha) {
                        minAlpha = alpha;
                    }
                }
                tmp += 1;
            }
            // Use the midpoint of min/max alpha, converted to a percentage,
            // as the constant blending level for the overlay.
            alpha = (short)((maxAlpha + minAlpha) / 2);
            alpha = (short)((alpha * 100) / 256);
            effectSettings.alphaBlendingEndPercent = alpha;
            effectSettings.alphaBlendingMiddlePercent = alpha;
            effectSettings.alphaBlendingStartPercent = alpha;
            effectSettings.alphaBlendingFadeInTimePercent = 100;
            effectSettings.alphaBlendingFadeOutTimePercent = 100;
            effectSettings.framingBuffer = null;

            /*
             * Set the resized RGB file dimensions
             */
            effectSettings.width = overlay.getResizedRGBSizeWidth();
            if(effectSettings.width == 0) {
                effectSettings.width = bitmap.getWidth();
            }

            effectSettings.height = overlay.getResizedRGBSizeHeight();
            if(effectSettings.height == 0) {
                effectSettings.height = bitmap.getHeight();
            }

        }

        effectSettings.topLeftX = 0;
        effectSettings.topLeftY = 0;

        effectSettings.framingResize = true;
        effectSettings.text = null;
        effectSettings.textRenderingData = null;
        effectSettings.textBufferWidth = 0;
        effectSettings.textBufferHeight = 0;
        effectSettings.fiftiesFrameRate = 0;
        effectSettings.rgb16InputColor = 0;
        int mediaItemHeight;
        int aspectRatio;
        if (overlay.getMediaItem() instanceof MediaImageItem) {
            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
                // Ken Burns was applied
                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
                aspectRatio = getAspectRatio(
                        ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
                        , mediaItemHeight);
            } else {
                //For image get the scaled height. Aspect ratio would remain the same
                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
                aspectRatio = overlay.getMediaItem().getAspectRatio();
            }
        } else {
            aspectRatio = overlay.getMediaItem().getAspectRatio();
            mediaItemHeight = overlay.getMediaItem().getHeight();
        }
        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
        return effectSettings;
    }

    /** @return The Video Editor aspect ratio */
    int nativeHelperGetAspectRatio() {
        return mVideoEditor.getAspectRatio();
    }

    /**
     * Sets the audio regenerate flag
     *
     * @param flag The boolean to set the audio regenerate flag
     */
    void setAudioflag(boolean flag) {
        //check if the file exists.
2119 if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) { 2120 flag = true; 2121 } 2122 mRegenerateAudio = flag; 2123 } 2124 2125 /** 2126 * Gets the audio regenerate flag 2127 * 2128 * @param return The boolean to get the audio regenerate flag 2129 * 2130 */ 2131 boolean getAudioflag() { 2132 return mRegenerateAudio; 2133 } 2134 2135 /** 2136 * Maps the average frame rate to one of the defined enum values 2137 * 2138 * @param averageFrameRate The average frame rate of video item 2139 * 2140 * @return The frame rate from one of the defined enum values 2141 */ 2142 int GetClosestVideoFrameRate(int averageFrameRate) { 2143 if (averageFrameRate >= 25) { 2144 return VideoFrameRate.FR_30_FPS; 2145 } else if (averageFrameRate >= 20) { 2146 return VideoFrameRate.FR_25_FPS; 2147 } else if (averageFrameRate >= 15) { 2148 return VideoFrameRate.FR_20_FPS; 2149 } else if (averageFrameRate >= 12) { 2150 return VideoFrameRate.FR_15_FPS; 2151 } else if (averageFrameRate >= 10) { 2152 return VideoFrameRate.FR_12_5_FPS; 2153 } else if (averageFrameRate >= 7) { 2154 return VideoFrameRate.FR_10_FPS; 2155 } else if (averageFrameRate >= 5) { 2156 return VideoFrameRate.FR_7_5_FPS; 2157 } else { 2158 return -1; 2159 } 2160 } 2161 2162 /** 2163 * Helper function to adjust the effect or overlay start time 2164 * depending on the begin and end boundary time of meddia item 2165 */ 2166 public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime, 2167 int endCutTime) { 2168 2169 int effectStartTime = 0; 2170 int effectDuration = 0; 2171 2172 /** 2173 * cbct -> clip begin cut time 2174 * cect -> clip end cut time 2175 **************************************** 2176 * | | 2177 * | cbct cect | 2178 * | <-1--> | | | 2179 * | <--|-2-> | | 2180 * | | <---3---> | | 2181 * | | <--|-4---> | 2182 * | | | <--5--> | 2183 * | <---|------6----|----> | 2184 * | | 2185 * < : effectStart 2186 * > : effectStart + effectDuration 2187 
**************************************** 2188 **/ 2189 2190 /** 1 & 5 */ 2191 /** 2192 * Effect falls out side the trim duration. In such a case effects shall 2193 * not be applied. 2194 */ 2195 if ((lEffect.startTime > endCutTime) 2196 || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) { 2197 2198 effectStartTime = 0; 2199 effectDuration = 0; 2200 2201 lEffect.startTime = effectStartTime; 2202 lEffect.duration = effectDuration; 2203 return; 2204 } 2205 2206 /** 2 */ 2207 if ((lEffect.startTime < beginCutTime) 2208 && ((lEffect.startTime + lEffect.duration) > beginCutTime) 2209 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2210 effectStartTime = 0; 2211 effectDuration = lEffect.duration; 2212 2213 effectDuration -= (beginCutTime - lEffect.startTime); 2214 lEffect.startTime = effectStartTime; 2215 lEffect.duration = effectDuration; 2216 return; 2217 } 2218 2219 /** 3 */ 2220 if ((lEffect.startTime >= beginCutTime) 2221 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2222 effectStartTime = lEffect.startTime - beginCutTime; 2223 lEffect.startTime = effectStartTime; 2224 lEffect.duration = lEffect.duration; 2225 return; 2226 } 2227 2228 /** 4 */ 2229 if ((lEffect.startTime >= beginCutTime) 2230 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2231 effectStartTime = lEffect.startTime - beginCutTime; 2232 effectDuration = endCutTime - lEffect.startTime; 2233 lEffect.startTime = effectStartTime; 2234 lEffect.duration = effectDuration; 2235 return; 2236 } 2237 2238 /** 6 */ 2239 if ((lEffect.startTime < beginCutTime) 2240 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2241 effectStartTime = 0; 2242 effectDuration = endCutTime - beginCutTime; 2243 lEffect.startTime = effectStartTime; 2244 lEffect.duration = effectDuration; 2245 return; 2246 } 2247 2248 } 2249 2250 /** 2251 * Generates the clip for preview or export 2252 * 2253 * @param editSettings The EditSettings reference for generating 2254 * a clip for 
preview or export 2255 * 2256 * @return error value 2257 */ 2258 public int generateClip(EditSettings editSettings) { 2259 int err = 0; 2260 2261 try { 2262 err = nativeGenerateClip(editSettings); 2263 } catch (IllegalArgumentException ex) { 2264 Log.e(TAG, "Illegal Argument exception in load settings"); 2265 return -1; 2266 } catch (IllegalStateException ex) { 2267 Log.e(TAG, "Illegal state exception in load settings"); 2268 return -1; 2269 } catch (RuntimeException ex) { 2270 Log.e(TAG, "Runtime exception in load settings"); 2271 return -1; 2272 } 2273 return err; 2274 } 2275 2276 /** 2277 * Init function to initialiZe the ClipSettings reference to 2278 * default values 2279 * 2280 * @param lclipSettings The ClipSettings reference 2281 */ 2282 void initClipSettings(ClipSettings lclipSettings) { 2283 lclipSettings.clipPath = null; 2284 lclipSettings.clipDecodedPath = null; 2285 lclipSettings.clipOriginalPath = null; 2286 lclipSettings.fileType = 0; 2287 lclipSettings.endCutTime = 0; 2288 lclipSettings.beginCutTime = 0; 2289 lclipSettings.beginCutPercent = 0; 2290 lclipSettings.endCutPercent = 0; 2291 lclipSettings.panZoomEnabled = false; 2292 lclipSettings.panZoomPercentStart = 0; 2293 lclipSettings.panZoomTopLeftXStart = 0; 2294 lclipSettings.panZoomTopLeftYStart = 0; 2295 lclipSettings.panZoomPercentEnd = 0; 2296 lclipSettings.panZoomTopLeftXEnd = 0; 2297 lclipSettings.panZoomTopLeftYEnd = 0; 2298 lclipSettings.mediaRendering = 0; 2299 } 2300 2301 2302 /** 2303 * Populates the settings for generating an effect clip 2304 * 2305 * @param lMediaItem The media item for which the effect clip 2306 * needs to be generated 2307 * @param lclipSettings The ClipSettings reference containing 2308 * clips data 2309 * @param e The EditSettings reference containing effect specific data 2310 * @param uniqueId The unique id used in the name of the output clip 2311 * @param clipNo Used for internal purpose 2312 * 2313 * @return The name and path of generated clip 2314 */ 2315 
String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings, 2316 EditSettings e,String uniqueId,int clipNo) { 2317 int err = 0; 2318 EditSettings editSettings = null; 2319 String EffectClipPath = null; 2320 2321 editSettings = new EditSettings(); 2322 2323 editSettings.clipSettingsArray = new ClipSettings[1]; 2324 editSettings.clipSettingsArray[0] = lclipSettings; 2325 2326 editSettings.backgroundMusicSettings = null; 2327 editSettings.transitionSettingsArray = null; 2328 editSettings.effectSettingsArray = e.effectSettingsArray; 2329 2330 EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_" 2331 + lMediaItem.getId() + uniqueId + ".3gp"); 2332 2333 File tmpFile = new File(EffectClipPath); 2334 if (tmpFile.exists()) { 2335 tmpFile.delete(); 2336 } 2337 2338 if (lMediaItem instanceof MediaVideoItem) { 2339 MediaVideoItem m = (MediaVideoItem)lMediaItem; 2340 2341 editSettings.audioFormat = AudioFormat.AAC; 2342 editSettings.audioChannels = 2; 2343 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2344 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2345 2346 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2347 //editSettings.videoFormat = VideoFormat.MPEG4; 2348 editSettings.videoFormat = VideoFormat.H264; 2349 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2350 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2351 m.getHeight()); 2352 } else { 2353 MediaImageItem m = (MediaImageItem)lMediaItem; 2354 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2355 editSettings.audioChannels = 2; 2356 editSettings.audioFormat = AudioFormat.AAC; 2357 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2358 2359 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2360 editSettings.videoFormat = VideoFormat.H264; 2361 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2362 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2363 
m.getScaledHeight()); 2364 } 2365 2366 editSettings.outputFile = EffectClipPath; 2367 2368 if (clipNo == 1) { 2369 mProcessingState = PROCESSING_INTERMEDIATE1; 2370 } else if (clipNo == 2) { 2371 mProcessingState = PROCESSING_INTERMEDIATE2; 2372 } 2373 mProcessingObject = lMediaItem; 2374 err = generateClip(editSettings); 2375 mProcessingState = PROCESSING_NONE; 2376 2377 if (err == 0) { 2378 lclipSettings.clipPath = EffectClipPath; 2379 lclipSettings.fileType = FileType.THREE_GPP; 2380 return EffectClipPath; 2381 } else { 2382 throw new RuntimeException("preview generation cannot be completed"); 2383 } 2384 } 2385 2386 2387 /** 2388 * Populates the settings for generating a Ken Burn effect clip 2389 * 2390 * @param m The media image item for which the Ken Burn effect clip 2391 * needs to be generated 2392 * @param e The EditSettings reference clip specific data 2393 * 2394 * @return The name and path of generated clip 2395 */ 2396 String generateKenBurnsClip(EditSettings e, MediaImageItem m) { 2397 String output = null; 2398 int err = 0; 2399 2400 e.backgroundMusicSettings = null; 2401 e.transitionSettingsArray = null; 2402 e.effectSettingsArray = null; 2403 output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp"); 2404 2405 File tmpFile = new File(output); 2406 if (tmpFile.exists()) { 2407 tmpFile.delete(); 2408 } 2409 2410 e.outputFile = output; 2411 e.audioBitrate = Bitrate.BR_64_KBPS; 2412 e.audioChannels = 2; 2413 e.audioFormat = AudioFormat.AAC; 2414 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2415 2416 e.videoBitrate = Bitrate.BR_5_MBPS; 2417 e.videoFormat = VideoFormat.H264; 2418 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2419 e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2420 m.getScaledHeight()); 2421 mProcessingState = PROCESSING_KENBURNS; 2422 mProcessingObject = m; 2423 err = generateClip(e); 2424 // Reset the processing state and check for errors 2425 mProcessingState = PROCESSING_NONE; 
2426 if (err != 0) { 2427 throw new RuntimeException("preview generation cannot be completed"); 2428 } 2429 return output; 2430 } 2431 2432 2433 /** 2434 * Calculates the output resolution for transition clip 2435 * 2436 * @param m1 First media item associated with transition 2437 * @param m2 Second media item associated with transition 2438 * 2439 * @return The transition resolution 2440 */ 2441 private int getTransitionResolution(MediaItem m1, MediaItem m2) { 2442 int clip1Height = 0; 2443 int clip2Height = 0; 2444 int videoSize = 0; 2445 2446 if (m1 != null && m2 != null) { 2447 if (m1 instanceof MediaVideoItem) { 2448 clip1Height = m1.getHeight(); 2449 } else if (m1 instanceof MediaImageItem) { 2450 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2451 } 2452 if (m2 instanceof MediaVideoItem) { 2453 clip2Height = m2.getHeight(); 2454 } else if (m2 instanceof MediaImageItem) { 2455 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2456 } 2457 if (clip1Height > clip2Height) { 2458 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2459 } else { 2460 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2461 } 2462 } else if (m1 == null && m2 != null) { 2463 if (m2 instanceof MediaVideoItem) { 2464 clip2Height = m2.getHeight(); 2465 } else if (m2 instanceof MediaImageItem) { 2466 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2467 } 2468 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2469 } else if (m1 != null && m2 == null) { 2470 if (m1 instanceof MediaVideoItem) { 2471 clip1Height = m1.getHeight(); 2472 } else if (m1 instanceof MediaImageItem) { 2473 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2474 } 2475 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2476 } 2477 return videoSize; 2478 } 2479 2480 /** 2481 * Populates the settings for generating an transition clip 2482 * 2483 * @param m1 First media item associated with 
transition 2484 * @param m2 Second media item associated with transition 2485 * @param e The EditSettings reference containing 2486 * clip specific data 2487 * @param uniqueId The unique id used in the name of the output clip 2488 * @param t The Transition specific data 2489 * 2490 * @return The name and path of generated clip 2491 */ 2492 String generateTransitionClip(EditSettings e, String uniqueId, 2493 MediaItem m1, MediaItem m2,Transition t) { 2494 String outputFilename = null; 2495 int err = 0; 2496 2497 outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp"); 2498 e.outputFile = outputFilename; 2499 e.audioBitrate = Bitrate.BR_64_KBPS; 2500 e.audioChannels = 2; 2501 e.audioFormat = AudioFormat.AAC; 2502 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2503 2504 e.videoBitrate = Bitrate.BR_5_MBPS; 2505 e.videoFormat = VideoFormat.H264; 2506 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2507 e.videoFrameSize = getTransitionResolution(m1, m2); 2508 2509 if (new File(outputFilename).exists()) { 2510 new File(outputFilename).delete(); 2511 } 2512 mProcessingState = PROCESSING_INTERMEDIATE3; 2513 mProcessingObject = t; 2514 err = generateClip(e); 2515 // Reset the processing state and check for errors 2516 mProcessingState = PROCESSING_NONE; 2517 if (err != 0) { 2518 throw new RuntimeException("preview generation cannot be completed"); 2519 } 2520 return outputFilename; 2521 } 2522 2523 /** 2524 * Populates effects and overlays in EffectSettings structure 2525 * and also adjust the start time and duration of effects and overlays 2526 * w.r.t to total story board time 2527 * 2528 * @param m1 Media item associated with effect 2529 * @param effectSettings The EffectSettings reference containing 2530 * effect specific data 2531 * @param beginCutTime The begin cut time of the clip associated with effect 2532 * @param endCutTime The end cut time of the clip associated with effect 2533 * @param storyBoardTime The current story board time 2534 * 2535 
* @return The updated index 2536 */ 2537 private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i, 2538 int beginCutTime, int endCutTime, int storyBoardTime) { 2539 2540 if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0 2541 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2542 beginCutTime += m.getBeginTransition().getDuration(); 2543 endCutTime -= m.getEndTransition().getDuration(); 2544 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2545 && m.getEndTransition().getDuration() > 0) { 2546 endCutTime -= m.getEndTransition().getDuration(); 2547 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2548 && m.getBeginTransition().getDuration() > 0) { 2549 beginCutTime += m.getBeginTransition().getDuration(); 2550 } 2551 2552 final List<Effect> effects = m.getAllEffects(); 2553 final List<Overlay> overlays = m.getAllOverlays(); 2554 2555 for (Overlay overlay : overlays) { 2556 effectSettings[i] = getOverlaySettings((OverlayFrame)overlay); 2557 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2558 effectSettings[i].startTime += storyBoardTime; 2559 i++; 2560 } 2561 2562 for (Effect effect : effects) { 2563 if (effect instanceof EffectColor) { 2564 effectSettings[i] = getEffectSettings((EffectColor)effect); 2565 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2566 effectSettings[i].startTime += storyBoardTime; 2567 i++; 2568 } 2569 } 2570 2571 return i; 2572 } 2573 2574 /** 2575 * Adjusts the media item boundaries for use in export or preview 2576 * 2577 * @param clipSettings The ClipSettings reference 2578 * @param clipProperties The Properties reference 2579 * @param m The media item 2580 */ 2581 private void adjustMediaItemBoundary(ClipSettings clipSettings, 2582 Properties clipProperties, MediaItem m) { 2583 if (m.getBeginTransition() != null && 
m.getBeginTransition().getDuration() > 0 2584 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2585 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2586 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2587 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2588 && m.getEndTransition().getDuration() > 0) { 2589 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2590 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2591 && m.getBeginTransition().getDuration() > 0) { 2592 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2593 } 2594 2595 clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime; 2596 2597 if (clipProperties.videoDuration != 0) { 2598 clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2599 } 2600 2601 if (clipProperties.audioDuration != 0) { 2602 clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2603 } 2604 } 2605 2606 /** 2607 * Generates the transition if transition is present 2608 * and is in invalidated state 2609 * 2610 * @param transition The Transition reference 2611 * @param editSettings The EditSettings reference 2612 * @param clipPropertiesArray The clip Properties array 2613 * @param i The index in clip Properties array for current clip 2614 */ 2615 private void generateTransition(Transition transition, EditSettings editSettings, 2616 PreviewClipProperties clipPropertiesArray, int index) { 2617 if (!(transition.isGenerated())) { 2618 transition.generate(); 2619 } 2620 editSettings.clipSettingsArray[index] = new ClipSettings(); 2621 editSettings.clipSettingsArray[index].clipPath = transition.getFilename(); 2622 editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP; 2623 editSettings.clipSettingsArray[index].beginCutTime = 0; 2624 editSettings.clipSettingsArray[index].endCutTime = 
(int)transition.getDuration(); 2625 editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS; 2626 2627 try { 2628 clipPropertiesArray.clipProperties[index] = 2629 getMediaProperties(transition.getFilename()); 2630 } catch (Exception e) { 2631 throw new IllegalArgumentException("Unsupported file or file not found"); 2632 } 2633 2634 clipPropertiesArray.clipProperties[index].Id = null; 2635 clipPropertiesArray.clipProperties[index].audioVolumeValue = 100; 2636 clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration(); 2637 if (clipPropertiesArray.clipProperties[index].videoDuration != 0) { 2638 clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration(); 2639 } 2640 2641 if (clipPropertiesArray.clipProperties[index].audioDuration != 0) { 2642 clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration(); 2643 } 2644 } 2645 2646 /** 2647 * Sets the volume for current media item in clip properties array 2648 * 2649 * @param m The media item 2650 * @param clipProperties The clip properties array reference 2651 * @param i The index in clip Properties array for current clip 2652 */ 2653 private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties, 2654 int index) { 2655 if (m instanceof MediaVideoItem) { 2656 final boolean videoMuted = ((MediaVideoItem)m).isMuted(); 2657 if (videoMuted == false) { 2658 mClipProperties.clipProperties[index].audioVolumeValue = 2659 ((MediaVideoItem)m).getVolume(); 2660 } else { 2661 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2662 } 2663 } else if (m instanceof MediaImageItem) { 2664 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2665 } 2666 } 2667 2668 /** 2669 * Checks for odd size image width and height 2670 * 2671 * @param m The media item 2672 * @param clipProperties The clip properties array reference 2673 * @param i The index in clip Properties array for current clip 2674 */ 2675 
private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) { 2676 if (m instanceof MediaImageItem) { 2677 int width = mClipProperties.clipProperties[index].width; 2678 int height = mClipProperties.clipProperties[index].height; 2679 2680 if ((width % 2) != 0) { 2681 width -= 1; 2682 } 2683 if ((height % 2) != 0) { 2684 height -= 1; 2685 } 2686 mClipProperties.clipProperties[index].width = width; 2687 mClipProperties.clipProperties[index].height = height; 2688 } 2689 } 2690 2691 /** 2692 * Populates the media item properties and calculates the maximum 2693 * height among all the clips 2694 * 2695 * @param m The media item 2696 * @param i The index in clip Properties array for current clip 2697 * @param maxHeight The max height from the clip properties 2698 * 2699 * @return Updates the max height if current clip's height is greater 2700 * than all previous clips height 2701 */ 2702 private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) { 2703 mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings(); 2704 if (m instanceof MediaVideoItem) { 2705 mPreviewEditSettings.clipSettingsArray[index] = 2706 ((MediaVideoItem)m).getVideoClipProperties(); 2707 if (((MediaVideoItem)m).getHeight() > maxHeight) { 2708 maxHeight = ((MediaVideoItem)m).getHeight(); 2709 } 2710 } else if (m instanceof MediaImageItem) { 2711 mPreviewEditSettings.clipSettingsArray[index] = 2712 ((MediaImageItem)m).getImageClipProperties(); 2713 if (((MediaImageItem)m).getScaledHeight() > maxHeight) { 2714 maxHeight = ((MediaImageItem)m).getScaledHeight(); 2715 } 2716 } 2717 /** + Handle the image files here */ 2718 if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) { 2719 mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = 2720 ((MediaImageItem)m).getDecodedImageFileName(); 2721 2722 mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath = 2723 
mPreviewEditSettings.clipSettingsArray[index].clipPath; 2724 } 2725 return maxHeight; 2726 } 2727 2728 /** 2729 * Populates the background music track properties 2730 * 2731 * @param mediaBGMList The background music list 2732 * 2733 */ 2734 private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) { 2735 2736 if (mediaBGMList.size() == 1) { 2737 mAudioTrack = mediaBGMList.get(0); 2738 } else { 2739 mAudioTrack = null; 2740 } 2741 2742 if (mAudioTrack != null) { 2743 mAudioSettings = new AudioSettings(); 2744 Properties mAudioProperties = new Properties(); 2745 mAudioSettings.pFile = null; 2746 mAudioSettings.Id = mAudioTrack.getId(); 2747 try { 2748 mAudioProperties = getMediaProperties(mAudioTrack.getFilename()); 2749 } catch (Exception e) { 2750 throw new IllegalArgumentException("Unsupported file or file not found"); 2751 } 2752 mAudioSettings.bRemoveOriginal = false; 2753 mAudioSettings.channels = mAudioProperties.audioChannels; 2754 mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency; 2755 mAudioSettings.loop = mAudioTrack.isLooping(); 2756 mAudioSettings.ExtendedFs = 0; 2757 mAudioSettings.pFile = mAudioTrack.getFilename(); 2758 mAudioSettings.startMs = mAudioTrack.getStartTime(); 2759 mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime(); 2760 mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime(); 2761 if (mAudioTrack.isMuted()) { 2762 mAudioSettings.volume = 0; 2763 } else { 2764 mAudioSettings.volume = mAudioTrack.getVolume(); 2765 } 2766 mAudioSettings.fileType = mAudioProperties.fileType; 2767 mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume(); 2768 mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold(); 2769 mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled(); 2770 mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE); 2771 mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath; 2772 2773 mPreviewEditSettings.backgroundMusicSettings = 
new BackgroundMusicSettings(); 2774 mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath; 2775 mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType; 2776 mPreviewEditSettings.backgroundMusicSettings.insertionTime = 2777 mAudioTrack.getStartTime(); 2778 mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume(); 2779 mPreviewEditSettings.backgroundMusicSettings.beginLoop = 2780 mAudioTrack.getBoundaryBeginTime(); 2781 mPreviewEditSettings.backgroundMusicSettings.endLoop = 2782 mAudioTrack.getBoundaryEndTime(); 2783 mPreviewEditSettings.backgroundMusicSettings.enableDucking = 2784 mAudioTrack.isDuckingEnabled(); 2785 mPreviewEditSettings.backgroundMusicSettings.duckingThreshold = 2786 mAudioTrack.getDuckingThreshhold(); 2787 mPreviewEditSettings.backgroundMusicSettings.lowVolume = 2788 mAudioTrack.getDuckedTrackVolume(); 2789 mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping(); 2790 mPreviewEditSettings.primaryTrackVolume = 100; 2791 mProcessingState = PROCESSING_AUDIO_PCM; 2792 mProcessingObject = mAudioTrack; 2793 } else { 2794 mAudioSettings = null; 2795 mPreviewEditSettings.backgroundMusicSettings = null; 2796 mAudioTrackPCMFilePath = null; 2797 } 2798 } 2799 2800 /** 2801 * Calculates all the effects in all the media items 2802 * in media items list 2803 * 2804 * @param mediaItemsList The media item list 2805 * 2806 * @return The total number of effects 2807 * 2808 */ 2809 private int getTotalEffects(List<MediaItem> mediaItemsList) { 2810 int totalEffects = 0; 2811 final Iterator<MediaItem> it = mediaItemsList.iterator(); 2812 while (it.hasNext()) { 2813 final MediaItem t = it.next(); 2814 totalEffects += t.getAllEffects().size(); 2815 totalEffects += t.getAllOverlays().size(); 2816 final Iterator<Effect> ef = t.getAllEffects().iterator(); 2817 while (ef.hasNext()) { 2818 final Effect e = ef.next(); 2819 if (e instanceof EffectKenBurns) { 2820 
totalEffects--; 2821 } 2822 } 2823 } 2824 return totalEffects; 2825 } 2826 2827 /** 2828 * This function is responsible for forming clip settings 2829 * array and clip properties array including transition clips 2830 * and effect settings for preview purpose or export. 2831 * 2832 * 2833 * @param mediaItemsList The media item list 2834 * @param mediaTransitionList The transitions list 2835 * @param mediaBGMList The background music list 2836 * @param listener The MediaProcessingProgressListener 2837 * 2838 */ 2839 void previewStoryBoard(List<MediaItem> mediaItemsList, 2840 List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList, 2841 MediaProcessingProgressListener listener) { 2842 if (mInvalidatePreviewArray) { 2843 int previewIndex = 0; 2844 int totalEffects = 0; 2845 int storyBoardTime = 0; 2846 int maxHeight = 0; 2847 int beginCutTime = 0; 2848 int endCutTime = 0; 2849 int effectIndex = 0; 2850 Transition lTransition = null; 2851 MediaItem lMediaItem = null; 2852 mPreviewEditSettings = new EditSettings(); 2853 mClipProperties = new PreviewClipProperties(); 2854 mTotalClips = 0; 2855 2856 mTotalClips = mediaItemsList.size(); 2857 for (Transition transition : mediaTransitionList) { 2858 if (transition.getDuration() > 0) { 2859 mTotalClips++; 2860 } 2861 } 2862 2863 totalEffects = getTotalEffects(mediaItemsList); 2864 2865 mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips]; 2866 mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects]; 2867 mClipProperties.clipProperties = new Properties[mTotalClips]; 2868 2869 /** record the call back progress listener */ 2870 mMediaProcessingProgressListener = listener; 2871 mProgressToApp = 0; 2872 2873 if (mediaItemsList.size() > 0) { 2874 for (int i = 0; i < mediaItemsList.size(); i++) { 2875 /* Get the Media Item from the list */ 2876 lMediaItem = mediaItemsList.get(i); 2877 if (lMediaItem instanceof MediaVideoItem) { 2878 beginCutTime = 
(int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime(); 2879 endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime(); 2880 } else if (lMediaItem instanceof MediaImageItem) { 2881 beginCutTime = 0; 2882 endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration(); 2883 } 2884 /* Get the transition associated with Media Item */ 2885 lTransition = lMediaItem.getBeginTransition(); 2886 if (lTransition != null && (lTransition.getDuration() > 0)) { 2887 /* generate transition clip */ 2888 generateTransition(lTransition, mPreviewEditSettings, 2889 mClipProperties, previewIndex); 2890 storyBoardTime += mClipProperties.clipProperties[previewIndex].duration; 2891 previewIndex++; 2892 } 2893 /* Populate media item properties */ 2894 maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight); 2895 /* Get the clip properties of the media item. */ 2896 if (lMediaItem instanceof MediaImageItem) { 2897 int tmpCnt = 0; 2898 boolean bEffectKbPresent = false; 2899 final List<Effect> effectList = lMediaItem.getAllEffects(); 2900 /** 2901 * Check if Ken Burns effect is present 2902 */ 2903 while (tmpCnt < effectList.size()) { 2904 if (effectList.get(tmpCnt) instanceof EffectKenBurns) { 2905 bEffectKbPresent = true; 2906 break; 2907 } 2908 tmpCnt++; 2909 } 2910 2911 if (bEffectKbPresent) { 2912 try { 2913 if(((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) { 2914 mClipProperties.clipProperties[previewIndex] 2915 = getMediaProperties(((MediaImageItem)lMediaItem). 2916 getGeneratedImageClip()); 2917 } 2918 else { 2919 mClipProperties.clipProperties[previewIndex] 2920 = getMediaProperties(((MediaImageItem)lMediaItem). 
2921 getScaledImageFileName()); 2922 mClipProperties.clipProperties[previewIndex].width = 2923 ((MediaImageItem)lMediaItem).getScaledWidth(); 2924 mClipProperties.clipProperties[previewIndex].height = 2925 ((MediaImageItem)lMediaItem).getScaledHeight(); 2926 } 2927 } catch (Exception e) { 2928 throw new IllegalArgumentException("Unsupported file or file not found"); 2929 } 2930 } else { 2931 try { 2932 mClipProperties.clipProperties[previewIndex] 2933 = getMediaProperties(((MediaImageItem)lMediaItem). 2934 getScaledImageFileName()); 2935 } catch (Exception e) { 2936 throw new IllegalArgumentException("Unsupported file or file not found"); 2937 } 2938 mClipProperties.clipProperties[previewIndex].width = 2939 ((MediaImageItem)lMediaItem).getScaledWidth(); 2940 mClipProperties.clipProperties[previewIndex].height = 2941 ((MediaImageItem)lMediaItem).getScaledHeight(); 2942 } 2943 } else { 2944 try { 2945 mClipProperties.clipProperties[previewIndex] 2946 = getMediaProperties(lMediaItem.getFilename()); 2947 } catch (Exception e) { 2948 throw new IllegalArgumentException("Unsupported file or file not found"); 2949 } 2950 } 2951 mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId(); 2952 checkOddSizeImage(lMediaItem, mClipProperties, previewIndex); 2953 adjustVolume(lMediaItem, mClipProperties, previewIndex); 2954 2955 /* 2956 * Adjust media item start time and end time w.r.t to begin 2957 * and end transitions associated with media item 2958 */ 2959 2960 adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex], 2961 mClipProperties.clipProperties[previewIndex], lMediaItem); 2962 2963 /* 2964 * Get all the effects and overlays for that media item and 2965 * adjust start time and duration of effects 2966 */ 2967 2968 effectIndex = populateEffects(lMediaItem, 2969 mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime, 2970 endCutTime, storyBoardTime); 2971 storyBoardTime += mClipProperties.clipProperties[previewIndex].duration; 
2972 previewIndex++; 2973 2974 /* Check if there is any end transition at last media item */ 2975 2976 if (i == (mediaItemsList.size() - 1)) { 2977 lTransition = lMediaItem.getEndTransition(); 2978 if (lTransition != null && (lTransition.getDuration() > 0)) { 2979 generateTransition(lTransition, mPreviewEditSettings, mClipProperties, 2980 previewIndex); 2981 break; 2982 } 2983 } 2984 } 2985 2986 if (!mErrorFlagSet) { 2987 mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor 2988 .getAspectRatio(), maxHeight); 2989 populateBackgroundMusicProperties(mediaBGMList); 2990 2991 /** call to native populate settings */ 2992 try { 2993 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings); 2994 } catch (IllegalArgumentException ex) { 2995 Log.e(TAG, "Illegal argument exception in nativePopulateSettings"); 2996 throw ex; 2997 } catch (IllegalStateException ex) { 2998 Log.e(TAG, "Illegal state exception in nativePopulateSettings"); 2999 throw ex; 3000 } catch (RuntimeException ex) { 3001 Log.e(TAG, "Runtime exception in nativePopulateSettings"); 3002 throw ex; 3003 } 3004 mInvalidatePreviewArray = false; 3005 mProcessingState = PROCESSING_NONE; 3006 } 3007 } 3008 if (mErrorFlagSet) { 3009 mErrorFlagSet = false; 3010 throw new RuntimeException("preview generation cannot be completed"); 3011 } 3012 } 3013 } /* END of previewStoryBoard */ 3014 3015 /** 3016 * This function is responsible for starting the preview 3017 * 3018 * 3019 * @param surface The surface on which preview has to be displayed 3020 * @param fromMs The time in ms from which preview has to be started 3021 * @param toMs The time in ms till preview has to be played 3022 * @param loop To loop the preview or not 3023 * @param callbackAfterFrameCount INdicated after how many frames 3024 * the callback is needed 3025 * @param listener The PreviewProgressListener 3026 */ 3027 void doPreview(Surface surface, long fromMs, long toMs, boolean loop, 3028 int callbackAfterFrameCount, 
PreviewProgressListener listener) { 3029 mPreviewProgress = fromMs; 3030 mIsFirstProgress = true; 3031 mPreviewProgressListener = listener; 3032 3033 if (!mInvalidatePreviewArray) { 3034 try { 3035 /** Modify the image files names to rgb image files. */ 3036 for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; 3037 clipCnt++) { 3038 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) { 3039 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = 3040 mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath; 3041 } 3042 } 3043 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings); 3044 nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop); 3045 } catch (IllegalArgumentException ex) { 3046 Log.e(TAG, "Illegal argument exception in nativeStartPreview"); 3047 throw ex; 3048 } catch (IllegalStateException ex) { 3049 Log.e(TAG, "Illegal state exception in nativeStartPreview"); 3050 throw ex; 3051 } catch (RuntimeException ex) { 3052 Log.e(TAG, "Runtime exception in nativeStartPreview"); 3053 throw ex; 3054 } 3055 } else { 3056 throw new IllegalStateException("generatePreview is in progress"); 3057 } 3058 } 3059 3060 /** 3061 * This function is responsible for stopping the preview 3062 */ 3063 long stopPreview() { 3064 return nativeStopPreview(); 3065 } 3066 3067 /** 3068 * This function is responsible for rendering a single frame 3069 * from the complete story board on the surface 3070 * 3071 * @param surface The surface on which frame has to be rendered 3072 * @param time The time in ms at which the frame has to be rendered 3073 * @param surfaceWidth The surface width 3074 * @param surfaceHeight The surface height 3075 * @param overlayData The overlay data 3076 * 3077 * @return The actual time from the story board at which the frame was extracted 3078 * and rendered 3079 */ 3080 long renderPreviewFrame(Surface surface, long time, int surfaceWidth, 3081 int 
surfaceHeight, VideoEditor.OverlayData overlayData) { 3082 if (mInvalidatePreviewArray) { 3083 if (Log.isLoggable(TAG, Log.DEBUG)) { 3084 Log.d(TAG, "Call generate preview first"); 3085 } 3086 throw new IllegalStateException("Call generate preview first"); 3087 } 3088 3089 long timeMs = 0; 3090 try { 3091 for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; 3092 clipCnt++) { 3093 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) { 3094 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = 3095 mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath; 3096 } 3097 } 3098 3099 // Reset the render preview frame params that shall be set by native. 3100 mRenderPreviewOverlayFile = null; 3101 mRenderPreviewRenderingMode = MediaRendering.RESIZING; 3102 3103 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings); 3104 3105 timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight); 3106 3107 if (mRenderPreviewOverlayFile != null) { 3108 overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile), 3109 mRenderPreviewRenderingMode); 3110 } else { 3111 overlayData.setClear(); 3112 } 3113 } catch (IllegalArgumentException ex) { 3114 Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame"); 3115 throw ex; 3116 } catch (IllegalStateException ex) { 3117 Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame"); 3118 throw ex; 3119 } catch (RuntimeException ex) { 3120 Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame"); 3121 throw ex; 3122 } 3123 3124 return timeMs; 3125 } 3126 3127 private void previewFrameEditInfo(String filename, int renderingMode) { 3128 mRenderPreviewOverlayFile = filename; 3129 mRenderPreviewRenderingMode = renderingMode; 3130 } 3131 3132 3133 /** 3134 * This function is responsible for rendering a single frame 3135 * from a single media item on the surface 3136 * 3137 * @param surface The surface on which frame 
has to be rendered 3138 * @param filepath The file path for which the frame needs to be displayed 3139 * @param time The time in ms at which the frame has to be rendered 3140 * @param framewidth The frame width 3141 * @param framewidth The frame height 3142 * 3143 * @return The actual time from media item at which the frame was extracted 3144 * and rendered 3145 */ 3146 long renderMediaItemPreviewFrame(Surface surface, String filepath, 3147 long time, int framewidth, int frameheight) { 3148 long timeMs = 0; 3149 try { 3150 timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth, 3151 frameheight, 0, 0, time); 3152 } catch (IllegalArgumentException ex) { 3153 Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame"); 3154 throw ex; 3155 } catch (IllegalStateException ex) { 3156 Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame"); 3157 throw ex; 3158 } catch (RuntimeException ex) { 3159 Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame"); 3160 throw ex; 3161 } 3162 3163 return timeMs; 3164 } 3165 3166 /** 3167 * This function sets the flag to invalidate the preview array 3168 * and for generating the preview again 3169 */ 3170 void setGeneratePreview(boolean isRequired) { 3171 boolean semAcquiredDone = false; 3172 try { 3173 lock(); 3174 semAcquiredDone = true; 3175 mInvalidatePreviewArray = isRequired; 3176 } catch (InterruptedException ex) { 3177 Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame"); 3178 } finally { 3179 if (semAcquiredDone) { 3180 unlock(); 3181 } 3182 } 3183 } 3184 3185 /** 3186 * @return Returns the current status of preview invalidation 3187 * flag 3188 */ 3189 boolean getGeneratePreview() { 3190 return mInvalidatePreviewArray; 3191 } 3192 3193 /** 3194 * Calculates the aspect ratio from widht and height 3195 * 3196 * @param w The width of media item 3197 * @param h The height of media item 3198 * 3199 * @return The calculated aspect ratio 3200 */ 3201 int 
getAspectRatio(int w, int h) { 3202 double apRatio = (double)(w) / (double)(h); 3203 BigDecimal bd = new BigDecimal(apRatio); 3204 bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP); 3205 apRatio = bd.doubleValue(); 3206 int var = MediaProperties.ASPECT_RATIO_16_9; 3207 if (apRatio >= 1.7) { 3208 var = MediaProperties.ASPECT_RATIO_16_9; 3209 } else if (apRatio >= 1.6) { 3210 var = MediaProperties.ASPECT_RATIO_5_3; 3211 } else if (apRatio >= 1.5) { 3212 var = MediaProperties.ASPECT_RATIO_3_2; 3213 } else if (apRatio > 1.3) { 3214 var = MediaProperties.ASPECT_RATIO_4_3; 3215 } else if (apRatio >= 1.2) { 3216 var = MediaProperties.ASPECT_RATIO_11_9; 3217 } 3218 return var; 3219 } 3220 3221 /** 3222 * Maps the file type used in native layer 3223 * to file type used in JAVA layer 3224 * 3225 * @param fileType The file type in native layer 3226 * 3227 * @return The File type in JAVA layer 3228 */ 3229 int getFileType(int fileType) { 3230 int retValue = -1; 3231 switch (fileType) { 3232 case FileType.UNSUPPORTED: 3233 retValue = MediaProperties.FILE_UNSUPPORTED; 3234 break; 3235 case FileType.THREE_GPP: 3236 retValue = MediaProperties.FILE_3GP; 3237 break; 3238 case FileType.MP4: 3239 retValue = MediaProperties.FILE_MP4; 3240 break; 3241 case FileType.JPG: 3242 retValue = MediaProperties.FILE_JPEG; 3243 break; 3244 case FileType.PNG: 3245 retValue = MediaProperties.FILE_PNG; 3246 break; 3247 case FileType.MP3: 3248 retValue = MediaProperties.FILE_MP3; 3249 break; 3250 case FileType.M4V: 3251 retValue = MediaProperties.FILE_M4V; 3252 break; 3253 3254 default: 3255 retValue = -1; 3256 } 3257 return retValue; 3258 } 3259 3260 /** 3261 * Maps the video codec type used in native layer 3262 * to video codec type used in JAVA layer 3263 * 3264 * @param codecType The video codec type in native layer 3265 * 3266 * @return The video codec type in JAVA layer 3267 */ 3268 int getVideoCodecType(int codecType) { 3269 int retValue = -1; 3270 switch (codecType) { 3271 case VideoFormat.H263: 
3272 retValue = MediaProperties.VCODEC_H263; 3273 break; 3274 case VideoFormat.H264: 3275 retValue = MediaProperties.VCODEC_H264BP; 3276 break; 3277 case VideoFormat.MPEG4: 3278 retValue = MediaProperties.VCODEC_MPEG4; 3279 break; 3280 case VideoFormat.UNSUPPORTED: 3281 3282 default: 3283 retValue = -1; 3284 } 3285 return retValue; 3286 } 3287 3288 /** 3289 * Maps the audio codec type used in native layer 3290 * to audio codec type used in JAVA layer 3291 * 3292 * @param audioType The audio codec type in native layer 3293 * 3294 * @return The audio codec type in JAVA layer 3295 */ 3296 int getAudioCodecType(int codecType) { 3297 int retValue = -1; 3298 switch (codecType) { 3299 case AudioFormat.AMR_NB: 3300 retValue = MediaProperties.ACODEC_AMRNB; 3301 break; 3302 case AudioFormat.AAC: 3303 retValue = MediaProperties.ACODEC_AAC_LC; 3304 break; 3305 case AudioFormat.MP3: 3306 retValue = MediaProperties.ACODEC_MP3; 3307 break; 3308 3309 default: 3310 retValue = -1; 3311 } 3312 return retValue; 3313 } 3314 3315 /** 3316 * Returns the frame rate as integer 3317 * 3318 * @param fps The fps as enum 3319 * 3320 * @return The frame rate as integer 3321 */ 3322 int getFrameRate(int fps) { 3323 int retValue = -1; 3324 switch (fps) { 3325 case VideoFrameRate.FR_5_FPS: 3326 retValue = 5; 3327 break; 3328 case VideoFrameRate.FR_7_5_FPS: 3329 retValue = 8; 3330 break; 3331 case VideoFrameRate.FR_10_FPS: 3332 retValue = 10; 3333 break; 3334 case VideoFrameRate.FR_12_5_FPS: 3335 retValue = 13; 3336 break; 3337 case VideoFrameRate.FR_15_FPS: 3338 retValue = 15; 3339 break; 3340 case VideoFrameRate.FR_20_FPS: 3341 retValue = 20; 3342 break; 3343 case VideoFrameRate.FR_25_FPS: 3344 retValue = 25; 3345 break; 3346 case VideoFrameRate.FR_30_FPS: 3347 retValue = 30; 3348 break; 3349 3350 default: 3351 retValue = -1; 3352 } 3353 return retValue; 3354 } 3355 3356 /** 3357 * Maps the file type used in JAVA layer 3358 * to file type used in native layer 3359 * 3360 * @param fileType The 
file type in JAVA layer 3361 * 3362 * @return The File type in native layer 3363 */ 3364 int getMediaItemFileType(int fileType) { 3365 int retValue = -1; 3366 3367 switch (fileType) { 3368 case MediaProperties.FILE_UNSUPPORTED: 3369 retValue = FileType.UNSUPPORTED; 3370 break; 3371 case MediaProperties.FILE_3GP: 3372 retValue = FileType.THREE_GPP; 3373 break; 3374 case MediaProperties.FILE_MP4: 3375 retValue = FileType.MP4; 3376 break; 3377 case MediaProperties.FILE_JPEG: 3378 retValue = FileType.JPG; 3379 break; 3380 case MediaProperties.FILE_PNG: 3381 retValue = FileType.PNG; 3382 break; 3383 case MediaProperties.FILE_M4V: 3384 retValue = FileType.M4V; 3385 break; 3386 3387 default: 3388 retValue = -1; 3389 } 3390 return retValue; 3391 3392 } 3393 3394 /** 3395 * Maps the rendering mode used in native layer 3396 * to rendering mode used in JAVA layer 3397 * 3398 * @param renderingMode The rendering mode in JAVA layer 3399 * 3400 * @return The rendering mode in native layer 3401 */ 3402 int getMediaItemRenderingMode(int renderingMode) { 3403 int retValue = -1; 3404 switch (renderingMode) { 3405 case MediaItem.RENDERING_MODE_BLACK_BORDER: 3406 retValue = MediaRendering.BLACK_BORDERS; 3407 break; 3408 case MediaItem.RENDERING_MODE_STRETCH: 3409 retValue = MediaRendering.RESIZING; 3410 break; 3411 case MediaItem.RENDERING_MODE_CROPPING: 3412 retValue = MediaRendering.CROPPING; 3413 break; 3414 3415 default: 3416 retValue = -1; 3417 } 3418 return retValue; 3419 } 3420 3421 /** 3422 * Maps the transition behavior used in JAVA layer 3423 * to transition behavior used in native layer 3424 * 3425 * @param transitionType The transition behavior in JAVA layer 3426 * 3427 * @return The transition behavior in native layer 3428 */ 3429 int getVideoTransitionBehaviour(int transitionType) { 3430 int retValue = -1; 3431 switch (transitionType) { 3432 case Transition.BEHAVIOR_SPEED_UP: 3433 retValue = TransitionBehaviour.SPEED_UP; 3434 break; 3435 case 
Transition.BEHAVIOR_SPEED_DOWN: 3436 retValue = TransitionBehaviour.SPEED_DOWN; 3437 break; 3438 case Transition.BEHAVIOR_LINEAR: 3439 retValue = TransitionBehaviour.LINEAR; 3440 break; 3441 case Transition.BEHAVIOR_MIDDLE_SLOW: 3442 retValue = TransitionBehaviour.SLOW_MIDDLE; 3443 break; 3444 case Transition.BEHAVIOR_MIDDLE_FAST: 3445 retValue = TransitionBehaviour.FAST_MIDDLE; 3446 break; 3447 3448 default: 3449 retValue = -1; 3450 } 3451 return retValue; 3452 } 3453 3454 /** 3455 * Maps the transition slide direction used in JAVA layer 3456 * to transition slide direction used in native layer 3457 * 3458 * @param slideDirection The transition slide direction 3459 * in JAVA layer 3460 * 3461 * @return The transition slide direction in native layer 3462 */ 3463 int getSlideSettingsDirection(int slideDirection) { 3464 int retValue = -1; 3465 switch (slideDirection) { 3466 case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN: 3467 retValue = SlideDirection.RIGHT_OUT_LEFT_IN; 3468 break; 3469 case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN: 3470 retValue = SlideDirection.LEFT_OUT_RIGTH_IN; 3471 break; 3472 case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN: 3473 retValue = SlideDirection.TOP_OUT_BOTTOM_IN; 3474 break; 3475 case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN: 3476 retValue = SlideDirection.BOTTOM_OUT_TOP_IN; 3477 break; 3478 3479 default: 3480 retValue = -1; 3481 } 3482 return retValue; 3483 } 3484 3485 /** 3486 * Maps the effect color type used in JAVA layer 3487 * to effect color type used in native layer 3488 * 3489 * @param effect The EffectColor reference 3490 * 3491 * @return The color effect value from native layer 3492 */ 3493 private int getEffectColorType(EffectColor effect) { 3494 int retValue = -1; 3495 switch (effect.getType()) { 3496 case EffectColor.TYPE_COLOR: 3497 if (effect.getColor() == EffectColor.GREEN) { 3498 retValue = VideoEffect.GREEN; 3499 } else if (effect.getColor() == EffectColor.PINK) { 3500 retValue = VideoEffect.PINK; 
3501 } else if (effect.getColor() == EffectColor.GRAY) { 3502 retValue = VideoEffect.BLACK_AND_WHITE; 3503 } else { 3504 retValue = VideoEffect.COLORRGB16; 3505 } 3506 break; 3507 case EffectColor.TYPE_GRADIENT: 3508 retValue = VideoEffect.GRADIENT; 3509 break; 3510 case EffectColor.TYPE_SEPIA: 3511 retValue = VideoEffect.SEPIA; 3512 break; 3513 case EffectColor.TYPE_NEGATIVE: 3514 retValue = VideoEffect.NEGATIVE; 3515 break; 3516 case EffectColor.TYPE_FIFTIES: 3517 retValue = VideoEffect.FIFTIES; 3518 break; 3519 3520 default: 3521 retValue = -1; 3522 } 3523 return retValue; 3524 } 3525 3526 /** 3527 * Calculates video resolution for output clip 3528 * based on clip's height and aspect ratio of storyboard 3529 * 3530 * @param aspectRatio The aspect ratio of story board 3531 * @param height The height of clip 3532 * 3533 * @return The video resolution 3534 */ 3535 private int findVideoResolution(int aspectRatio, int height) { 3536 final Pair<Integer, Integer>[] resolutions; 3537 final Pair<Integer, Integer> maxResolution; 3538 int retValue = VideoFrameSize.SIZE_UNDEFINED; 3539 switch (aspectRatio) { 3540 case MediaProperties.ASPECT_RATIO_3_2: 3541 if (height == MediaProperties.HEIGHT_480) 3542 retValue = VideoFrameSize.NTSC; 3543 else if (height == MediaProperties.HEIGHT_720) 3544 retValue = VideoFrameSize.W720p; 3545 break; 3546 case MediaProperties.ASPECT_RATIO_16_9: 3547 if (height == MediaProperties.HEIGHT_480) 3548 retValue = VideoFrameSize.WVGA16x9; 3549 else if (height == MediaProperties.HEIGHT_720) 3550 retValue = VideoFrameSize.V720p; 3551 break; 3552 case MediaProperties.ASPECT_RATIO_4_3: 3553 if (height == MediaProperties.HEIGHT_480) 3554 retValue = VideoFrameSize.VGA; 3555 else if (height == MediaProperties.HEIGHT_720) 3556 retValue = VideoFrameSize.S720p; 3557 break; 3558 case MediaProperties.ASPECT_RATIO_5_3: 3559 if (height == MediaProperties.HEIGHT_480) 3560 retValue = VideoFrameSize.WVGA; 3561 break; 3562 case MediaProperties.ASPECT_RATIO_11_9: 
3563 if (height == MediaProperties.HEIGHT_144) 3564 retValue = VideoFrameSize.QCIF; 3565 else if (height == MediaProperties.HEIGHT_288) 3566 retValue = VideoFrameSize.CIF; 3567 break; 3568 } 3569 if (retValue == VideoFrameSize.SIZE_UNDEFINED) { 3570 resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio()); 3571 // Get the highest resolution 3572 maxResolution = resolutions[resolutions.length - 1]; 3573 retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second); 3574 } 3575 3576 return retValue; 3577 } 3578 3579 /** 3580 * This method is responsible for exporting a movie 3581 * 3582 * @param filePath The output file path 3583 * @param projectDir The output project directory 3584 * @param height The height of clip 3585 * @param bitrate The bitrate at which the movie should be exported 3586 * @param mediaItemsList The media items list 3587 * @param mediaTransitionList The transitions list 3588 * @param mediaBGMList The background track list 3589 * @param listener The ExportProgressListener 3590 * 3591 */ 3592 void export(String filePath, String projectDir, int height, int bitrate, 3593 List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList, 3594 List<AudioTrack> mediaBGMList, ExportProgressListener listener) { 3595 3596 int outBitrate = 0; 3597 mExportFilename = filePath; 3598 previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null); 3599 mExportProgressListener = listener; 3600 3601 mProgressToApp = 0; 3602 3603 switch (bitrate) { 3604 case MediaProperties.BITRATE_28K: 3605 outBitrate = Bitrate.BR_32_KBPS; 3606 break; 3607 case MediaProperties.BITRATE_40K: 3608 outBitrate = Bitrate.BR_48_KBPS; 3609 break; 3610 case MediaProperties.BITRATE_64K: 3611 outBitrate = Bitrate.BR_64_KBPS; 3612 break; 3613 case MediaProperties.BITRATE_96K: 3614 outBitrate = Bitrate.BR_96_KBPS; 3615 break; 3616 case MediaProperties.BITRATE_128K: 3617 outBitrate = Bitrate.BR_128_KBPS; 3618 break; 3619 case 
MediaProperties.BITRATE_192K: 3620 outBitrate = Bitrate.BR_192_KBPS; 3621 break; 3622 case MediaProperties.BITRATE_256K: 3623 outBitrate = Bitrate.BR_256_KBPS; 3624 break; 3625 case MediaProperties.BITRATE_384K: 3626 outBitrate = Bitrate.BR_384_KBPS; 3627 break; 3628 case MediaProperties.BITRATE_512K: 3629 outBitrate = Bitrate.BR_512_KBPS; 3630 break; 3631 case MediaProperties.BITRATE_800K: 3632 outBitrate = Bitrate.BR_800_KBPS; 3633 break; 3634 case MediaProperties.BITRATE_2M: 3635 outBitrate = Bitrate.BR_2_MBPS; 3636 break; 3637 3638 case MediaProperties.BITRATE_5M: 3639 outBitrate = Bitrate.BR_5_MBPS; 3640 break; 3641 case MediaProperties.BITRATE_8M: 3642 outBitrate = Bitrate.BR_8_MBPS; 3643 break; 3644 3645 default: 3646 throw new IllegalArgumentException("Argument Bitrate incorrect"); 3647 } 3648 mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 3649 mPreviewEditSettings.outputFile = mOutputFilename = filePath; 3650 3651 int aspectRatio = mVideoEditor.getAspectRatio(); 3652 mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height); 3653 mPreviewEditSettings.videoFormat = VideoFormat.H264; 3654 mPreviewEditSettings.audioFormat = AudioFormat.AAC; 3655 mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 3656 mPreviewEditSettings.maxFileSize = 0; 3657 mPreviewEditSettings.audioChannels = 2; 3658 mPreviewEditSettings.videoBitrate = outBitrate; 3659 mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS; 3660 3661 mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1]; 3662 for (int index = 0; index < mTotalClips - 1; index++) { 3663 mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings(); 3664 mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType = 3665 VideoTransition.NONE; 3666 mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType = 3667 AudioTransition.NONE; 3668 } 3669 3670 for (int clipCnt = 0; clipCnt < 
mPreviewEditSettings.clipSettingsArray.length; clipCnt++) { 3671 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) { 3672 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = 3673 mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath; 3674 } 3675 } 3676 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings); 3677 3678 int err = 0; 3679 try { 3680 mProcessingState = PROCESSING_EXPORT; 3681 mProcessingObject = null; 3682 err = generateClip(mPreviewEditSettings); 3683 mProcessingState = PROCESSING_NONE; 3684 } catch (IllegalArgumentException ex) { 3685 Log.e(TAG, "IllegalArgument for generateClip"); 3686 throw ex; 3687 } catch (IllegalStateException ex) { 3688 Log.e(TAG, "IllegalStateExceptiont for generateClip"); 3689 throw ex; 3690 } catch (RuntimeException ex) { 3691 Log.e(TAG, "RuntimeException for generateClip"); 3692 throw ex; 3693 } 3694 3695 if (err != 0) { 3696 Log.e(TAG, "RuntimeException for generateClip"); 3697 throw new RuntimeException("generateClip failed with error=" + err); 3698 } 3699 3700 mExportProgressListener = null; 3701 } 3702 3703 /** 3704 * This methods takes care of stopping the Export process 3705 * 3706 * @param The input file name for which export has to be stopped 3707 */ 3708 void stop(String filename) { 3709 try { 3710 stopEncoding(); 3711 new File(mExportFilename).delete(); 3712 } catch (IllegalStateException ex) { 3713 Log.e(TAG, "Illegal state exception in unload settings"); 3714 throw ex; 3715 } catch (RuntimeException ex) { 3716 Log.e(TAG, "Runtime exception in unload settings"); 3717 throw ex; 3718 } 3719 } 3720 3721 /** 3722 * This method extracts a frame from the input file 3723 * and returns the frame as a bitmap 3724 * 3725 * @param inputFile The inputFile 3726 * @param width The width of the output frame 3727 * @param height The height of the output frame 3728 * @param timeMS The time in ms at which the frame has to be extracted 3729 */ 3730 Bitmap 
getPixels(String inputFile, int width, int height, long timeMS) { 3731 if (inputFile == null) { 3732 throw new IllegalArgumentException("Invalid input file"); 3733 } 3734 3735 /* Make width and height as even */ 3736 final int newWidth = (width + 1) & 0xFFFFFFFE; 3737 final int newHeight = (height + 1) & 0xFFFFFFFE; 3738 3739 /* Create a temp bitmap for resized thumbnails */ 3740 Bitmap tempBitmap = null; 3741 if ((newWidth != width) || (newHeight != height)) { 3742 tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888); 3743 } 3744 3745 IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4); 3746 Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 3747 nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS); 3748 3749 if ((newWidth == width) && (newHeight == height)) { 3750 bitmap.copyPixelsFromBuffer(rgb888); 3751 } else { 3752 /* Create a temp bitmap to be used for resize */ 3753 tempBitmap.copyPixelsFromBuffer(rgb888); 3754 3755 /* Create a canvas to resize */ 3756 final Canvas canvas = new Canvas(bitmap); 3757 canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight), 3758 new Rect(0, 0, width, height), sResizePaint); 3759 } 3760 3761 if (tempBitmap != null) { 3762 tempBitmap.recycle(); 3763 } 3764 3765 return bitmap; 3766 } 3767 3768 /** 3769 * This method extracts a list of frame from the 3770 * input file and returns the frame in bitmap array 3771 * 3772 * @param filename The inputFile 3773 * @param width The width of the output frame 3774 * @param height The height of the output frame 3775 * @param startMs The starting time in ms 3776 * @param endMs The end time in ms 3777 * @param thumbnailCount The number of frames to be extracted 3778 * from startMs to endMs 3779 * 3780 * @return The frames as bitmaps in bitmap array 3781 **/ 3782 Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs, 3783 int thumbnailCount) { 3784 int[] rgb888 = null; 
3785 int thumbnailSize = 0; 3786 Bitmap tempBitmap = null; 3787 3788 /* Make width and height as even */ 3789 final int newWidth = (width + 1) & 0xFFFFFFFE; 3790 final int newHeight = (height + 1) & 0xFFFFFFFE; 3791 thumbnailSize = newWidth * newHeight * 4; 3792 3793 /* Create a temp bitmap for resized thumbnails */ 3794 if ((newWidth != width) || (newHeight != height)) { 3795 tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888); 3796 } 3797 int i = 0; 3798 int deltaTime = (int)(endMs - startMs) / thumbnailCount; 3799 Bitmap[] bitmaps = null; 3800 3801 try { 3802 // This may result in out of Memory Error 3803 rgb888 = new int[thumbnailSize * thumbnailCount]; 3804 bitmaps = new Bitmap[thumbnailCount]; 3805 } catch (Throwable e) { 3806 // Allocating to new size with Fixed count 3807 try { 3808 rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED]; 3809 bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED]; 3810 thumbnailCount = MAX_THUMBNAIL_PERMITTED; 3811 } catch (Throwable ex) { 3812 throw new RuntimeException("Memory allocation fails, thumbnail count too large: " 3813 + thumbnailCount); 3814 } 3815 } 3816 IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize); 3817 nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount, 3818 startMs, endMs); 3819 3820 for (; i < thumbnailCount; i++) { 3821 bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 3822 tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize); 3823 tmpBuffer.rewind(); 3824 3825 if ((newWidth == width) && (newHeight == height)) { 3826 bitmaps[i].copyPixelsFromBuffer(tmpBuffer); 3827 } else { 3828 /* Copy the out rgb buffer to temp bitmap */ 3829 tempBitmap.copyPixelsFromBuffer(tmpBuffer); 3830 3831 /* Create a canvas to resize */ 3832 final Canvas canvas = new Canvas(bitmaps[i]); 3833 canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight), 3834 new Rect(0, 0, width, height), sResizePaint); 3835 } 3836 } 3837 3838 if 
(tempBitmap != null) { 3839 tempBitmap.recycle(); 3840 } 3841 3842 return bitmaps; 3843 } 3844 3845 /** 3846 * This method generates the audio graph 3847 * 3848 * @param uniqueId The unique id 3849 * @param inFileName The inputFile 3850 * @param OutAudiGraphFileName output filename 3851 * @param frameDuration The each frame duration 3852 * @param audioChannels The number of audio channels 3853 * @param samplesCount Total number of samples count 3854 * @param listener ExtractAudioWaveformProgressListener reference 3855 * @param isVideo The flag to indicate if the file is video file or not 3856 * 3857 **/ 3858 void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName, 3859 int frameDuration, int audioChannels, int samplesCount, 3860 ExtractAudioWaveformProgressListener listener, boolean isVideo) { 3861 String tempPCMFileName; 3862 3863 mExtractAudioWaveformProgressListener = listener; 3864 3865 /** 3866 * In case of Video, first call will generate the PCM file to make the 3867 * audio graph 3868 */ 3869 if (isVideo) { 3870 tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm"); 3871 } else { 3872 tempPCMFileName = mAudioTrackPCMFilePath; 3873 } 3874 3875 /** 3876 * For Video item, generate the PCM 3877 */ 3878 if (isVideo) { 3879 nativeGenerateRawAudio(inFileName, tempPCMFileName); 3880 } 3881 3882 nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration, 3883 audioChannels, samplesCount); 3884 3885 /** 3886 * Once the audio graph file is generated, delete the pcm file 3887 */ 3888 if (isVideo) { 3889 new File(tempPCMFileName).delete(); 3890 } 3891 } 3892 3893 void clearPreviewSurface(Surface surface) { 3894 nativeClearSurface(surface); 3895 } 3896 3897 /** 3898 * Grab the semaphore which arbitrates access to the editor 3899 * 3900 * @throws InterruptedException 3901 */ 3902 private void lock() throws InterruptedException { 3903 if (Log.isLoggable(TAG, Log.DEBUG)) { 3904 Log.d(TAG, "lock: grabbing 
semaphore", new Throwable()); 3905 } 3906 mLock.acquire(); 3907 if (Log.isLoggable(TAG, Log.DEBUG)) { 3908 Log.d(TAG, "lock: grabbed semaphore"); 3909 } 3910 } 3911 3912 /** 3913 * Release the semaphore which arbitrates access to the editor 3914 */ 3915 private void unlock() { 3916 if (Log.isLoggable(TAG, Log.DEBUG)) { 3917 Log.d(TAG, "unlock: releasing semaphore"); 3918 } 3919 mLock.release(); 3920 } 3921 3922 /** Native Methods */ 3923 native Properties getMediaProperties(String file) throws IllegalArgumentException, 3924 IllegalStateException, RuntimeException, Exception; 3925 3926 /** 3927 * Get the version of ManualEdit. 3928 * 3929 * @return version of ManualEdit 3930 * @throws RuntimeException if an error occurred 3931 * @see Version 3932 */ 3933 private static native Version getVersion() throws RuntimeException; 3934 3935 /** 3936 * Returns the video thumbnail in an array of integers. Output format is 3937 * ARGB8888. 3938 * 3939 * @param pixelArray the array that receives the pixel values 3940 * @param width width of the video thumbnail 3941 * @param height height of the video thumbnail 3942 * @param timeMS desired time of the thumbnail in ms 3943 * @return actual time in ms of the thumbnail generated 3944 * @throws IllegalStateException if the class has not been initialized 3945 * @throws IllegalArgumentException if the pixelArray is not available or 3946 * one of the dimensions is negative or zero or the time is 3947 * negative 3948 * @throws RuntimeException on runtime errors in native code 3949 */ 3950 private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height, 3951 long timeMS); 3952 3953 private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height, 3954 int timeMS, int nosofTN, long startTimeMs, long endTimeMs); 3955 3956 /** 3957 * Releases the JNI and cleans up the core native module.. 
Should be called 3958 * only after init( ) 3959 * 3960 * @throws IllegalStateException if the method could not be called 3961 */ 3962 private native void release() throws IllegalStateException, RuntimeException; 3963 3964 /* 3965 * Clear the preview surface 3966 */ 3967 private native void nativeClearSurface(Surface surface); 3968 3969 /** 3970 * Stops the encoding. This method should only be called after encoding has 3971 * started using method <code> startEncoding</code> 3972 * 3973 * @throws IllegalStateException if the method could not be called 3974 */ 3975 private native void stopEncoding() throws IllegalStateException, RuntimeException; 3976 3977 3978 private native void _init(String tempPath, String libraryPath) 3979 throws IllegalArgumentException, IllegalStateException, RuntimeException; 3980 3981 private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs, 3982 int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException, 3983 IllegalStateException, RuntimeException; 3984 3985 private native void nativePopulateSettings(EditSettings editSettings, 3986 PreviewClipProperties mProperties, AudioSettings mAudioSettings) 3987 throws IllegalArgumentException, IllegalStateException, RuntimeException; 3988 3989 private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs, 3990 int surfaceWidth, int surfaceHeight) 3991 throws IllegalArgumentException, 3992 IllegalStateException, RuntimeException; 3993 3994 private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath, 3995 int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs) 3996 throws IllegalArgumentException, IllegalStateException, RuntimeException; 3997 3998 private native int nativeStopPreview(); 3999 4000 private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath, 4001 int frameDuration, int channels, int sampleCount); 4002 4003 private native int nativeGenerateRawAudio(String 
InFileName, String PCMFileName); 4004 4005 private native int nativeGenerateClip(EditSettings editSettings) 4006 throws IllegalArgumentException, IllegalStateException, RuntimeException; 4007 4008} 4009