MediaArtistNativeHelper.java revision bcbf68583e38eeb79266f9f3a8c3d1b17f75a8c2
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.videoeditor;

import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.IntBuffer;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Semaphore;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.media.videoeditor.VideoEditor.ExportProgressListener;
import android.media.videoeditor.VideoEditor.PreviewProgressListener;
import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
import android.util.Log;
import android.util.Pair;
import android.view.Surface;

/**
 * This class provides native methods to be used by MediaArtist {@hide}
 */
class MediaArtistNativeHelper {
    private static final String TAG = "MediaArtistNativeHelper";

    // Loads the JNI library backing every native method declared in this class.
    static {
        System.loadLibrary("videoeditor_jni");
    }

    /** Maximum number of thumbnails allowed per extraction request. */
    private static final int MAX_THUMBNAIL_PERMITTED = 8;

    /** Task identifier: settings are being loaded. */
    public static final int TASK_LOADING_SETTINGS = 1;
    /** Task identifier: encoding is in progress. */
    public static final int TASK_ENCODING = 2;

    /**
     * The resize paint (bitmap filtering enabled for smoother scaling).
     */
    private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);

    /** The video editor this helper instance services. */
    private final VideoEditor mVideoEditor;
    /*
     * Semaphore to control preview calls
     */
    private final Semaphore mLock;

    /** Edit settings describing the whole storyboard. */
    private EditSettings mStoryBoardSettings;

    /** Destination file name of the current operation. */
    private String mOutputFilename;

    /** Cached per-clip properties for preview; null until computed. */
    private PreviewClipProperties mClipProperties = null;

    /** Edit settings used while generating the preview. */
    private EditSettings mPreviewEditSettings;

    /** Audio settings for the added audio track; null when none is set. */
    private AudioSettings mAudioSettings = null;

    /** The added audio track; null when none is set. */
    private AudioTrack mAudioTrack = null;

    // True when the preview data must be regenerated before the next preview.
    private boolean mInvalidatePreviewArray = true;

    // True when the audio track PCM must be regenerated.
    private boolean mRegenerateAudio = true;

    /** Destination file name of the current export; null when not exporting. */
    private String mExportFilename = null;

    /** Last progress value reported to the application. */
    private int mProgressToApp;

    /** Overlay file rendered on top of the preview frame. */
    private String mRenderPreviewOverlayFile;
    /** Rendering mode used for the preview overlay. */
    private int mRenderPreviewRenderingMode;

    /** True until the first progress callback of an operation has been sent. */
    private boolean mIsFirstProgress;

    /** File name used for the decoded audio-track PCM data. */
    private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";

    // Processing indication
    public static final int PROCESSING_NONE = 0;
    public static final int PROCESSING_AUDIO_PCM = 1;
    public static final int PROCESSING_TRANSITION = 2;
    public static final int PROCESSING_KENBURNS = 3;
    public static final int PROCESSING_INTERMEDIATE1 = 11;
    public static final int PROCESSING_INTERMEDIATE2 = 12;
    public static final int PROCESSING_INTERMEDIATE3 = 13;
    public static final int PROCESSING_EXPORT = 20;

    /** One of the PROCESSING_* values above. */
    private int mProcessingState;
    /** The item (clip/transition/track) currently being processed. */
    private Object mProcessingObject;
    private PreviewProgressListener mPreviewProgressListener;
    private ExportProgressListener mExportProgressListener;
    private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
    private MediaProcessingProgressListener mMediaProcessingProgressListener;
    /** Root path of the project this helper operates on. */
    private final String mProjectPath;

    /** Last preview progress time, in milliseconds. */
    private long mPreviewProgress;

    /** Absolute path of the generated audio-track PCM file. */
    private String mAudioTrackPCMFilePath;

    /** Number of clips in the current storyboard. */
    private int mTotalClips = 0;

    /** Set when a native error has been reported and not yet consumed. */
    private boolean mErrorFlagSet = false;

    // Opaque handle to the native edit context; accessed from native code only.
    @SuppressWarnings("unused")
    private int mManualEditContext;

    /* Listeners */

    /**
     * Interface definition for a listener to be invoked when there is an update
     * in a running task.
     */
    public interface OnProgressUpdateListener {
        /**
         * Called when there is an update.
         *
         * @param taskId id of the task reporting an update.
         * @param progress progress of the task [0..100].
         * @see BasicEdit#TASK_ENCODING
         */
        public void OnProgressUpdate(int taskId, int progress);
    }

    /** Defines the version. */
    public final class Version {

        /** Major version number */
        public int major;

        /** Minor version number */
        public int minor;

        /** Revision number */
        public int revision;

        /** VIDEOEDITOR major version number */
        private static final int VIDEOEDITOR_MAJOR_VERSION = 0;

        /** VIDEOEDITOR minor version number */
        private static final int VIDEOEDITOR_MINOR_VERSION = 0;

        /** VIDEOEDITOR revision number */
        private static final int VIDEOEDITOR_REVISION_VERSION = 1;

        /**
         * Returns a new {@link Version} populated with the current
         * VIDEOEDITOR version numbers.
         */
        public Version getVersion() {
            Version version = new Version();

            version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
            version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
            version.revision = Version.VIDEOEDITOR_REVISION_VERSION;

            return version;
        }
    }

    /**
     * Defines output audio formats.
     */
    public final class AudioFormat {
        /** No audio present in output clip. Used to generate video only clip */
        public static final int NO_AUDIO = 0;

        /** AMR Narrow Band. */
        public static final int AMR_NB = 1;

        /** Advanced Audio Coding (AAC). */
        public static final int AAC = 2;

        /** Advanced Audio Codec Plus (HE-AAC v1). */
        public static final int AAC_PLUS = 3;

        /** Advanced Audio Codec Plus (HE-AAC v2). */
        public static final int ENHANCED_AAC_PLUS = 4;

        /** MPEG layer 3 (MP3). */
        public static final int MP3 = 5;

        /** Enhanced Variable Rate Codec (EVRC). */
        public static final int EVRC = 6;

        /** PCM (raw, uncompressed audio). */
        public static final int PCM = 7;

        /** No transcoding. Output audio format is same as input audio format */
        public static final int NULL_AUDIO = 254;

        /** Unsupported audio format. */
        public static final int UNSUPPORTED_AUDIO = 255;
    }

    /**
     * Defines audio sampling frequencies.
     */
    public final class AudioSamplingFrequency {
        /**
         * Default sampling frequency. Uses the default frequency for a specific
         * audio format. For AAC the only supported (and thus default) sampling
         * frequency is 16 kHz. For this audio format the sampling frequency in
         * the OutputParams.
         **/
        public static final int FREQ_DEFAULT = 0;

        /** Audio sampling frequency of 8000 Hz. */
        public static final int FREQ_8000 = 8000;

        /** Audio sampling frequency of 11025 Hz. */
        public static final int FREQ_11025 = 11025;

        /** Audio sampling frequency of 12000 Hz. */
        public static final int FREQ_12000 = 12000;

        /** Audio sampling frequency of 16000 Hz. */
        public static final int FREQ_16000 = 16000;

        /** Audio sampling frequency of 22050 Hz. */
        public static final int FREQ_22050 = 22050;

        /** Audio sampling frequency of 24000 Hz. */
        public static final int FREQ_24000 = 24000;

        /** Audio sampling frequency of 32000 Hz. */
        public static final int FREQ_32000 = 32000;

        /** Audio sampling frequency of 44100 Hz. */
        public static final int FREQ_44100 = 44100;

        /** Audio sampling frequency of 48000 Hz. Not available for output file. */
        public static final int FREQ_48000 = 48000;
    }

    /**
     * Defines the supported fixed audio and video bitrates. These values are
     * for output audio video only.
     */
    public final class Bitrate {
        /** Variable bitrate. Means no bitrate regulation */
        public static final int VARIABLE = -1;

        /** An undefined bitrate. */
        public static final int UNDEFINED = 0;

        /** A bitrate of 9.2 kbits/s. */
        public static final int BR_9_2_KBPS = 9200;

        /** A bitrate of 12.2 kbits/s. */
        public static final int BR_12_2_KBPS = 12200;

        /** A bitrate of 16 kbits/s. */
        public static final int BR_16_KBPS = 16000;

        /** A bitrate of 24 kbits/s. */
        public static final int BR_24_KBPS = 24000;

        /** A bitrate of 32 kbits/s. */
        public static final int BR_32_KBPS = 32000;

        /** A bitrate of 48 kbits/s. */
        public static final int BR_48_KBPS = 48000;

        /** A bitrate of 64 kbits/s. */
        public static final int BR_64_KBPS = 64000;

        /** A bitrate of 96 kbits/s. */
        public static final int BR_96_KBPS = 96000;

        /** A bitrate of 128 kbits/s. */
        public static final int BR_128_KBPS = 128000;

        /** A bitrate of 192 kbits/s. */
        public static final int BR_192_KBPS = 192000;

        /** A bitrate of 256 kbits/s. */
        public static final int BR_256_KBPS = 256000;

        /** A bitrate of 288 kbits/s. */
        public static final int BR_288_KBPS = 288000;

        /** A bitrate of 384 kbits/s. */
        public static final int BR_384_KBPS = 384000;

        /** A bitrate of 512 kbits/s. */
        public static final int BR_512_KBPS = 512000;

        /** A bitrate of 800 kbits/s. */
        public static final int BR_800_KBPS = 800000;

        /** A bitrate of 2 Mbits/s. */
        public static final int BR_2_MBPS = 2000000;

        /** A bitrate of 5 Mbits/s. */
        public static final int BR_5_MBPS = 5000000;

        /** A bitrate of 8 Mbits/s. */
        public static final int BR_8_MBPS = 8000000;
    }

    /**
     * Defines all supported file types.
     */
    public final class FileType {
        /** 3GPP file type. */
        public static final int THREE_GPP = 0;

        /** MP4 file type. */
        public static final int MP4 = 1;

        /** AMR file type. */
        public static final int AMR = 2;

        /** MP3 audio file type. */
        public static final int MP3 = 3;

        /** PCM audio file type. */
        public static final int PCM = 4;

        /** JPEG image file type. */
        public static final int JPG = 5;

        /** GIF image file type. */
        public static final int GIF = 7;

        /** PNG image file type. */
        public static final int PNG = 8;

        /** M4V file type. */
        public static final int M4V = 10;

        /** Unsupported file type. */
        public static final int UNSUPPORTED = 255;
    }

    /**
     * Defines rendering types. Rendering can only be applied to files
     * containing video streams.
     **/
    public final class MediaRendering {
        /**
         * Resize to fit the output video, changing the aspect ratio if
         * needed.
         */
        public static final int RESIZING = 0;

        /**
         * Crop the input video to fit it with the output video resolution.
         **/
        public static final int CROPPING = 1;

        /**
         * Resize to fit the output video resolution but maintain the aspect
         * ratio. This framing type adds black borders if needed.
         */
        public static final int BLACK_BORDERS = 2;
    }

    /**
     * Defines the results.
     */
    public final class Result {
        /** No error. result OK */
        public static final int NO_ERROR = 0;

        /** File not found */
        public static final int ERR_FILE_NOT_FOUND = 1;

        /**
         * In case of UTF8 conversion, the size of the converted path will be
         * more than the corresponding allocated buffer.
         */
        public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;

        /** Invalid file type. */
        public static final int ERR_INVALID_FILE_TYPE = 3;

        /** Invalid effect kind. */
        public static final int ERR_INVALID_EFFECT_KIND = 4;

        /** Invalid video effect. */
        public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;

        /** Invalid audio effect. */
        public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;

        /** Invalid video transition. */
        public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;

        /** Invalid audio transition. */
        public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;

        /** Invalid encoding frame rate. */
        public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;

        /** External effect is called but this function is not set. */
        public static final int ERR_EXTERNAL_EFFECT_NULL = 10;

        /** External transition is called but this function is not set. */
        public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;

        /** Begin time cut is larger than the video clip duration. */
        public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;

        /** Begin cut time is larger or equal than end cut. */
        public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;

        /** Two consecutive transitions are overlapping on one clip. */
        public static final int ERR_OVERLAPPING_TRANSITIONS = 14;

        /** Internal error, type size mismatch. */
        public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;

        /** An input 3GPP file is invalid/corrupted. */
        public static final int ERR_INVALID_3GPP_FILE = 16;

        /** A file contains an unsupported video format. */
        public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;

        /** A file contains an unsupported audio format. */
        public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;

        /** A file format is not supported. */
        public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;

        /** An input clip has an unexpectedly large Video AU. */
        public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;

        /** An input clip has an unexpectedly large Audio AU. */
        public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;

        /** An input clip has a corrupted Audio AU. */
        public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;

        /** The video encoder encountered an Access Unit error. */
        public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;

        /** Unsupported video format for Video Editing. */
        public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;

        /** Unsupported H263 profile for Video Editing. */
        public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;

        /** Unsupported MPEG-4 profile for Video Editing. */
        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;

        /** Unsupported MPEG-4 RVLC tool for Video Editing. */
        public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;

        /** Unsupported audio format for Video Editing. */
        public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;

        /** File contains no supported stream. */
        public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;

        /** File contains no video stream or an unsupported video stream. */
        public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;

        /** Internal error, clip analysis version mismatch. */
        public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;

        /**
         * At least one of the clip analysis has been generated on another
         * platform (WIN32, ARM, etc.).
         */
        public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;

        /** Clips don't have the same video format (H263 or MPEG4). */
        public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;

        /** Clips don't have the same frame size. */
        public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;

        /** Clips don't have the same MPEG-4 time scale. */
        public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;

        /** Clips don't have the same use of MPEG-4 data partitioning. */
        public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;

        /** MP3 clips can't be assembled. */
        public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;

        /**
         * The input 3GPP file does not contain any supported audio or video
         * track.
         */
        public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;

        /**
         * The Volume of the added audio track (AddVolume) must be strictly
         * greater than zero.
         */
        public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;

        /**
         * The time at which an audio track is added can't be higher than the
         * input video track duration.
         */
        public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;

        /** The audio track file format setting is undefined. */
        public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;

        /** The added audio track stream has an unsupported format. */
        public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;

        /** The audio mixing feature doesn't support the audio track type. */
        public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;

        /** The audio mixing feature doesn't support MP3 audio tracks. */
        public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;

        /**
         * An added audio track limits the available features: uiAddCts must be
         * 0 and bRemoveOriginal must be true.
         */
        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;

        /**
         * An added audio track limits the available features: uiAddCts must be
         * 0 and bRemoveOriginal must be true.
         * NOTE(review): comment appears copy-pasted from
         * ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK — confirm the intended
         * AAC-specific meaning against the native error definitions.
         */
        public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;

        /** Input audio track is not of a type that can be mixed with output. */
        public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;

        /** Input audio track is not AMR-NB, so it can't be mixed with output. */
        public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;

        /**
         * An added EVRC audio track limit the available features: uiAddCts must
         * be 0 and bRemoveOriginal must be true.
         */
        public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;

        /** H263 profiles other than 0 are not supported. */
        public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;

        /** File contains no video stream or an unsupported video stream. */
        public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;

        /** Transcoding of the input file(s) is necessary. */
        public static final int WAR_TRANSCODING_NECESSARY = 53;

        /**
         * The size of the output file will exceed the maximum configured value.
         */
        public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;

        /** The time scale is too big. */
        public static final int WAR_TIMESCALE_TOO_BIG = 55;

        /** The year is out of range */
        public static final int ERR_CLOCK_BAD_REF_YEAR = 56;

        /** The directory could not be opened */
        public static final int ERR_DIR_OPEN_FAILED = 57;

        /** The directory could not be read */
        public static final int ERR_DIR_READ_FAILED = 58;

        /** There are no more entries in the current directory */
        public static final int ERR_DIR_NO_MORE_ENTRY = 59;

        /** The input parameter/s has error */
        public static final int ERR_PARAMETER = 60;

        /** There is a state machine error */
        public static final int ERR_STATE = 61;

        /** Memory allocation failed */
        public static final int ERR_ALLOC = 62;

        /** Context is invalid */
        public static final int ERR_BAD_CONTEXT = 63;

        /** Context creation failed */
        public static final int ERR_CONTEXT_FAILED = 64;

        /** Invalid stream ID */
        public static final int ERR_BAD_STREAM_ID = 65;

        /** Invalid option ID */
        public static final int ERR_BAD_OPTION_ID = 66;

        /** The option is write only */
        public static final int ERR_WRITE_ONLY = 67;

        /** The option is read only */
        public static final int ERR_READ_ONLY = 68;

        /** The feature is not implemented in this version */
        public static final int ERR_NOT_IMPLEMENTED = 69;

        /** The media type is not supported */
        public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;

        /** No data to be encoded */
        public static final int WAR_NO_DATA_YET = 71;

        /** No data to be decoded */
        public static final int WAR_NO_MORE_STREAM = 72;

        /** Time stamp is invalid */
        public static final int WAR_INVALID_TIME = 73;

        /** No more data to be decoded */
        public static final int WAR_NO_MORE_AU = 74;

        /** Semaphore timed out */
        public static final int WAR_TIME_OUT = 75;

        /** Memory buffer is full */
        public static final int WAR_BUFFER_FULL = 76;

        /** Server has asked for redirection */
        public static final int WAR_REDIRECT = 77;

        /** Too many streams in input */
        public static final int WAR_TOO_MUCH_STREAMS = 78;

        /** The file cannot be opened/ written into as it is locked */
        public static final int ERR_FILE_LOCKED = 79;

        /** The file access mode is invalid */
        public static final int ERR_FILE_BAD_MODE_ACCESS = 80;

        /** The file pointer points to an invalid location */
        public static final int ERR_FILE_INVALID_POSITION = 81;

        /** Invalid string */
        public static final int ERR_STR_BAD_STRING = 94;

        /** The input string cannot be converted */
        public static final int ERR_STR_CONV_FAILED = 95;

        /** The string size is too large */
        public static final int ERR_STR_OVERFLOW = 96;

        /** Bad string arguments */
        public static final int ERR_STR_BAD_ARGS = 97;

        /** The string value is larger than maximum size allowed */
        public static final int WAR_STR_OVERFLOW = 98;

        /** The string value is not present in this comparison operation */
        public static final int WAR_STR_NOT_FOUND = 99;

        /** The thread is not started */
        public static final int ERR_THREAD_NOT_STARTED = 100;

        /** Transcoding done warning */
        public static final int WAR_TRANSCODING_DONE = 101;

        /** Unsupported mediatype */
        public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;

        /** Input file contains invalid/unsupported streams */
        public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;

        /** Invalid input file */
        public static final int ERR_INVALID_INPUT_FILE = 104;

        /** Invalid output video format */
        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;

        /** Invalid output video frame size */
        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;

        /** Invalid output video frame rate */
        public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;

        /** Invalid output audio format */
        public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;

        /** Invalid video frame size for H.263 */
        public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;

        /** Invalid video frame rate for H.263 */
        public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;

        /** invalid playback duration */
        public static final int ERR_DURATION_IS_NULL = 111;

        /** Invalid H.263 profile in file */
        public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;

        /** Invalid AAC sampling frequency */
        public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;

        /** Audio conversion failure */
        public static final int ERR_AUDIO_CONVERSION_FAILED = 114;

        /** Invalid trim start and end times */
        public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;

        /** End time smaller than start time for trim */
        public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;

        /** Output file size is small */
        public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;

        /** Output video bitrate is too low */
        public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;

        /** Output audio bitrate is too low */
        public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;

        /** Output video bitrate is too high */
        public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;

        /** Output audio bitrate is too high */
        public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;

        /** Output file size is too small */
        public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;

        /** Unknown stream type */
        public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;

        /** Invalid metadata in input stream */
        public static final int WAR_READER_NO_METADATA = 124;

        /** Invalid file reader info warning */
        public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;

        /** Warning to indicate the writer is being stopped */
        public static final int WAR_WRITER_STOP_REQ = 131;

        /** Video decoder failed to provide frame for transcoding */
        public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;

        /** Video deblocking filter is not implemented */
        public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;

        /** H.263 decoder profile not supported */
        public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;

        /** The input file contains unsupported H.263 profile */
        public static final int ERR_DECODER_H263_NOT_BASELINE = 135;

        /** There is no more space to store the output file */
        public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;

        /** Internal error. */
        public static final int ERR_INTERNAL = 255;
    }

    /**
     * Defines output video formats.
     */
    public final class VideoFormat {
        /** No video present in output clip.
         * Used to generate audio only clip */
        public static final int NO_VIDEO = 0;

        /** H263 baseline format. */
        public static final int H263 = 1;

        /** MPEG4 video Simple Profile format. */
        public static final int MPEG4 = 2;

        /** MPEG4 video Simple Profile format with support for EMP. */
        public static final int MPEG4_EMP = 3;

        /** H264 video */
        public static final int H264 = 4;

        /** No transcoding. Output video format is same as input video format */
        public static final int NULL_VIDEO = 254;

        /** Unsupported video format. */
        public static final int UNSUPPORTED = 255;
    }

    /** Defines video profiles and levels. */
    public final class VideoProfile {
        /** H263, Profile 0, Level 10. */
        public static final int H263_PROFILE_0_LEVEL_10 = MediaProperties.H263_PROFILE_0_LEVEL_10;

        /** H263, Profile 0, Level 20. */
        public static final int H263_PROFILE_0_LEVEL_20 = MediaProperties.H263_PROFILE_0_LEVEL_20;

        /** H263, Profile 0, Level 30. */
        public static final int H263_PROFILE_0_LEVEL_30 = MediaProperties.H263_PROFILE_0_LEVEL_30;

        /** H263, Profile 0, Level 40. */
        public static final int H263_PROFILE_0_LEVEL_40 = MediaProperties.H263_PROFILE_0_LEVEL_40;

        /** H263, Profile 0, Level 45. */
        public static final int H263_PROFILE_0_LEVEL_45 = MediaProperties.H263_PROFILE_0_LEVEL_45;

        /** MPEG4, Simple Profile, Level 0. */
        public static final int MPEG4_SP_LEVEL_0 = MediaProperties.MPEG4_SP_LEVEL_0;

        /** MPEG4, Simple Profile, Level 0B. */
        public static final int MPEG4_SP_LEVEL_0B = MediaProperties.MPEG4_SP_LEVEL_0B;

        /** MPEG4, Simple Profile, Level 1. */
        public static final int MPEG4_SP_LEVEL_1 = MediaProperties.MPEG4_SP_LEVEL_1;

        /** MPEG4, Simple Profile, Level 2. */
        public static final int MPEG4_SP_LEVEL_2 = MediaProperties.MPEG4_SP_LEVEL_2;

        /** MPEG4, Simple Profile, Level 3. */
        public static final int MPEG4_SP_LEVEL_3 = MediaProperties.MPEG4_SP_LEVEL_3;

        /** MPEG4, Simple Profile, Level 4A. */
        public static final int MPEG4_SP_LEVEL_4A = MediaProperties.MPEG4_SP_LEVEL_4A;

        /** MPEG4, Simple Profile, Level 5. */
        public static final int MPEG4_SP_LEVEL_5 = MediaProperties.MPEG4_SP_LEVEL_5;

        /** H264, Profile 0, Level 1. */
        public static final int H264_PROFILE_0_LEVEL_1 = MediaProperties.H264_PROFILE_0_LEVEL_1;

        /** H264, Profile 0, Level 1b. */
        public static final int H264_PROFILE_0_LEVEL_1b = MediaProperties.H264_PROFILE_0_LEVEL_1B;

        /** H264, Profile 0, Level 1.1 */
        public static final int H264_PROFILE_0_LEVEL_1_1 = MediaProperties.H264_PROFILE_0_LEVEL_1_1;

        /** H264, Profile 0, Level 1.2 */
        public static final int H264_PROFILE_0_LEVEL_1_2 = MediaProperties.H264_PROFILE_0_LEVEL_1_2;

        /** H264, Profile 0, Level 1.3 */
        public static final int H264_PROFILE_0_LEVEL_1_3 = MediaProperties.H264_PROFILE_0_LEVEL_1_3;

        /** H264, Profile 0, Level 2. */
        public static final int H264_PROFILE_0_LEVEL_2 = MediaProperties.H264_PROFILE_0_LEVEL_2;

        /** H264, Profile 0, Level 2.1 */
        public static final int H264_PROFILE_0_LEVEL_2_1 = MediaProperties.H264_PROFILE_0_LEVEL_2_1;

        /** H264, Profile 0, Level 2.2 */
        public static final int H264_PROFILE_0_LEVEL_2_2 = MediaProperties.H264_PROFILE_0_LEVEL_2_2;

        /** H264, Profile 0, Level 3. */
        public static final int H264_PROFILE_0_LEVEL_3 = MediaProperties.H264_PROFILE_0_LEVEL_3;

        /** H264, Profile 0, Level 3.1 */
        public static final int H264_PROFILE_0_LEVEL_3_1 = MediaProperties.H264_PROFILE_0_LEVEL_3_1;

        /** H264, Profile 0, Level 3.2 */
        public static final int H264_PROFILE_0_LEVEL_3_2 = MediaProperties.H264_PROFILE_0_LEVEL_3_2;

        /** H264, Profile 0, Level 4. */
        public static final int H264_PROFILE_0_LEVEL_4 = MediaProperties.H264_PROFILE_0_LEVEL_4;

        /** H264, Profile 0, Level 4.1 */
        public static final int H264_PROFILE_0_LEVEL_4_1 = MediaProperties.H264_PROFILE_0_LEVEL_4_1;

        /** H264, Profile 0, Level 4.2 */
        public static final int H264_PROFILE_0_LEVEL_4_2 = MediaProperties.H264_PROFILE_0_LEVEL_4_2;

        /** H264, Profile 0, Level 5. */
        public static final int H264_PROFILE_0_LEVEL_5 = MediaProperties.H264_PROFILE_0_LEVEL_5;

        /** H264, Profile 0, Level 5.1 */
        public static final int H264_PROFILE_0_LEVEL_5_1 = MediaProperties.H264_PROFILE_0_LEVEL_5_1;

        /** Profile out of range. */
        public static final int OUT_OF_RANGE = MediaProperties.UNSUPPORTED_PROFILE_LEVEL;
    }

    /** Defines video frame sizes. */
    public final class VideoFrameSize {

        public static final int SIZE_UNDEFINED = -1;

        /** SQCIF 128 x 96 pixels. */
        public static final int SQCIF = 0;

        /** QQVGA 160 x 120 pixels. */
        public static final int QQVGA = 1;

        /** QCIF 176 x 144 pixels. */
        public static final int QCIF = 2;

        /** QVGA 320 x 240 pixels. */
        public static final int QVGA = 3;

        /** CIF 352 x 288 pixels. */
        public static final int CIF = 4;

        /** VGA 640 x 480 pixels. */
        public static final int VGA = 5;

        /** WVGA 800 X 480 pixels */
        public static final int WVGA = 6;

        /** NTSC 720 X 480 pixels */
        public static final int NTSC = 7;

        /** 640 x 360 */
        public static final int nHD = 8;

        /** 854 x 480 */
        public static final int WVGA16x9 = 9;

        /** 720p 1280 X 720 */
        public static final int V720p = 10;

        /** 1080 x 720 */
        public static final int W720p = 11;

        /** 960 x 720 */
        public static final int S720p = 12;
    }

    /**
     * Defines output video frame rates.
     */
    public final class VideoFrameRate {
        /** Frame rate of 5 frames per second. */
        public static final int FR_5_FPS = 0;

        /** Frame rate of 7.5 frames per second. */
        public static final int FR_7_5_FPS = 1;

        /** Frame rate of 10 frames per second. */
        public static final int FR_10_FPS = 2;

        /** Frame rate of 12.5 frames per second. */
        public static final int FR_12_5_FPS = 3;

        /** Frame rate of 15 frames per second. */
        public static final int FR_15_FPS = 4;

        /** Frame rate of 20 frames per second. */
        public static final int FR_20_FPS = 5;

        /** Frame rate of 25 frames per second. */
        public static final int FR_25_FPS = 6;

        /** Frame rate of 30 frames per second. */
        public static final int FR_30_FPS = 7;
    }

    /**
     * Defines Video Effect Types.
     */
    public static class VideoEffect {

        public static final int NONE = 0;

        public static final int FADE_FROM_BLACK = 8;

        public static final int CURTAIN_OPENING = 9;

        public static final int FADE_TO_BLACK = 16;

        public static final int CURTAIN_CLOSING = 17;

        public static final int EXTERNAL = 256;

        public static final int BLACK_AND_WHITE = 257;

        public static final int PINK = 258;

        public static final int GREEN = 259;

        public static final int SEPIA = 260;

        public static final int NEGATIVE = 261;

        public static final int FRAMING = 262;

        public static final int TEXT = 263;

        public static final int ZOOM_IN = 264;

        public static final int ZOOM_OUT = 265;

        public static final int FIFTIES = 266;

        public static final int COLORRGB16 = 267;

        public static final int GRADIENT = 268;
    }

    /**
     * Defines the video transitions.
     */
    public static class VideoTransition {
        /** No transition */
        public static final int NONE = 0;

        /** Cross fade transition */
        public static final int CROSS_FADE = 1;

        /** External transition. Currently not available. */
        public static final int EXTERNAL = 256;

        /** AlphaMagic transition. */
        public static final int ALPHA_MAGIC = 257;

        /** Slide transition. */
        public static final int SLIDE_TRANSITION = 258;

        /** Fade to black transition. */
        public static final int FADE_BLACK = 259;
    }

    /**
     * Defines settings for the AlphaMagic transition
     */
    public static class AlphaMagicSettings {
        /** Name of the alpha file (JPEG file). */
        public String file;

        /** Blending percentage [0..100] 0 = no blending. */
        public int blendingPercent;

        /** Invert the default rotation direction of the AlphaMagic effect. */
        public boolean invertRotation;

        /** Width of the RGB data read from the alpha file. */
        public int rgbWidth;
        /** Height of the RGB data read from the alpha file. */
        public int rgbHeight;
    }

    /** Defines the direction of the Slide transition. */
    public static final class SlideDirection {

        /** Right out left in. */
        public static final int RIGHT_OUT_LEFT_IN = 0;

        /** Left out right in. (Identifier misspells "RIGHT"; kept for compatibility.) */
        public static final int LEFT_OUT_RIGTH_IN = 1;

        /** Top out bottom in. */
        public static final int TOP_OUT_BOTTOM_IN = 2;

        /** Bottom out top in */
        public static final int BOTTOM_OUT_TOP_IN = 3;
    }

    /** Defines the Slide transition settings. */
    public static class SlideTransitionSettings {
        /**
         * Direction of the slide transition. See {@link SlideDirection
         * SlideDirection} for valid values.
         */
        public int direction;
    }

    /**
     * Defines the settings of a single clip.
     */
    public static class ClipSettings {

        /**
         * The path to the clip file.
         * <p>
         * File format of the clip, it can be:
         * <ul>
         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
         * <li>JPG file
         * </ul>
         */

        public String clipPath;

        /**
         * The path of the decoded file. This is used only for image files.
1077 */ 1078 public String clipDecodedPath; 1079 1080 /** 1081 * The path of the Original file. This is used only for image files. 1082 */ 1083 public String clipOriginalPath; 1084 1085 /** 1086 * File type of the clip. See {@link FileType FileType} for valid 1087 * values. 1088 */ 1089 public int fileType; 1090 1091 /** Begin of the cut in the clip in milliseconds. */ 1092 public int beginCutTime; 1093 1094 /** 1095 * End of the cut in the clip in milliseconds. Set both 1096 * <code>beginCutTime</code> and <code>endCutTime</code> to 1097 * <code>0</code> to get the full length of the clip without a cut. In 1098 * case of JPG clip, this is the duration of the JPEG file. 1099 */ 1100 public int endCutTime; 1101 1102 /** 1103 * Begin of the cut in the clip in percentage of the file duration. 1104 */ 1105 public int beginCutPercent; 1106 1107 /** 1108 * End of the cut in the clip in percentage of the file duration. Set 1109 * both <code>beginCutPercent</code> and <code>endCutPercent</code> to 1110 * <code>0</code> to get the full length of the clip without a cut. 1111 */ 1112 public int endCutPercent; 1113 1114 /** Enable panning and zooming. */ 1115 public boolean panZoomEnabled; 1116 1117 /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */ 1118 public int panZoomPercentStart; 1119 1120 /** Top left X coordinate at start of clip. */ 1121 public int panZoomTopLeftXStart; 1122 1123 /** Top left Y coordinate at start of clip. */ 1124 public int panZoomTopLeftYStart; 1125 1126 /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */ 1127 public int panZoomPercentEnd; 1128 1129 /** Top left X coordinate at end of clip. */ 1130 public int panZoomTopLeftXEnd; 1131 1132 /** Top left Y coordinate at end of clip. */ 1133 public int panZoomTopLeftYEnd; 1134 1135 /** 1136 * Set The media rendering. See {@link MediaRendering MediaRendering} 1137 * for valid values. 
1138 */ 1139 public int mediaRendering; 1140 1141 /** 1142 * RGB width and Height 1143 */ 1144 public int rgbWidth; 1145 public int rgbHeight; 1146 } 1147 1148 /** 1149 * Defines settings for a transition. 1150 */ 1151 public static class TransitionSettings { 1152 1153 /** Duration of the transition in msec. */ 1154 public int duration; 1155 1156 /** 1157 * Transition type for video. See {@link VideoTransition 1158 * VideoTransition} for valid values. 1159 */ 1160 public int videoTransitionType; 1161 1162 /** 1163 * Transition type for audio. See {@link AudioTransition 1164 * AudioTransition} for valid values. 1165 */ 1166 public int audioTransitionType; 1167 1168 /** 1169 * Transition behaviour. See {@link TransitionBehaviour 1170 * TransitionBehaviour} for valid values. 1171 */ 1172 public int transitionBehaviour; 1173 1174 /** 1175 * Settings for AlphaMagic transition. Only needs to be set if 1176 * <code>videoTransitionType</code> is set to 1177 * <code>VideoTransition.ALPHA_MAGIC</code>. See 1178 * {@link AlphaMagicSettings AlphaMagicSettings}. 1179 */ 1180 public AlphaMagicSettings alphaSettings; 1181 1182 /** 1183 * Settings for the Slide transition. See 1184 * {@link SlideTransitionSettings SlideTransitionSettings}. 1185 */ 1186 public SlideTransitionSettings slideSettings; 1187 } 1188 1189 public static final class AudioTransition { 1190 /** No audio transition. */ 1191 public static final int NONE = 0; 1192 1193 /** Cross-fade audio transition. */ 1194 public static final int CROSS_FADE = 1; 1195 } 1196 1197 /** 1198 * Defines transition behaviors. 1199 */ 1200 public static final class TransitionBehaviour { 1201 1202 /** The transition uses an increasing speed. */ 1203 public static final int SPEED_UP = 0; 1204 1205 /** The transition uses a linear (constant) speed. */ 1206 public static final int LINEAR = 1; 1207 1208 /** The transition uses a decreasing speed. 
*/ 1209 public static final int SPEED_DOWN = 2; 1210 1211 /** 1212 * The transition uses a constant speed, but slows down in the middle 1213 * section. 1214 */ 1215 public static final int SLOW_MIDDLE = 3; 1216 1217 /** 1218 * The transition uses a constant speed, but increases speed in the 1219 * middle section. 1220 */ 1221 public static final int FAST_MIDDLE = 4; 1222 } 1223 1224 /** 1225 * Defines settings for the background music. 1226 */ 1227 public static class BackgroundMusicSettings { 1228 1229 /** Background music file. */ 1230 public String file; 1231 1232 /** File type. See {@link FileType FileType} for valid values. */ 1233 public int fileType; 1234 1235 /** 1236 * Insertion time in milliseconds, in the output video where the 1237 * background music must be inserted. 1238 */ 1239 public long insertionTime; 1240 1241 /** 1242 * Volume, as a percentage of the background music track, to use. If 1243 * this field is set to 100, the background music will replace the audio 1244 * from the video input file(s). 1245 */ 1246 public int volumePercent; 1247 1248 /** 1249 * Start time in milliseconds in the background muisc file from where 1250 * the background music should loop. Set both <code>beginLoop</code> and 1251 * <code>endLoop</code> to <code>0</code> to disable looping. 1252 */ 1253 public long beginLoop; 1254 1255 /** 1256 * End time in milliseconds in the background music file to where the 1257 * background music should loop. Set both <code>beginLoop</code> and 1258 * <code>endLoop</code> to <code>0</code> to disable looping. 1259 */ 1260 public long endLoop; 1261 1262 public boolean enableDucking; 1263 1264 public int duckingThreshold; 1265 1266 public int lowVolume; 1267 1268 public boolean isLooping; 1269 } 1270 1271 /** Defines settings for an effect. */ 1272 public static class AudioEffect { 1273 /** No audio effect. */ 1274 public static final int NONE = 0; 1275 1276 /** Fade-in effect. 
*/ 1277 public static final int FADE_IN = 8; 1278 1279 /** Fade-out effect. */ 1280 public static final int FADE_OUT = 16; 1281 } 1282 1283 /** Defines the effect settings. */ 1284 public static class EffectSettings { 1285 1286 /** Start time of the effect in milliseconds. */ 1287 public int startTime; 1288 1289 /** Duration of the effect in milliseconds. */ 1290 public int duration; 1291 1292 /** 1293 * Video effect type. See {@link VideoEffect VideoEffect} for valid 1294 * values. 1295 */ 1296 public int videoEffectType; 1297 1298 /** 1299 * Audio effect type. See {@link AudioEffect AudioEffect} for valid 1300 * values. 1301 */ 1302 public int audioEffectType; 1303 1304 /** 1305 * Start time of the effect in percents of the duration of the clip. A 1306 * value of 0 percent means start time is from the beginning of the 1307 * clip. 1308 */ 1309 public int startPercent; 1310 1311 /** 1312 * Duration of the effect in percents of the duration of the clip. 1313 */ 1314 public int durationPercent; 1315 1316 /** 1317 * Framing file. 1318 * <p> 1319 * This field is only used when the field <code>videoEffectType</code> 1320 * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise 1321 * this field is ignored. 1322 */ 1323 public String framingFile; 1324 1325 /** 1326 * Framing buffer. 1327 * <p> 1328 * This field is only used when the field <code>videoEffectType</code> 1329 * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise 1330 * this field is ignored. 1331 */ 1332 public int[] framingBuffer; 1333 1334 /** 1335 * Bitmap type Can be from RGB_565 (4), ARGB_4444 (5), ARGB_8888 (6); 1336 **/ 1337 1338 public int bitmapType; 1339 1340 public int width; 1341 1342 public int height; 1343 1344 /** 1345 * Top left x coordinate. This coordinate is used to set the x 1346 * coordinate of the picture in the framing file when the framing file 1347 * is selected. The x coordinate is also used to set the location of the 1348 * text in the text effect. 
1349 * <p> 1350 * This field is only used when the field <code>videoEffectType</code> 1351 * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or 1352 * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is 1353 * ignored. 1354 */ 1355 public int topLeftX; 1356 1357 /** 1358 * Top left y coordinate. This coordinate is used to set the y 1359 * coordinate of the picture in the framing file when the framing file 1360 * is selected. The y coordinate is also used to set the location of the 1361 * text in the text effect. 1362 * <p> 1363 * This field is only used when the field <code>videoEffectType</code> 1364 * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or 1365 * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is 1366 * ignored. 1367 */ 1368 public int topLeftY; 1369 1370 /** 1371 * Should the frame be resized or not. If this field is set to 1372 * <link>true</code> then the frame size is matched with the output 1373 * video size. 1374 * <p> 1375 * This field is only used when the field <code>videoEffectType</code> 1376 * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise 1377 * this field is ignored. 1378 */ 1379 public boolean framingResize; 1380 1381 /** 1382 * Size to which the framing buffer needs to be resized to 1383 * This is valid only if framingResize is true 1384 */ 1385 public int framingScaledSize; 1386 /** 1387 * Text to insert in the video. 1388 * <p> 1389 * This field is only used when the field <code>videoEffectType</code> 1390 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this 1391 * field is ignored. 1392 */ 1393 public String text; 1394 1395 /** 1396 * Text attributes for the text to insert in the video. 1397 * <p> 1398 * This field is only used when the field <code>videoEffectType</code> 1399 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this 1400 * field is ignored. For more details about this field see the 1401 * integration guide. 
1402 */ 1403 public String textRenderingData; 1404 1405 /** Width of the text buffer in pixels. */ 1406 public int textBufferWidth; 1407 1408 /** Height of the text buffer in pixels. */ 1409 public int textBufferHeight; 1410 1411 /** 1412 * Processing rate for the fifties effect. A high value (e.g. 30) 1413 * results in high effect strength. 1414 * <p> 1415 * This field is only used when the field <code>videoEffectType</code> 1416 * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise 1417 * this field is ignored. 1418 */ 1419 public int fiftiesFrameRate; 1420 1421 /** 1422 * RGB 16 color of the RGB16 and gradient color effect. 1423 * <p> 1424 * This field is only used when the field <code>videoEffectType</code> 1425 * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or 1426 * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this 1427 * field is ignored. 1428 */ 1429 public int rgb16InputColor; 1430 1431 /** 1432 * Start alpha blending percentage. 1433 * <p> 1434 * This field is only used when the field <code>videoEffectType</code> 1435 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or 1436 * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field 1437 * is ignored. 1438 */ 1439 public int alphaBlendingStartPercent; 1440 1441 /** 1442 * Middle alpha blending percentage. 1443 * <p> 1444 * This field is only used when the field <code>videoEffectType</code> 1445 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or 1446 * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field 1447 * is ignored. 1448 */ 1449 public int alphaBlendingMiddlePercent; 1450 1451 /** 1452 * End alpha blending percentage. 1453 * <p> 1454 * This field is only used when the field <code>videoEffectType</code> 1455 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or 1456 * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field 1457 * is ignored. 
1458 */ 1459 public int alphaBlendingEndPercent; 1460 1461 /** 1462 * Duration, in percentage of effect duration of the fade-in phase. 1463 * <p> 1464 * This field is only used when the field <code>videoEffectType</code> 1465 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or 1466 * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field 1467 * is ignored. 1468 */ 1469 public int alphaBlendingFadeInTimePercent; 1470 1471 /** 1472 * Duration, in percentage of effect duration of the fade-out phase. 1473 * <p> 1474 * This field is only used when the field <code>videoEffectType</code> 1475 * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or 1476 * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field 1477 * is ignored. 1478 */ 1479 public int alphaBlendingFadeOutTimePercent; 1480 } 1481 1482 /** Defines the clip properties for preview */ 1483 public static class PreviewClips { 1484 1485 /** 1486 * The path to the clip file. 1487 * <p> 1488 * File format of the clip, it can be: 1489 * <ul> 1490 * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio 1491 * <li>JPG file 1492 * </ul> 1493 */ 1494 1495 public String clipPath; 1496 1497 /** 1498 * File type of the clip. See {@link FileType FileType} for valid 1499 * values. 1500 */ 1501 public int fileType; 1502 1503 /** Begin of the cut in the clip in milliseconds. */ 1504 public long beginPlayTime; 1505 1506 public long endPlayTime; 1507 1508 /** 1509 * Set The media rendering. See {@link MediaRendering MediaRendering} 1510 * for valid values. 1511 */ 1512 public int mediaRendering; 1513 1514 } 1515 1516 /** Defines the audio settings. 
*/ 1517 public static class AudioSettings { 1518 1519 String pFile; 1520 1521 /** < PCM file path */ 1522 String Id; 1523 1524 boolean bRemoveOriginal; 1525 1526 /** < If true, the original audio track is not taken into account */ 1527 int channels; 1528 1529 /** < Number of channels (1=mono, 2=stereo) of BGM clip */ 1530 int Fs; 1531 1532 /** 1533 * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of 1534 * BGM clip 1535 */ 1536 int ExtendedFs; 1537 1538 /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */ 1539 long startMs; 1540 1541 /** < Time, in milliseconds, at which the added audio track is inserted */ 1542 long beginCutTime; 1543 1544 long endCutTime; 1545 1546 int fileType; 1547 1548 int volume; 1549 1550 /** < Volume, in percentage, of the added audio track */ 1551 boolean loop; 1552 1553 /** < Looping on/off > **/ 1554 1555 /** Audio mix and Duck **/ 1556 int ducking_threshold; 1557 1558 int ducking_lowVolume; 1559 1560 boolean bInDucking_enable; 1561 1562 String pcmFilePath; 1563 } 1564 1565 /** Encapsulates preview clips and effect settings */ 1566 public static class PreviewSettings { 1567 1568 public PreviewClips[] previewClipsArray; 1569 1570 /** The effect settings. */ 1571 public EffectSettings[] effectSettingsArray; 1572 1573 } 1574 1575 /** Encapsulates clip properties */ 1576 public static class PreviewClipProperties { 1577 1578 public Properties[] clipProperties; 1579 1580 } 1581 1582 /** Defines the editing settings. */ 1583 public static class EditSettings { 1584 1585 /** 1586 * Array of clip settings. There is one <code>clipSetting</code> for 1587 * each clip. 1588 */ 1589 public ClipSettings[] clipSettingsArray; 1590 1591 /** 1592 * Array of transition settings. If there are n clips (and thus n 1593 * <code>clipSettings</code>) then there are (n-1) transitions and (n-1) 1594 * <code>transistionSettings</code> in 1595 * <code>transistionSettingsArray</code>. 
1596 */ 1597 public TransitionSettings[] transitionSettingsArray; 1598 1599 /** The effect settings. */ 1600 public EffectSettings[] effectSettingsArray; 1601 1602 /** 1603 * Video frame rate of the output clip. See {@link VideoFrameRate 1604 * VideoFrameRate} for valid values. 1605 */ 1606 public int videoFrameRate; 1607 1608 /** Output file name. Must be an absolute path. */ 1609 public String outputFile; 1610 1611 /** 1612 * Size of the video frames in the output clip. See 1613 * {@link VideoFrameSize VideoFrameSize} for valid values. 1614 */ 1615 public int videoFrameSize; 1616 1617 /** 1618 * Format of the video stream in the output clip. See 1619 * {@link VideoFormat VideoFormat} for valid values. 1620 */ 1621 public int videoFormat; 1622 1623 /** 1624 * Format of the audio stream in the output clip. See 1625 * {@link AudioFormat AudioFormat} for valid values. 1626 */ 1627 public int audioFormat; 1628 1629 /** 1630 * Sampling frequency of the audio stream in the output clip. See 1631 * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid 1632 * values. 1633 */ 1634 public int audioSamplingFreq; 1635 1636 /** 1637 * Maximum file size. By setting this you can set the maximum size of 1638 * the output clip. Set it to <code>0</code> to let the class ignore 1639 * this filed. 1640 */ 1641 public int maxFileSize; 1642 1643 /** 1644 * Number of audio channels in output clip. Use <code>0</code> for none, 1645 * <code>1</code> for mono or <code>2</code> for stereo. None is only 1646 * allowed when the <code>audioFormat</code> field is set to 1647 * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or 1648 * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO} Mono is only 1649 * allowed when the <code>audioFormat</code> field is set to 1650 * {@link AudioFormat#AAC AudioFormat.AAC} 1651 */ 1652 public int audioChannels; 1653 1654 /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */ 1655 public int videoBitrate; 1656 1657 /** Audio bitrate. 
See {@link Bitrate Bitrate} for valid values. */ 1658 public int audioBitrate; 1659 1660 /** 1661 * Background music settings. See {@link BackgroundMusicSettings 1662 * BackgroundMusicSettings} for valid values. 1663 */ 1664 public BackgroundMusicSettings backgroundMusicSettings; 1665 1666 public int primaryTrackVolume; 1667 1668 } 1669 1670 /** 1671 * Defines the media properties. 1672 **/ 1673 1674 public static class Properties { 1675 1676 /** 1677 * Duration of the media in milliseconds. 1678 */ 1679 1680 public int duration; 1681 1682 /** 1683 * File type. 1684 */ 1685 1686 public int fileType; 1687 1688 /** 1689 * Video format. 1690 */ 1691 1692 public int videoFormat; 1693 1694 /** 1695 * Duration of the video stream of the media in milliseconds. 1696 */ 1697 1698 public int videoDuration; 1699 1700 /** 1701 * Bitrate of the video stream of the media. 1702 */ 1703 1704 public int videoBitrate; 1705 1706 /** 1707 * Width of the video frames or the width of the still picture in 1708 * pixels. 1709 */ 1710 1711 public int width; 1712 1713 /** 1714 * Height of the video frames or the height of the still picture in 1715 * pixels. 1716 */ 1717 1718 public int height; 1719 1720 /** 1721 * Average frame rate of video in the media in frames per second. 1722 */ 1723 1724 public float averageFrameRate; 1725 1726 /** 1727 * Profile and level of the video in the media. 1728 */ 1729 1730 public int profileAndLevel; 1731 1732 /** 1733 * Audio format. 1734 */ 1735 1736 public int audioFormat; 1737 1738 /** 1739 * Duration of the audio stream of the media in milliseconds. 1740 */ 1741 1742 public int audioDuration; 1743 1744 /** 1745 * Bitrate of the audio stream of the media. 1746 */ 1747 1748 public int audioBitrate; 1749 1750 /** 1751 * Number of audio channels in the media. 1752 */ 1753 1754 public int audioChannels; 1755 1756 /** 1757 * Sampling frequency of the audio stream in the media in samples per 1758 * second. 
1759 */ 1760 1761 public int audioSamplingFrequency; 1762 1763 /** 1764 * Volume value of the audio track as percentage. 1765 */ 1766 public int audioVolumeValue; 1767 1768 public String Id; 1769 } 1770 1771 /** 1772 * Constructor 1773 * 1774 * @param projectPath The path where the VideoEditor stores all files 1775 * related to the project 1776 * @param lock The semaphore 1777 * @param veObj The video editor reference 1778 */ 1779 public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) { 1780 mProjectPath = projectPath; 1781 if (veObj != null) { 1782 mVideoEditor = veObj; 1783 } else { 1784 mVideoEditor = null; 1785 throw new IllegalArgumentException("video editor object is null"); 1786 } 1787 if (mStoryBoardSettings == null) { 1788 mStoryBoardSettings = new EditSettings(); 1789 } 1790 1791 mLock = lock; 1792 1793 _init(mProjectPath, "null"); 1794 mAudioTrackPCMFilePath = null; 1795 } 1796 1797 /** 1798 * @return The project path 1799 */ 1800 String getProjectPath() { 1801 return mProjectPath; 1802 } 1803 1804 /** 1805 * @return The Audio Track PCM file path 1806 */ 1807 String getProjectAudioTrackPCMFilePath() { 1808 return mAudioTrackPCMFilePath; 1809 } 1810 1811 /** 1812 * Invalidates the PCM file 1813 */ 1814 void invalidatePcmFile() { 1815 if (mAudioTrackPCMFilePath != null) { 1816 new File(mAudioTrackPCMFilePath).delete(); 1817 mAudioTrackPCMFilePath = null; 1818 } 1819 } 1820 1821 @SuppressWarnings("unused") 1822 private void onProgressUpdate(int taskId, int progress) { 1823 if (mProcessingState == PROCESSING_EXPORT) { 1824 if (mExportProgressListener != null) { 1825 if (mProgressToApp < progress) { 1826 mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress); 1827 /* record previous progress */ 1828 mProgressToApp = progress; 1829 } 1830 } 1831 } 1832 else { 1833 // Adapt progress depending on current state 1834 int actualProgress = 0; 1835 int action = 0; 1836 1837 if (mProcessingState == 
PROCESSING_AUDIO_PCM) { 1838 action = MediaProcessingProgressListener.ACTION_DECODE; 1839 } else { 1840 action = MediaProcessingProgressListener.ACTION_ENCODE; 1841 } 1842 1843 switch (mProcessingState) { 1844 case PROCESSING_AUDIO_PCM: 1845 actualProgress = progress; 1846 break; 1847 case PROCESSING_TRANSITION: 1848 actualProgress = progress; 1849 break; 1850 case PROCESSING_KENBURNS: 1851 actualProgress = progress; 1852 break; 1853 case PROCESSING_INTERMEDIATE1: 1854 if ((progress == 0) && (mProgressToApp != 0)) { 1855 mProgressToApp = 0; 1856 } 1857 if ((progress != 0) || (mProgressToApp != 0)) { 1858 actualProgress = progress/4; 1859 } 1860 break; 1861 case PROCESSING_INTERMEDIATE2: 1862 if ((progress != 0) || (mProgressToApp != 0)) { 1863 actualProgress = 25 + progress/4; 1864 } 1865 break; 1866 case PROCESSING_INTERMEDIATE3: 1867 if ((progress != 0) || (mProgressToApp != 0)) { 1868 actualProgress = 50 + progress/2; 1869 } 1870 break; 1871 case PROCESSING_NONE: 1872 1873 default: 1874 Log.e(TAG, "ERROR unexpected State=" + mProcessingState); 1875 return; 1876 } 1877 if ((mProgressToApp != actualProgress) && (actualProgress != 0)) { 1878 1879 mProgressToApp = actualProgress; 1880 1881 if (mMediaProcessingProgressListener != null) { 1882 // Send the progress indication 1883 mMediaProcessingProgressListener.onProgress(mProcessingObject, action, 1884 actualProgress); 1885 } 1886 } 1887 /* avoid 0 in next intermediate call */ 1888 if (mProgressToApp == 0) { 1889 if (mMediaProcessingProgressListener != null) { 1890 /* 1891 * Send the progress indication 1892 */ 1893 mMediaProcessingProgressListener.onProgress(mProcessingObject, action, 1894 actualProgress); 1895 } 1896 mProgressToApp = 1; 1897 } 1898 } 1899 } 1900 1901 @SuppressWarnings("unused") 1902 private void onPreviewProgressUpdate(int progress, boolean isFinished, 1903 boolean updateOverlay, String filename, int renderingMode) { 1904 if (mPreviewProgressListener != null) { 1905 if (mIsFirstProgress) { 1906 
mPreviewProgressListener.onStart(mVideoEditor); 1907 mIsFirstProgress = false; 1908 } 1909 1910 final VideoEditor.OverlayData overlayData; 1911 if (updateOverlay) { 1912 overlayData = new VideoEditor.OverlayData(); 1913 if (filename != null) { 1914 overlayData.set(BitmapFactory.decodeFile(filename), renderingMode); 1915 } else { 1916 overlayData.setClear(); 1917 } 1918 } else { 1919 overlayData = null; 1920 } 1921 1922 mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData); 1923 1924 if (progress != 0) { 1925 mPreviewProgress = progress; 1926 } 1927 1928 if (isFinished) { 1929 mPreviewProgressListener.onStop(mVideoEditor); 1930 } 1931 } 1932 } 1933 1934 /** 1935 * Release the native helper object 1936 */ 1937 void releaseNativeHelper() throws InterruptedException { 1938 release(); 1939 } 1940 1941 /** 1942 * Release the native helper to end the Audio Graph process 1943 */ 1944 @SuppressWarnings("unused") 1945 private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) { 1946 if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) { 1947 mExtractAudioWaveformProgressListener.onProgress(progress); 1948 } 1949 } 1950 1951 /** 1952 * Populates the Effect Settings in EffectSettings 1953 * 1954 * @param effects The reference of EffectColor 1955 * 1956 * @return The populated effect settings in EffectSettings reference 1957 */ 1958 EffectSettings getEffectSettings(EffectColor effects) { 1959 EffectSettings effectSettings = new EffectSettings(); 1960 effectSettings.startTime = (int)effects.getStartTime(); 1961 effectSettings.duration = (int)effects.getDuration(); 1962 effectSettings.videoEffectType = getEffectColorType(effects); 1963 effectSettings.audioEffectType = 0; 1964 effectSettings.startPercent = 0; 1965 effectSettings.durationPercent = 0; 1966 effectSettings.framingFile = null; 1967 effectSettings.topLeftX = 0; 1968 effectSettings.topLeftY = 0; 1969 effectSettings.framingResize = false; 1970 effectSettings.text = 
null; 1971 effectSettings.textRenderingData = null; 1972 effectSettings.textBufferWidth = 0; 1973 effectSettings.textBufferHeight = 0; 1974 if (effects.getType() == EffectColor.TYPE_FIFTIES) { 1975 effectSettings.fiftiesFrameRate = 15; 1976 } else { 1977 effectSettings.fiftiesFrameRate = 0; 1978 } 1979 1980 if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16) 1981 || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) { 1982 effectSettings.rgb16InputColor = effects.getColor(); 1983 } 1984 1985 effectSettings.alphaBlendingStartPercent = 0; 1986 effectSettings.alphaBlendingMiddlePercent = 0; 1987 effectSettings.alphaBlendingEndPercent = 0; 1988 effectSettings.alphaBlendingFadeInTimePercent = 0; 1989 effectSettings.alphaBlendingFadeOutTimePercent = 0; 1990 return effectSettings; 1991 } 1992 1993 /** 1994 * Populates the Overlay Settings in EffectSettings 1995 * 1996 * @param overlay The reference of OverlayFrame 1997 * 1998 * @return The populated overlay settings in EffectSettings reference 1999 */ 2000 EffectSettings getOverlaySettings(OverlayFrame overlay) { 2001 EffectSettings effectSettings = new EffectSettings(); 2002 Bitmap bitmap = null; 2003 2004 effectSettings.startTime = (int)overlay.getStartTime(); 2005 effectSettings.duration = (int)overlay.getDuration(); 2006 effectSettings.videoEffectType = VideoEffect.FRAMING; 2007 effectSettings.audioEffectType = 0; 2008 effectSettings.startPercent = 0; 2009 effectSettings.durationPercent = 0; 2010 effectSettings.framingFile = null; 2011 2012 if ((bitmap = overlay.getBitmap()) != null) { 2013 effectSettings.framingFile = overlay.getFilename(); 2014 2015 if (effectSettings.framingFile == null) { 2016 try { 2017 (overlay).save(mProjectPath); 2018 } catch (IOException e) { 2019 Log.e(TAG, "getOverlaySettings : File not found"); 2020 } 2021 effectSettings.framingFile = overlay.getFilename(); 2022 } 2023 if (bitmap.getConfig() == Bitmap.Config.ARGB_8888) 2024 effectSettings.bitmapType = 6; 2025 else if 
(bitmap.getConfig() == Bitmap.Config.ARGB_4444) 2026 effectSettings.bitmapType = 5; 2027 else if (bitmap.getConfig() == Bitmap.Config.RGB_565) 2028 effectSettings.bitmapType = 4; 2029 else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8) 2030 throw new RuntimeException("Bitmap config not supported"); 2031 2032 effectSettings.width = bitmap.getWidth(); 2033 effectSettings.height = bitmap.getHeight(); 2034 effectSettings.framingBuffer = new int[effectSettings.width]; 2035 int tmp = 0; 2036 short maxAlpha = 0; 2037 short minAlpha = (short)0xFF; 2038 short alpha = 0; 2039 while (tmp < effectSettings.height) { 2040 bitmap.getPixels(effectSettings.framingBuffer, 0, 2041 effectSettings.width, 0, tmp, 2042 effectSettings.width, 1); 2043 for (int i = 0; i < effectSettings.width; i++) { 2044 alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF); 2045 if (alpha > maxAlpha) { 2046 maxAlpha = alpha; 2047 } 2048 if (alpha < minAlpha) { 2049 minAlpha = alpha; 2050 } 2051 } 2052 tmp += 1; 2053 } 2054 alpha = (short)((maxAlpha + minAlpha) / 2); 2055 alpha = (short)((alpha * 100) / 256); 2056 effectSettings.alphaBlendingEndPercent = alpha; 2057 effectSettings.alphaBlendingMiddlePercent = alpha; 2058 effectSettings.alphaBlendingStartPercent = alpha; 2059 effectSettings.alphaBlendingFadeInTimePercent = 100; 2060 effectSettings.alphaBlendingFadeOutTimePercent = 100; 2061 effectSettings.framingBuffer = null; 2062 2063 /* 2064 * Set the resized RGB file dimensions 2065 */ 2066 effectSettings.width = overlay.getResizedRGBSizeWidth(); 2067 if(effectSettings.width == 0) { 2068 effectSettings.width = bitmap.getWidth(); 2069 } 2070 2071 effectSettings.height = overlay.getResizedRGBSizeHeight(); 2072 if(effectSettings.height == 0) { 2073 effectSettings.height = bitmap.getHeight(); 2074 } 2075 2076 } 2077 2078 effectSettings.topLeftX = 0; 2079 effectSettings.topLeftY = 0; 2080 2081 effectSettings.framingResize = true; 2082 effectSettings.text = null; 2083 
effectSettings.textRenderingData = null; 2084 effectSettings.textBufferWidth = 0; 2085 effectSettings.textBufferHeight = 0; 2086 effectSettings.fiftiesFrameRate = 0; 2087 effectSettings.rgb16InputColor = 0; 2088 int mediaItemHeight; 2089 int aspectRatio; 2090 if (overlay.getMediaItem() instanceof MediaImageItem) { 2091 if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) { 2092 // Ken Burns was applied 2093 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight(); 2094 aspectRatio = getAspectRatio( 2095 ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth() 2096 , mediaItemHeight); 2097 } else { 2098 //For image get the scaled height. Aspect ratio would remain the same 2099 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight(); 2100 aspectRatio = overlay.getMediaItem().getAspectRatio(); 2101 } 2102 } else { 2103 aspectRatio = overlay.getMediaItem().getAspectRatio(); 2104 mediaItemHeight = overlay.getMediaItem().getHeight(); 2105 } 2106 effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight); 2107 return effectSettings; 2108 } 2109 2110 /* get Video Editor aspect ratio */ 2111 int nativeHelperGetAspectRatio() { 2112 return mVideoEditor.getAspectRatio(); 2113 } 2114 2115 /** 2116 * Sets the audio regenerate flag 2117 * 2118 * @param flag The boolean to set the audio regenerate flag 2119 * 2120 */ 2121 void setAudioflag(boolean flag) { 2122 //check if the file exists. 
2123 if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) { 2124 flag = true; 2125 } 2126 mRegenerateAudio = flag; 2127 } 2128 2129 /** 2130 * Gets the audio regenerate flag 2131 * 2132 * @param return The boolean to get the audio regenerate flag 2133 * 2134 */ 2135 boolean getAudioflag() { 2136 return mRegenerateAudio; 2137 } 2138 2139 /** 2140 * Maps the average frame rate to one of the defined enum values 2141 * 2142 * @param averageFrameRate The average frame rate of video item 2143 * 2144 * @return The frame rate from one of the defined enum values 2145 */ 2146 int GetClosestVideoFrameRate(int averageFrameRate) { 2147 if (averageFrameRate >= 25) { 2148 return VideoFrameRate.FR_30_FPS; 2149 } else if (averageFrameRate >= 20) { 2150 return VideoFrameRate.FR_25_FPS; 2151 } else if (averageFrameRate >= 15) { 2152 return VideoFrameRate.FR_20_FPS; 2153 } else if (averageFrameRate >= 12) { 2154 return VideoFrameRate.FR_15_FPS; 2155 } else if (averageFrameRate >= 10) { 2156 return VideoFrameRate.FR_12_5_FPS; 2157 } else if (averageFrameRate >= 7) { 2158 return VideoFrameRate.FR_10_FPS; 2159 } else if (averageFrameRate >= 5) { 2160 return VideoFrameRate.FR_7_5_FPS; 2161 } else { 2162 return -1; 2163 } 2164 } 2165 2166 /** 2167 * Helper function to adjust the effect or overlay start time 2168 * depending on the begin and end boundary time of meddia item 2169 */ 2170 public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime, 2171 int endCutTime) { 2172 2173 int effectStartTime = 0; 2174 int effectDuration = 0; 2175 2176 /** 2177 * cbct -> clip begin cut time 2178 * cect -> clip end cut time 2179 **************************************** 2180 * | | 2181 * | cbct cect | 2182 * | <-1--> | | | 2183 * | <--|-2-> | | 2184 * | | <---3---> | | 2185 * | | <--|-4---> | 2186 * | | | <--5--> | 2187 * | <---|------6----|----> | 2188 * | | 2189 * < : effectStart 2190 * > : effectStart + effectDuration 2191 
**************************************** 2192 **/ 2193 2194 /** 1 & 5 */ 2195 /** 2196 * Effect falls out side the trim duration. In such a case effects shall 2197 * not be applied. 2198 */ 2199 if ((lEffect.startTime > endCutTime) 2200 || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) { 2201 2202 effectStartTime = 0; 2203 effectDuration = 0; 2204 2205 lEffect.startTime = effectStartTime; 2206 lEffect.duration = effectDuration; 2207 return; 2208 } 2209 2210 /** 2 */ 2211 if ((lEffect.startTime < beginCutTime) 2212 && ((lEffect.startTime + lEffect.duration) > beginCutTime) 2213 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2214 effectStartTime = 0; 2215 effectDuration = lEffect.duration; 2216 2217 effectDuration -= (beginCutTime - lEffect.startTime); 2218 lEffect.startTime = effectStartTime; 2219 lEffect.duration = effectDuration; 2220 return; 2221 } 2222 2223 /** 3 */ 2224 if ((lEffect.startTime >= beginCutTime) 2225 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2226 effectStartTime = lEffect.startTime - beginCutTime; 2227 lEffect.startTime = effectStartTime; 2228 lEffect.duration = lEffect.duration; 2229 return; 2230 } 2231 2232 /** 4 */ 2233 if ((lEffect.startTime >= beginCutTime) 2234 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2235 effectStartTime = lEffect.startTime - beginCutTime; 2236 effectDuration = endCutTime - lEffect.startTime; 2237 lEffect.startTime = effectStartTime; 2238 lEffect.duration = effectDuration; 2239 return; 2240 } 2241 2242 /** 6 */ 2243 if ((lEffect.startTime < beginCutTime) 2244 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2245 effectStartTime = 0; 2246 effectDuration = endCutTime - beginCutTime; 2247 lEffect.startTime = effectStartTime; 2248 lEffect.duration = effectDuration; 2249 return; 2250 } 2251 2252 } 2253 2254 /** 2255 * Generates the clip for preview or export 2256 * 2257 * @param editSettings The EditSettings reference for generating 2258 * a clip for 
preview or export 2259 * 2260 * @return error value 2261 */ 2262 public int generateClip(EditSettings editSettings) { 2263 int err = 0; 2264 2265 try { 2266 err = nativeGenerateClip(editSettings); 2267 } catch (IllegalArgumentException ex) { 2268 Log.e(TAG, "Illegal Argument exception in load settings"); 2269 return -1; 2270 } catch (IllegalStateException ex) { 2271 Log.e(TAG, "Illegal state exception in load settings"); 2272 return -1; 2273 } catch (RuntimeException ex) { 2274 Log.e(TAG, "Runtime exception in load settings"); 2275 return -1; 2276 } 2277 return err; 2278 } 2279 2280 /** 2281 * Init function to initialiZe the ClipSettings reference to 2282 * default values 2283 * 2284 * @param lclipSettings The ClipSettings reference 2285 */ 2286 void initClipSettings(ClipSettings lclipSettings) { 2287 lclipSettings.clipPath = null; 2288 lclipSettings.clipDecodedPath = null; 2289 lclipSettings.clipOriginalPath = null; 2290 lclipSettings.fileType = 0; 2291 lclipSettings.endCutTime = 0; 2292 lclipSettings.beginCutTime = 0; 2293 lclipSettings.beginCutPercent = 0; 2294 lclipSettings.endCutPercent = 0; 2295 lclipSettings.panZoomEnabled = false; 2296 lclipSettings.panZoomPercentStart = 0; 2297 lclipSettings.panZoomTopLeftXStart = 0; 2298 lclipSettings.panZoomTopLeftYStart = 0; 2299 lclipSettings.panZoomPercentEnd = 0; 2300 lclipSettings.panZoomTopLeftXEnd = 0; 2301 lclipSettings.panZoomTopLeftYEnd = 0; 2302 lclipSettings.mediaRendering = 0; 2303 } 2304 2305 2306 /** 2307 * Populates the settings for generating an effect clip 2308 * 2309 * @param lMediaItem The media item for which the effect clip 2310 * needs to be generated 2311 * @param lclipSettings The ClipSettings reference containing 2312 * clips data 2313 * @param e The EditSettings reference containing effect specific data 2314 * @param uniqueId The unique id used in the name of the output clip 2315 * @param clipNo Used for internal purpose 2316 * 2317 * @return The name and path of generated clip 2318 */ 2319 
String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings, 2320 EditSettings e,String uniqueId,int clipNo) { 2321 int err = 0; 2322 EditSettings editSettings = null; 2323 String EffectClipPath = null; 2324 2325 editSettings = new EditSettings(); 2326 2327 editSettings.clipSettingsArray = new ClipSettings[1]; 2328 editSettings.clipSettingsArray[0] = lclipSettings; 2329 2330 editSettings.backgroundMusicSettings = null; 2331 editSettings.transitionSettingsArray = null; 2332 editSettings.effectSettingsArray = e.effectSettingsArray; 2333 2334 EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_" 2335 + lMediaItem.getId() + uniqueId + ".3gp"); 2336 2337 File tmpFile = new File(EffectClipPath); 2338 if (tmpFile.exists()) { 2339 tmpFile.delete(); 2340 } 2341 2342 if (lMediaItem instanceof MediaVideoItem) { 2343 MediaVideoItem m = (MediaVideoItem)lMediaItem; 2344 2345 editSettings.audioFormat = AudioFormat.AAC; 2346 editSettings.audioChannels = 2; 2347 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2348 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2349 2350 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2351 //editSettings.videoFormat = VideoFormat.MPEG4; 2352 editSettings.videoFormat = VideoFormat.H264; 2353 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2354 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2355 m.getHeight()); 2356 } else { 2357 MediaImageItem m = (MediaImageItem)lMediaItem; 2358 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2359 editSettings.audioChannels = 2; 2360 editSettings.audioFormat = AudioFormat.AAC; 2361 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2362 2363 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2364 editSettings.videoFormat = VideoFormat.H264; 2365 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2366 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2367 
m.getScaledHeight()); 2368 } 2369 2370 editSettings.outputFile = EffectClipPath; 2371 2372 if (clipNo == 1) { 2373 mProcessingState = PROCESSING_INTERMEDIATE1; 2374 } else if (clipNo == 2) { 2375 mProcessingState = PROCESSING_INTERMEDIATE2; 2376 } 2377 mProcessingObject = lMediaItem; 2378 err = generateClip(editSettings); 2379 mProcessingState = PROCESSING_NONE; 2380 2381 if (err == 0) { 2382 lclipSettings.clipPath = EffectClipPath; 2383 lclipSettings.fileType = FileType.THREE_GPP; 2384 return EffectClipPath; 2385 } else { 2386 throw new RuntimeException("preview generation cannot be completed"); 2387 } 2388 } 2389 2390 2391 /** 2392 * Populates the settings for generating a Ken Burn effect clip 2393 * 2394 * @param m The media image item for which the Ken Burn effect clip 2395 * needs to be generated 2396 * @param e The EditSettings reference clip specific data 2397 * 2398 * @return The name and path of generated clip 2399 */ 2400 String generateKenBurnsClip(EditSettings e, MediaImageItem m) { 2401 String output = null; 2402 int err = 0; 2403 2404 e.backgroundMusicSettings = null; 2405 e.transitionSettingsArray = null; 2406 e.effectSettingsArray = null; 2407 output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp"); 2408 2409 File tmpFile = new File(output); 2410 if (tmpFile.exists()) { 2411 tmpFile.delete(); 2412 } 2413 2414 e.outputFile = output; 2415 e.audioBitrate = Bitrate.BR_64_KBPS; 2416 e.audioChannels = 2; 2417 e.audioFormat = AudioFormat.AAC; 2418 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2419 2420 e.videoBitrate = Bitrate.BR_5_MBPS; 2421 e.videoFormat = VideoFormat.H264; 2422 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2423 e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2424 m.getScaledHeight()); 2425 mProcessingState = PROCESSING_KENBURNS; 2426 mProcessingObject = m; 2427 err = generateClip(e); 2428 // Reset the processing state and check for errors 2429 mProcessingState = PROCESSING_NONE; 
2430 if (err != 0) { 2431 throw new RuntimeException("preview generation cannot be completed"); 2432 } 2433 return output; 2434 } 2435 2436 2437 /** 2438 * Calculates the output resolution for transition clip 2439 * 2440 * @param m1 First media item associated with transition 2441 * @param m2 Second media item associated with transition 2442 * 2443 * @return The transition resolution 2444 */ 2445 private int getTransitionResolution(MediaItem m1, MediaItem m2) { 2446 int clip1Height = 0; 2447 int clip2Height = 0; 2448 int videoSize = 0; 2449 2450 if (m1 != null && m2 != null) { 2451 if (m1 instanceof MediaVideoItem) { 2452 clip1Height = m1.getHeight(); 2453 } else if (m1 instanceof MediaImageItem) { 2454 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2455 } 2456 if (m2 instanceof MediaVideoItem) { 2457 clip2Height = m2.getHeight(); 2458 } else if (m2 instanceof MediaImageItem) { 2459 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2460 } 2461 if (clip1Height > clip2Height) { 2462 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2463 } else { 2464 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2465 } 2466 } else if (m1 == null && m2 != null) { 2467 if (m2 instanceof MediaVideoItem) { 2468 clip2Height = m2.getHeight(); 2469 } else if (m2 instanceof MediaImageItem) { 2470 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2471 } 2472 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2473 } else if (m1 != null && m2 == null) { 2474 if (m1 instanceof MediaVideoItem) { 2475 clip1Height = m1.getHeight(); 2476 } else if (m1 instanceof MediaImageItem) { 2477 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2478 } 2479 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2480 } 2481 return videoSize; 2482 } 2483 2484 /** 2485 * Populates the settings for generating an transition clip 2486 * 2487 * @param m1 First media item associated with 
transition 2488 * @param m2 Second media item associated with transition 2489 * @param e The EditSettings reference containing 2490 * clip specific data 2491 * @param uniqueId The unique id used in the name of the output clip 2492 * @param t The Transition specific data 2493 * 2494 * @return The name and path of generated clip 2495 */ 2496 String generateTransitionClip(EditSettings e, String uniqueId, 2497 MediaItem m1, MediaItem m2,Transition t) { 2498 String outputFilename = null; 2499 int err = 0; 2500 2501 outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp"); 2502 e.outputFile = outputFilename; 2503 e.audioBitrate = Bitrate.BR_64_KBPS; 2504 e.audioChannels = 2; 2505 e.audioFormat = AudioFormat.AAC; 2506 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2507 2508 e.videoBitrate = Bitrate.BR_5_MBPS; 2509 e.videoFormat = VideoFormat.H264; 2510 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2511 e.videoFrameSize = getTransitionResolution(m1, m2); 2512 2513 if (new File(outputFilename).exists()) { 2514 new File(outputFilename).delete(); 2515 } 2516 mProcessingState = PROCESSING_INTERMEDIATE3; 2517 mProcessingObject = t; 2518 err = generateClip(e); 2519 // Reset the processing state and check for errors 2520 mProcessingState = PROCESSING_NONE; 2521 if (err != 0) { 2522 throw new RuntimeException("preview generation cannot be completed"); 2523 } 2524 return outputFilename; 2525 } 2526 2527 /** 2528 * Populates effects and overlays in EffectSettings structure 2529 * and also adjust the start time and duration of effects and overlays 2530 * w.r.t to total story board time 2531 * 2532 * @param m1 Media item associated with effect 2533 * @param effectSettings The EffectSettings reference containing 2534 * effect specific data 2535 * @param beginCutTime The begin cut time of the clip associated with effect 2536 * @param endCutTime The end cut time of the clip associated with effect 2537 * @param storyBoardTime The current story board time 2538 * 2539 
* @return The updated index 2540 */ 2541 private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i, 2542 int beginCutTime, int endCutTime, int storyBoardTime) { 2543 2544 if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0 2545 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2546 beginCutTime += m.getBeginTransition().getDuration(); 2547 endCutTime -= m.getEndTransition().getDuration(); 2548 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2549 && m.getEndTransition().getDuration() > 0) { 2550 endCutTime -= m.getEndTransition().getDuration(); 2551 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2552 && m.getBeginTransition().getDuration() > 0) { 2553 beginCutTime += m.getBeginTransition().getDuration(); 2554 } 2555 2556 final List<Effect> effects = m.getAllEffects(); 2557 final List<Overlay> overlays = m.getAllOverlays(); 2558 2559 for (Overlay overlay : overlays) { 2560 effectSettings[i] = getOverlaySettings((OverlayFrame)overlay); 2561 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2562 effectSettings[i].startTime += storyBoardTime; 2563 i++; 2564 } 2565 2566 for (Effect effect : effects) { 2567 if (effect instanceof EffectColor) { 2568 effectSettings[i] = getEffectSettings((EffectColor)effect); 2569 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2570 effectSettings[i].startTime += storyBoardTime; 2571 i++; 2572 } 2573 } 2574 2575 return i; 2576 } 2577 2578 /** 2579 * Adjusts the media item boundaries for use in export or preview 2580 * 2581 * @param clipSettings The ClipSettings reference 2582 * @param clipProperties The Properties reference 2583 * @param m The media item 2584 */ 2585 private void adjustMediaItemBoundary(ClipSettings clipSettings, 2586 Properties clipProperties, MediaItem m) { 2587 if (m.getBeginTransition() != null && 
m.getBeginTransition().getDuration() > 0 2588 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2589 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2590 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2591 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2592 && m.getEndTransition().getDuration() > 0) { 2593 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2594 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2595 && m.getBeginTransition().getDuration() > 0) { 2596 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2597 } 2598 2599 clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime; 2600 2601 if (clipProperties.videoDuration != 0) { 2602 clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2603 } 2604 2605 if (clipProperties.audioDuration != 0) { 2606 clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2607 } 2608 } 2609 2610 /** 2611 * Generates the transition if transition is present 2612 * and is in invalidated state 2613 * 2614 * @param transition The Transition reference 2615 * @param editSettings The EditSettings reference 2616 * @param clipPropertiesArray The clip Properties array 2617 * @param i The index in clip Properties array for current clip 2618 */ 2619 private void generateTransition(Transition transition, EditSettings editSettings, 2620 PreviewClipProperties clipPropertiesArray, int index) { 2621 if (!(transition.isGenerated())) { 2622 transition.generate(); 2623 } 2624 editSettings.clipSettingsArray[index] = new ClipSettings(); 2625 editSettings.clipSettingsArray[index].clipPath = transition.getFilename(); 2626 editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP; 2627 editSettings.clipSettingsArray[index].beginCutTime = 0; 2628 editSettings.clipSettingsArray[index].endCutTime = 
(int)transition.getDuration(); 2629 editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS; 2630 2631 try { 2632 clipPropertiesArray.clipProperties[index] = 2633 getMediaProperties(transition.getFilename()); 2634 } catch (Exception e) { 2635 throw new IllegalArgumentException("Unsupported file or file not found"); 2636 } 2637 2638 clipPropertiesArray.clipProperties[index].Id = null; 2639 clipPropertiesArray.clipProperties[index].audioVolumeValue = 100; 2640 clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration(); 2641 if (clipPropertiesArray.clipProperties[index].videoDuration != 0) { 2642 clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration(); 2643 } 2644 2645 if (clipPropertiesArray.clipProperties[index].audioDuration != 0) { 2646 clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration(); 2647 } 2648 } 2649 2650 /** 2651 * Sets the volume for current media item in clip properties array 2652 * 2653 * @param m The media item 2654 * @param clipProperties The clip properties array reference 2655 * @param i The index in clip Properties array for current clip 2656 */ 2657 private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties, 2658 int index) { 2659 if (m instanceof MediaVideoItem) { 2660 final boolean videoMuted = ((MediaVideoItem)m).isMuted(); 2661 if (videoMuted == false) { 2662 mClipProperties.clipProperties[index].audioVolumeValue = 2663 ((MediaVideoItem)m).getVolume(); 2664 } else { 2665 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2666 } 2667 } else if (m instanceof MediaImageItem) { 2668 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2669 } 2670 } 2671 2672 /** 2673 * Checks for odd size image width and height 2674 * 2675 * @param m The media item 2676 * @param clipProperties The clip properties array reference 2677 * @param i The index in clip Properties array for current clip 2678 */ 2679 
private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) { 2680 if (m instanceof MediaImageItem) { 2681 int width = mClipProperties.clipProperties[index].width; 2682 int height = mClipProperties.clipProperties[index].height; 2683 2684 if ((width % 2) != 0) { 2685 width -= 1; 2686 } 2687 if ((height % 2) != 0) { 2688 height -= 1; 2689 } 2690 mClipProperties.clipProperties[index].width = width; 2691 mClipProperties.clipProperties[index].height = height; 2692 } 2693 } 2694 2695 /** 2696 * Populates the media item properties and calculates the maximum 2697 * height among all the clips 2698 * 2699 * @param m The media item 2700 * @param i The index in clip Properties array for current clip 2701 * @param maxHeight The max height from the clip properties 2702 * 2703 * @return Updates the max height if current clip's height is greater 2704 * than all previous clips height 2705 */ 2706 private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) { 2707 mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings(); 2708 if (m instanceof MediaVideoItem) { 2709 mPreviewEditSettings.clipSettingsArray[index] = 2710 ((MediaVideoItem)m).getVideoClipProperties(); 2711 if (((MediaVideoItem)m).getHeight() > maxHeight) { 2712 maxHeight = ((MediaVideoItem)m).getHeight(); 2713 } 2714 } else if (m instanceof MediaImageItem) { 2715 mPreviewEditSettings.clipSettingsArray[index] = 2716 ((MediaImageItem)m).getImageClipProperties(); 2717 if (((MediaImageItem)m).getScaledHeight() > maxHeight) { 2718 maxHeight = ((MediaImageItem)m).getScaledHeight(); 2719 } 2720 } 2721 /** + Handle the image files here */ 2722 if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) { 2723 mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = 2724 ((MediaImageItem)m).getDecodedImageFileName(); 2725 2726 mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath = 2727 
mPreviewEditSettings.clipSettingsArray[index].clipPath; 2728 } 2729 return maxHeight; 2730 } 2731 2732 /** 2733 * Populates the background music track properties 2734 * 2735 * @param mediaBGMList The background music list 2736 * 2737 */ 2738 private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) { 2739 2740 if (mediaBGMList.size() == 1) { 2741 mAudioTrack = mediaBGMList.get(0); 2742 } else { 2743 mAudioTrack = null; 2744 } 2745 2746 if (mAudioTrack != null) { 2747 mAudioSettings = new AudioSettings(); 2748 Properties mAudioProperties = new Properties(); 2749 mAudioSettings.pFile = null; 2750 mAudioSettings.Id = mAudioTrack.getId(); 2751 try { 2752 mAudioProperties = getMediaProperties(mAudioTrack.getFilename()); 2753 } catch (Exception e) { 2754 throw new IllegalArgumentException("Unsupported file or file not found"); 2755 } 2756 mAudioSettings.bRemoveOriginal = false; 2757 mAudioSettings.channels = mAudioProperties.audioChannels; 2758 mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency; 2759 mAudioSettings.loop = mAudioTrack.isLooping(); 2760 mAudioSettings.ExtendedFs = 0; 2761 mAudioSettings.pFile = mAudioTrack.getFilename(); 2762 mAudioSettings.startMs = mAudioTrack.getStartTime(); 2763 mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime(); 2764 mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime(); 2765 if (mAudioTrack.isMuted()) { 2766 mAudioSettings.volume = 0; 2767 } else { 2768 mAudioSettings.volume = mAudioTrack.getVolume(); 2769 } 2770 mAudioSettings.fileType = mAudioProperties.fileType; 2771 mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume(); 2772 mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold(); 2773 mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled(); 2774 mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE); 2775 mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath; 2776 2777 mPreviewEditSettings.backgroundMusicSettings = 
new BackgroundMusicSettings(); 2778 mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath; 2779 mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType; 2780 mPreviewEditSettings.backgroundMusicSettings.insertionTime = 2781 mAudioTrack.getStartTime(); 2782 mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume(); 2783 mPreviewEditSettings.backgroundMusicSettings.beginLoop = 2784 mAudioTrack.getBoundaryBeginTime(); 2785 mPreviewEditSettings.backgroundMusicSettings.endLoop = 2786 mAudioTrack.getBoundaryEndTime(); 2787 mPreviewEditSettings.backgroundMusicSettings.enableDucking = 2788 mAudioTrack.isDuckingEnabled(); 2789 mPreviewEditSettings.backgroundMusicSettings.duckingThreshold = 2790 mAudioTrack.getDuckingThreshhold(); 2791 mPreviewEditSettings.backgroundMusicSettings.lowVolume = 2792 mAudioTrack.getDuckedTrackVolume(); 2793 mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping(); 2794 mPreviewEditSettings.primaryTrackVolume = 100; 2795 mProcessingState = PROCESSING_AUDIO_PCM; 2796 mProcessingObject = mAudioTrack; 2797 } else { 2798 mAudioSettings = null; 2799 mPreviewEditSettings.backgroundMusicSettings = null; 2800 mAudioTrackPCMFilePath = null; 2801 } 2802 } 2803 2804 /** 2805 * Calculates all the effects in all the media items 2806 * in media items list 2807 * 2808 * @param mediaItemsList The media item list 2809 * 2810 * @return The total number of effects 2811 * 2812 */ 2813 private int getTotalEffects(List<MediaItem> mediaItemsList) { 2814 int totalEffects = 0; 2815 final Iterator<MediaItem> it = mediaItemsList.iterator(); 2816 while (it.hasNext()) { 2817 final MediaItem t = it.next(); 2818 totalEffects += t.getAllEffects().size(); 2819 totalEffects += t.getAllOverlays().size(); 2820 final Iterator<Effect> ef = t.getAllEffects().iterator(); 2821 while (ef.hasNext()) { 2822 final Effect e = ef.next(); 2823 if (e instanceof EffectKenBurns) { 2824 
totalEffects--; 2825 } 2826 } 2827 } 2828 return totalEffects; 2829 } 2830 2831 /** 2832 * This function is responsible for forming clip settings 2833 * array and clip properties array including transition clips 2834 * and effect settings for preview purpose or export. 2835 * 2836 * 2837 * @param mediaItemsList The media item list 2838 * @param mediaTransitionList The transitions list 2839 * @param mediaBGMList The background music list 2840 * @param listener The MediaProcessingProgressListener 2841 * 2842 */ 2843 void previewStoryBoard(List<MediaItem> mediaItemsList, 2844 List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList, 2845 MediaProcessingProgressListener listener) { 2846 if (mInvalidatePreviewArray) { 2847 int previewIndex = 0; 2848 int totalEffects = 0; 2849 int storyBoardTime = 0; 2850 int maxHeight = 0; 2851 int beginCutTime = 0; 2852 int endCutTime = 0; 2853 int effectIndex = 0; 2854 Transition lTransition = null; 2855 MediaItem lMediaItem = null; 2856 mPreviewEditSettings = new EditSettings(); 2857 mClipProperties = new PreviewClipProperties(); 2858 mTotalClips = 0; 2859 2860 mTotalClips = mediaItemsList.size(); 2861 for (Transition transition : mediaTransitionList) { 2862 if (transition.getDuration() > 0) { 2863 mTotalClips++; 2864 } 2865 } 2866 2867 totalEffects = getTotalEffects(mediaItemsList); 2868 2869 mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips]; 2870 mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects]; 2871 mClipProperties.clipProperties = new Properties[mTotalClips]; 2872 2873 /** record the call back progress listener */ 2874 mMediaProcessingProgressListener = listener; 2875 mProgressToApp = 0; 2876 2877 if (mediaItemsList.size() > 0) { 2878 for (int i = 0; i < mediaItemsList.size(); i++) { 2879 /* Get the Media Item from the list */ 2880 lMediaItem = mediaItemsList.get(i); 2881 if (lMediaItem instanceof MediaVideoItem) { 2882 beginCutTime = 
(int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime(); 2883 endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime(); 2884 } else if (lMediaItem instanceof MediaImageItem) { 2885 beginCutTime = 0; 2886 endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration(); 2887 } 2888 /* Get the transition associated with Media Item */ 2889 lTransition = lMediaItem.getBeginTransition(); 2890 if (lTransition != null && (lTransition.getDuration() > 0)) { 2891 /* generate transition clip */ 2892 generateTransition(lTransition, mPreviewEditSettings, 2893 mClipProperties, previewIndex); 2894 storyBoardTime += mClipProperties.clipProperties[previewIndex].duration; 2895 previewIndex++; 2896 } 2897 /* Populate media item properties */ 2898 maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight); 2899 /* Get the clip properties of the media item. */ 2900 if (lMediaItem instanceof MediaImageItem) { 2901 int tmpCnt = 0; 2902 boolean bEffectKbPresent = false; 2903 final List<Effect> effectList = lMediaItem.getAllEffects(); 2904 /** 2905 * Check if Ken Burns effect is present 2906 */ 2907 while (tmpCnt < effectList.size()) { 2908 if (effectList.get(tmpCnt) instanceof EffectKenBurns) { 2909 bEffectKbPresent = true; 2910 break; 2911 } 2912 tmpCnt++; 2913 } 2914 2915 if (bEffectKbPresent) { 2916 try { 2917 if(((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) { 2918 mClipProperties.clipProperties[previewIndex] 2919 = getMediaProperties(((MediaImageItem)lMediaItem). 2920 getGeneratedImageClip()); 2921 } 2922 else { 2923 mClipProperties.clipProperties[previewIndex] 2924 = getMediaProperties(((MediaImageItem)lMediaItem). 
2925 getScaledImageFileName()); 2926 mClipProperties.clipProperties[previewIndex].width = 2927 ((MediaImageItem)lMediaItem).getScaledWidth(); 2928 mClipProperties.clipProperties[previewIndex].height = 2929 ((MediaImageItem)lMediaItem).getScaledHeight(); 2930 } 2931 } catch (Exception e) { 2932 throw new IllegalArgumentException("Unsupported file or file not found"); 2933 } 2934 } else { 2935 try { 2936 mClipProperties.clipProperties[previewIndex] 2937 = getMediaProperties(((MediaImageItem)lMediaItem). 2938 getScaledImageFileName()); 2939 } catch (Exception e) { 2940 throw new IllegalArgumentException("Unsupported file or file not found"); 2941 } 2942 mClipProperties.clipProperties[previewIndex].width = 2943 ((MediaImageItem)lMediaItem).getScaledWidth(); 2944 mClipProperties.clipProperties[previewIndex].height = 2945 ((MediaImageItem)lMediaItem).getScaledHeight(); 2946 } 2947 } else { 2948 try { 2949 mClipProperties.clipProperties[previewIndex] 2950 = getMediaProperties(lMediaItem.getFilename()); 2951 } catch (Exception e) { 2952 throw new IllegalArgumentException("Unsupported file or file not found"); 2953 } 2954 } 2955 mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId(); 2956 checkOddSizeImage(lMediaItem, mClipProperties, previewIndex); 2957 adjustVolume(lMediaItem, mClipProperties, previewIndex); 2958 2959 /* 2960 * Adjust media item start time and end time w.r.t to begin 2961 * and end transitions associated with media item 2962 */ 2963 2964 adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex], 2965 mClipProperties.clipProperties[previewIndex], lMediaItem); 2966 2967 /* 2968 * Get all the effects and overlays for that media item and 2969 * adjust start time and duration of effects 2970 */ 2971 2972 effectIndex = populateEffects(lMediaItem, 2973 mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime, 2974 endCutTime, storyBoardTime); 2975 storyBoardTime += mClipProperties.clipProperties[previewIndex].duration; 
                previewIndex++;

                /* Check if there is any end transition at last media item */

                if (i == (mediaItemsList.size() - 1)) {
                    lTransition = lMediaItem.getEndTransition();
                    if (lTransition != null && (lTransition.getDuration() > 0)) {
                        generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
                            previewIndex);
                        break;
                    }
                }
            }

            if (!mErrorFlagSet) {
                // Output size follows the storyboard aspect ratio and the tallest clip seen.
                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
                    .getAspectRatio(), maxHeight);
                populateBackgroundMusicProperties(mediaBGMList);

                /** call to native populate settings */
                try {
                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
                } catch (IllegalArgumentException ex) {
                    Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
                    throw ex;
                } catch (IllegalStateException ex) {
                    Log.e(TAG, "Illegal state exception in nativePopulateSettings");
                    throw ex;
                } catch (RuntimeException ex) {
                    Log.e(TAG, "Runtime exception in nativePopulateSettings");
                    throw ex;
                }
                // Preview data is now valid; clear the regeneration flag.
                mInvalidatePreviewArray = false;
                mProcessingState = PROCESSING_NONE;
            }
        }
        if (mErrorFlagSet) {
            mErrorFlagSet = false;
            throw new RuntimeException("preview generation cannot be completed");
        }
    }
    } /* END of previewStoryBoard */

    /**
     * This function is responsible for starting the preview
     *
     * @param surface The surface on which preview has to be displayed
     * @param fromMs The time in ms from which preview has to be started
     * @param toMs The time in ms till preview has to be played
     * @param loop To loop the preview or not
     * @param callbackAfterFrameCount Indicates after how many frames
     *        the progress callback is needed
     * @param listener The PreviewProgressListener
     * @throws IllegalStateException if preview data is stale (generatePreview in progress)
     */
    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
            int callbackAfterFrameCount, PreviewProgressListener listener) {
        mPreviewProgress = fromMs;
        mIsFirstProgress = true;
        mPreviewProgressListener = listener;

        if (!mInvalidatePreviewArray) {
            try {
                /** Modify the image files names to rgb image files. */
                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
                        clipCnt++) {
                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
                        // Preview plays from the pre-decoded RGB file, not the original JPEG.
                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
                    }
                }
                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
            } catch (IllegalArgumentException ex) {
                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
                throw ex;
            } catch (IllegalStateException ex) {
                Log.e(TAG, "Illegal state exception in nativeStartPreview");
                throw ex;
            } catch (RuntimeException ex) {
                Log.e(TAG, "Runtime exception in nativeStartPreview");
                throw ex;
            }
        } else {
            throw new IllegalStateException("generatePreview is in progress");
        }
    }

    /**
     * This function is responsible for stopping the preview
     */
    long stopPreview() {
        return nativeStopPreview();
    }
surfaceHeight, VideoEditor.OverlayData overlayData) { 3086 if (mInvalidatePreviewArray) { 3087 if (Log.isLoggable(TAG, Log.DEBUG)) { 3088 Log.d(TAG, "Call generate preview first"); 3089 } 3090 throw new IllegalStateException("Call generate preview first"); 3091 } 3092 3093 long timeMs = 0; 3094 try { 3095 for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; 3096 clipCnt++) { 3097 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) { 3098 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath = 3099 mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath; 3100 } 3101 } 3102 3103 // Reset the render preview frame params that shall be set by native. 3104 mRenderPreviewOverlayFile = null; 3105 mRenderPreviewRenderingMode = MediaRendering.RESIZING; 3106 3107 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings); 3108 3109 timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight); 3110 3111 if (mRenderPreviewOverlayFile != null) { 3112 overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile), 3113 mRenderPreviewRenderingMode); 3114 } else { 3115 overlayData.setClear(); 3116 } 3117 } catch (IllegalArgumentException ex) { 3118 Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame"); 3119 throw ex; 3120 } catch (IllegalStateException ex) { 3121 Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame"); 3122 throw ex; 3123 } catch (RuntimeException ex) { 3124 Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame"); 3125 throw ex; 3126 } 3127 3128 return timeMs; 3129 } 3130 3131 private void previewFrameEditInfo(String filename, int renderingMode) { 3132 mRenderPreviewOverlayFile = filename; 3133 mRenderPreviewRenderingMode = renderingMode; 3134 } 3135 3136 3137 /** 3138 * This function is responsible for rendering a single frame 3139 * from a single media item on the surface 3140 * 3141 * @param surface The surface on which frame 
has to be rendered 3142 * @param filepath The file path for which the frame needs to be displayed 3143 * @param time The time in ms at which the frame has to be rendered 3144 * @param framewidth The frame width 3145 * @param framewidth The frame height 3146 * 3147 * @return The actual time from media item at which the frame was extracted 3148 * and rendered 3149 */ 3150 long renderMediaItemPreviewFrame(Surface surface, String filepath, 3151 long time, int framewidth, int frameheight) { 3152 long timeMs = 0; 3153 try { 3154 timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth, 3155 frameheight, 0, 0, time); 3156 } catch (IllegalArgumentException ex) { 3157 Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame"); 3158 throw ex; 3159 } catch (IllegalStateException ex) { 3160 Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame"); 3161 throw ex; 3162 } catch (RuntimeException ex) { 3163 Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame"); 3164 throw ex; 3165 } 3166 3167 return timeMs; 3168 } 3169 3170 /** 3171 * This function sets the flag to invalidate the preview array 3172 * and for generating the preview again 3173 */ 3174 void setGeneratePreview(boolean isRequired) { 3175 boolean semAcquiredDone = false; 3176 try { 3177 lock(); 3178 semAcquiredDone = true; 3179 mInvalidatePreviewArray = isRequired; 3180 } catch (InterruptedException ex) { 3181 Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame"); 3182 } finally { 3183 if (semAcquiredDone) { 3184 unlock(); 3185 } 3186 } 3187 } 3188 3189 /** 3190 * @return Returns the current status of preview invalidation 3191 * flag 3192 */ 3193 boolean getGeneratePreview() { 3194 return mInvalidatePreviewArray; 3195 } 3196 3197 /** 3198 * Calculates the aspect ratio from widht and height 3199 * 3200 * @param w The width of media item 3201 * @param h The height of media item 3202 * 3203 * @return The calculated aspect ratio 3204 */ 3205 int 
getAspectRatio(int w, int h) { 3206 double apRatio = (double)(w) / (double)(h); 3207 BigDecimal bd = new BigDecimal(apRatio); 3208 bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP); 3209 apRatio = bd.doubleValue(); 3210 int var = MediaProperties.ASPECT_RATIO_16_9; 3211 if (apRatio >= 1.7) { 3212 var = MediaProperties.ASPECT_RATIO_16_9; 3213 } else if (apRatio >= 1.6) { 3214 var = MediaProperties.ASPECT_RATIO_5_3; 3215 } else if (apRatio >= 1.5) { 3216 var = MediaProperties.ASPECT_RATIO_3_2; 3217 } else if (apRatio > 1.3) { 3218 var = MediaProperties.ASPECT_RATIO_4_3; 3219 } else if (apRatio >= 1.2) { 3220 var = MediaProperties.ASPECT_RATIO_11_9; 3221 } 3222 return var; 3223 } 3224 3225 /** 3226 * Maps the file type used in native layer 3227 * to file type used in JAVA layer 3228 * 3229 * @param fileType The file type in native layer 3230 * 3231 * @return The File type in JAVA layer 3232 */ 3233 int getFileType(int fileType) { 3234 int retValue = -1; 3235 switch (fileType) { 3236 case FileType.UNSUPPORTED: 3237 retValue = MediaProperties.FILE_UNSUPPORTED; 3238 break; 3239 case FileType.THREE_GPP: 3240 retValue = MediaProperties.FILE_3GP; 3241 break; 3242 case FileType.MP4: 3243 retValue = MediaProperties.FILE_MP4; 3244 break; 3245 case FileType.JPG: 3246 retValue = MediaProperties.FILE_JPEG; 3247 break; 3248 case FileType.PNG: 3249 retValue = MediaProperties.FILE_PNG; 3250 break; 3251 case FileType.MP3: 3252 retValue = MediaProperties.FILE_MP3; 3253 break; 3254 case FileType.M4V: 3255 retValue = MediaProperties.FILE_M4V; 3256 break; 3257 3258 default: 3259 retValue = -1; 3260 } 3261 return retValue; 3262 } 3263 3264 /** 3265 * Maps the video codec type used in native layer 3266 * to video codec type used in JAVA layer 3267 * 3268 * @param codecType The video codec type in native layer 3269 * 3270 * @return The video codec type in JAVA layer 3271 */ 3272 int getVideoCodecType(int codecType) { 3273 int retValue = -1; 3274 switch (codecType) { 3275 case VideoFormat.H263: 
3276 retValue = MediaProperties.VCODEC_H263; 3277 break; 3278 case VideoFormat.H264: 3279 retValue = MediaProperties.VCODEC_H264BP; 3280 break; 3281 case VideoFormat.MPEG4: 3282 retValue = MediaProperties.VCODEC_MPEG4; 3283 break; 3284 case VideoFormat.UNSUPPORTED: 3285 3286 default: 3287 retValue = -1; 3288 } 3289 return retValue; 3290 } 3291 3292 /** 3293 * Maps the audio codec type used in native layer 3294 * to audio codec type used in JAVA layer 3295 * 3296 * @param audioType The audio codec type in native layer 3297 * 3298 * @return The audio codec type in JAVA layer 3299 */ 3300 int getAudioCodecType(int codecType) { 3301 int retValue = -1; 3302 switch (codecType) { 3303 case AudioFormat.AMR_NB: 3304 retValue = MediaProperties.ACODEC_AMRNB; 3305 break; 3306 case AudioFormat.AAC: 3307 retValue = MediaProperties.ACODEC_AAC_LC; 3308 break; 3309 case AudioFormat.MP3: 3310 retValue = MediaProperties.ACODEC_MP3; 3311 break; 3312 3313 default: 3314 retValue = -1; 3315 } 3316 return retValue; 3317 } 3318 3319 /** 3320 * Returns the frame rate as integer 3321 * 3322 * @param fps The fps as enum 3323 * 3324 * @return The frame rate as integer 3325 */ 3326 int getFrameRate(int fps) { 3327 int retValue = -1; 3328 switch (fps) { 3329 case VideoFrameRate.FR_5_FPS: 3330 retValue = 5; 3331 break; 3332 case VideoFrameRate.FR_7_5_FPS: 3333 retValue = 8; 3334 break; 3335 case VideoFrameRate.FR_10_FPS: 3336 retValue = 10; 3337 break; 3338 case VideoFrameRate.FR_12_5_FPS: 3339 retValue = 13; 3340 break; 3341 case VideoFrameRate.FR_15_FPS: 3342 retValue = 15; 3343 break; 3344 case VideoFrameRate.FR_20_FPS: 3345 retValue = 20; 3346 break; 3347 case VideoFrameRate.FR_25_FPS: 3348 retValue = 25; 3349 break; 3350 case VideoFrameRate.FR_30_FPS: 3351 retValue = 30; 3352 break; 3353 3354 default: 3355 retValue = -1; 3356 } 3357 return retValue; 3358 } 3359 3360 /** 3361 * Maps the file type used in JAVA layer 3362 * to file type used in native layer 3363 * 3364 * @param fileType The 
file type in JAVA layer 3365 * 3366 * @return The File type in native layer 3367 */ 3368 int getMediaItemFileType(int fileType) { 3369 int retValue = -1; 3370 3371 switch (fileType) { 3372 case MediaProperties.FILE_UNSUPPORTED: 3373 retValue = FileType.UNSUPPORTED; 3374 break; 3375 case MediaProperties.FILE_3GP: 3376 retValue = FileType.THREE_GPP; 3377 break; 3378 case MediaProperties.FILE_MP4: 3379 retValue = FileType.MP4; 3380 break; 3381 case MediaProperties.FILE_JPEG: 3382 retValue = FileType.JPG; 3383 break; 3384 case MediaProperties.FILE_PNG: 3385 retValue = FileType.PNG; 3386 break; 3387 case MediaProperties.FILE_M4V: 3388 retValue = FileType.M4V; 3389 break; 3390 3391 default: 3392 retValue = -1; 3393 } 3394 return retValue; 3395 3396 } 3397 3398 /** 3399 * Maps the rendering mode used in native layer 3400 * to rendering mode used in JAVA layer 3401 * 3402 * @param renderingMode The rendering mode in JAVA layer 3403 * 3404 * @return The rendering mode in native layer 3405 */ 3406 int getMediaItemRenderingMode(int renderingMode) { 3407 int retValue = -1; 3408 switch (renderingMode) { 3409 case MediaItem.RENDERING_MODE_BLACK_BORDER: 3410 retValue = MediaRendering.BLACK_BORDERS; 3411 break; 3412 case MediaItem.RENDERING_MODE_STRETCH: 3413 retValue = MediaRendering.RESIZING; 3414 break; 3415 case MediaItem.RENDERING_MODE_CROPPING: 3416 retValue = MediaRendering.CROPPING; 3417 break; 3418 3419 default: 3420 retValue = -1; 3421 } 3422 return retValue; 3423 } 3424 3425 /** 3426 * Maps the transition behavior used in JAVA layer 3427 * to transition behavior used in native layer 3428 * 3429 * @param transitionType The transition behavior in JAVA layer 3430 * 3431 * @return The transition behavior in native layer 3432 */ 3433 int getVideoTransitionBehaviour(int transitionType) { 3434 int retValue = -1; 3435 switch (transitionType) { 3436 case Transition.BEHAVIOR_SPEED_UP: 3437 retValue = TransitionBehaviour.SPEED_UP; 3438 break; 3439 case 
Transition.BEHAVIOR_SPEED_DOWN: 3440 retValue = TransitionBehaviour.SPEED_DOWN; 3441 break; 3442 case Transition.BEHAVIOR_LINEAR: 3443 retValue = TransitionBehaviour.LINEAR; 3444 break; 3445 case Transition.BEHAVIOR_MIDDLE_SLOW: 3446 retValue = TransitionBehaviour.SLOW_MIDDLE; 3447 break; 3448 case Transition.BEHAVIOR_MIDDLE_FAST: 3449 retValue = TransitionBehaviour.FAST_MIDDLE; 3450 break; 3451 3452 default: 3453 retValue = -1; 3454 } 3455 return retValue; 3456 } 3457 3458 /** 3459 * Maps the transition slide direction used in JAVA layer 3460 * to transition slide direction used in native layer 3461 * 3462 * @param slideDirection The transition slide direction 3463 * in JAVA layer 3464 * 3465 * @return The transition slide direction in native layer 3466 */ 3467 int getSlideSettingsDirection(int slideDirection) { 3468 int retValue = -1; 3469 switch (slideDirection) { 3470 case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN: 3471 retValue = SlideDirection.RIGHT_OUT_LEFT_IN; 3472 break; 3473 case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN: 3474 retValue = SlideDirection.LEFT_OUT_RIGTH_IN; 3475 break; 3476 case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN: 3477 retValue = SlideDirection.TOP_OUT_BOTTOM_IN; 3478 break; 3479 case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN: 3480 retValue = SlideDirection.BOTTOM_OUT_TOP_IN; 3481 break; 3482 3483 default: 3484 retValue = -1; 3485 } 3486 return retValue; 3487 } 3488 3489 /** 3490 * Maps the effect color type used in JAVA layer 3491 * to effect color type used in native layer 3492 * 3493 * @param effect The EffectColor reference 3494 * 3495 * @return The color effect value from native layer 3496 */ 3497 private int getEffectColorType(EffectColor effect) { 3498 int retValue = -1; 3499 switch (effect.getType()) { 3500 case EffectColor.TYPE_COLOR: 3501 if (effect.getColor() == EffectColor.GREEN) { 3502 retValue = VideoEffect.GREEN; 3503 } else if (effect.getColor() == EffectColor.PINK) { 3504 retValue = VideoEffect.PINK; 
    /**
     * Maps the effect color type used in JAVA layer
     * to effect color type used in native layer
     *
     * @param effect The EffectColor reference
     *
     * @return The color effect value from native layer, or -1 when unknown
     */
    private int getEffectColorType(EffectColor effect) {
        int retValue = -1;
        switch (effect.getType()) {
            case EffectColor.TYPE_COLOR:
                if (effect.getColor() == EffectColor.GREEN) {
                    retValue = VideoEffect.GREEN;
                } else if (effect.getColor() == EffectColor.PINK) {
                    retValue = VideoEffect.PINK;
                } else if (effect.getColor() == EffectColor.GRAY) {
                    retValue = VideoEffect.BLACK_AND_WHITE;
                } else {
                    // Any other color is passed through as a raw RGB565 tint.
                    retValue = VideoEffect.COLORRGB16;
                }
                break;
            case EffectColor.TYPE_GRADIENT:
                retValue = VideoEffect.GRADIENT;
                break;
            case EffectColor.TYPE_SEPIA:
                retValue = VideoEffect.SEPIA;
                break;
            case EffectColor.TYPE_NEGATIVE:
                retValue = VideoEffect.NEGATIVE;
                break;
            case EffectColor.TYPE_FIFTIES:
                retValue = VideoEffect.FIFTIES;
                break;

            default:
                retValue = -1;
        }
        return retValue;
    }

    /**
     * Calculates video resolution for output clip
     * based on clip's height and aspect ratio of storyboard
     *
     * @param aspectRatio The aspect ratio of story board
     * @param height The height of clip
     *
     * @return The video resolution
     */
    private int findVideoResolution(int aspectRatio, int height) {
        final Pair<Integer, Integer>[] resolutions;
        final Pair<Integer, Integer> maxResolution;
        int retValue = VideoFrameSize.SIZE_UNDEFINED;
        switch (aspectRatio) {
            case MediaProperties.ASPECT_RATIO_3_2:
                if (height == MediaProperties.HEIGHT_480)
                    retValue = VideoFrameSize.NTSC;
                else if (height == MediaProperties.HEIGHT_720)
                    retValue = VideoFrameSize.W720p;
                break;
            case MediaProperties.ASPECT_RATIO_16_9:
                if (height == MediaProperties.HEIGHT_480)
                    retValue = VideoFrameSize.WVGA16x9;
                else if (height == MediaProperties.HEIGHT_720)
                    retValue = VideoFrameSize.V720p;
                break;
            case MediaProperties.ASPECT_RATIO_4_3:
                if (height == MediaProperties.HEIGHT_480)
                    retValue = VideoFrameSize.VGA;
                else if (height == MediaProperties.HEIGHT_720)
                    retValue = VideoFrameSize.S720p;
                break;
            case MediaProperties.ASPECT_RATIO_5_3:
                if (height == MediaProperties.HEIGHT_480)
                    retValue = VideoFrameSize.WVGA;
                break;
            case MediaProperties.ASPECT_RATIO_11_9:
                if (height == MediaProperties.HEIGHT_144)
                    retValue = VideoFrameSize.QCIF;
                else if (height == MediaProperties.HEIGHT_288)
                    retValue = VideoFrameSize.CIF;
                break;
        }
        if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
            // No exact match: fall back to the highest supported resolution for
            // the editor's aspect ratio (single level of recursion; the
            // recursive call uses a supported height so it resolves directly).
            resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
            // Get the highest resolution
            maxResolution = resolutions[resolutions.length - 1];
            retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
        }

        return retValue;
    }

    /**
     * This method is responsible for exporting a movie
     *
     * @param filePath The output file path
     * @param projectDir The output project directory
     * @param height The height of clip
     * @param bitrate The bitrate at which the movie should be exported
     * @param mediaItemsList The media items list
     * @param mediaTransitionList The transitions list
     * @param mediaBGMList The background track list
     * @param listener The ExportProgressListener
     *
     */
    void export(String filePath, String projectDir, int height, int bitrate,
            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {

        int outBitrate = 0;
        mExportFilename = filePath;
        // Rebuild the edit settings from the storyboard before encoding.
        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
        mExportProgressListener = listener;

        mProgressToApp = 0;

        // Map the public bitrate constants onto the native encoder's supported
        // steps; the two lowest are rounded UP to the nearest native step.
        switch (bitrate) {
            case MediaProperties.BITRATE_28K:
                outBitrate = Bitrate.BR_32_KBPS;
                break;
            case MediaProperties.BITRATE_40K:
                outBitrate = Bitrate.BR_48_KBPS;
                break;
            case MediaProperties.BITRATE_64K:
                outBitrate = Bitrate.BR_64_KBPS;
                break;
            case MediaProperties.BITRATE_96K:
                outBitrate = Bitrate.BR_96_KBPS;
                break;
            case MediaProperties.BITRATE_128K:
                outBitrate = Bitrate.BR_128_KBPS;
                break;
            case MediaProperties.BITRATE_192K:
                outBitrate = Bitrate.BR_192_KBPS;
                break;
            case MediaProperties.BITRATE_256K:
                outBitrate = Bitrate.BR_256_KBPS;
                break;
            case MediaProperties.BITRATE_384K:
                outBitrate = Bitrate.BR_384_KBPS;
                break;
            case MediaProperties.BITRATE_512K:
                outBitrate = Bitrate.BR_512_KBPS;
                break;
            case MediaProperties.BITRATE_800K:
                outBitrate = Bitrate.BR_800_KBPS;
                break;
            case MediaProperties.BITRATE_2M:
                outBitrate = Bitrate.BR_2_MBPS;
                break;

            case MediaProperties.BITRATE_5M:
                outBitrate = Bitrate.BR_5_MBPS;
                break;
            case MediaProperties.BITRATE_8M:
                outBitrate = Bitrate.BR_8_MBPS;
                break;

            default:
                throw new IllegalArgumentException("Argument Bitrate incorrect");
        }
        // Fixed export output parameters: 30fps H.264 video + 96kbps stereo AAC.
        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
        mPreviewEditSettings.outputFile = mOutputFilename = filePath;

        int aspectRatio = mVideoEditor.getAspectRatio();
        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
        mPreviewEditSettings.videoFormat = VideoFormat.H264;
        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
        mPreviewEditSettings.maxFileSize = 0;
        mPreviewEditSettings.audioChannels = 2;
        mPreviewEditSettings.videoBitrate = outBitrate;
        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;

        // Transitions were already baked into the clips by previewStoryBoard;
        // export itself uses NONE between every pair of clips.
        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
        for (int index = 0; index < mTotalClips - 1; index++) {
            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
                VideoTransition.NONE;
            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
                AudioTransition.NONE;
        }

        // Unlike preview, export encodes image clips from their ORIGINAL files.
        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
            }
        }
        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);

        int err = 0;
        try {
            mProcessingState = PROCESSING_EXPORT;
            mProcessingObject = null;
            err = generateClip(mPreviewEditSettings);
            mProcessingState = PROCESSING_NONE;
        } catch (IllegalArgumentException ex) {
            Log.e(TAG, "IllegalArgument for generateClip");
            throw ex;
        } catch (IllegalStateException ex) {
            Log.e(TAG, "IllegalStateExceptiont for generateClip");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "RuntimeException for generateClip");
            throw ex;
        }

        if (err != 0) {
            Log.e(TAG, "RuntimeException for generateClip");
            throw new RuntimeException("generateClip failed with error=" + err);
        }

        mExportProgressListener = null;
    }

    /**
     * This methods takes care of stopping the Export process
     *
     * @param filename The name of the file for which export has to be stopped
     *        (NOTE(review): the parameter is currently unused — the method
     *        always deletes mExportFilename; confirm intent with callers)
     */
    void stop(String filename) {
        try {
            stopEncoding();
            // Remove the partially-written output file.
            new File(mExportFilename).delete();
        } catch (IllegalStateException ex) {
            Log.e(TAG, "Illegal state exception in unload settings");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "Runtime exception in unload settings");
            throw ex;
        }
    }
getPixels(String inputFile, int width, int height, long timeMS) { 3735 if (inputFile == null) { 3736 throw new IllegalArgumentException("Invalid input file"); 3737 } 3738 3739 /* Make width and height as even */ 3740 final int newWidth = (width + 1) & 0xFFFFFFFE; 3741 final int newHeight = (height + 1) & 0xFFFFFFFE; 3742 3743 /* Create a temp bitmap for resized thumbnails */ 3744 Bitmap tempBitmap = null; 3745 if ((newWidth != width) || (newHeight != height)) { 3746 tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888); 3747 } 3748 3749 IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4); 3750 Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 3751 nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS); 3752 3753 if ((newWidth == width) && (newHeight == height)) { 3754 bitmap.copyPixelsFromBuffer(rgb888); 3755 } else { 3756 /* Create a temp bitmap to be used for resize */ 3757 tempBitmap.copyPixelsFromBuffer(rgb888); 3758 3759 /* Create a canvas to resize */ 3760 final Canvas canvas = new Canvas(bitmap); 3761 canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight), 3762 new Rect(0, 0, width, height), sResizePaint); 3763 } 3764 3765 if (tempBitmap != null) { 3766 tempBitmap.recycle(); 3767 } 3768 3769 return bitmap; 3770 } 3771 3772 /** 3773 * This method extracts a list of frame from the 3774 * input file and returns the frame in bitmap array 3775 * 3776 * @param filename The inputFile 3777 * @param width The width of the output frame 3778 * @param height The height of the output frame 3779 * @param startMs The starting time in ms 3780 * @param endMs The end time in ms 3781 * @param thumbnailCount The number of frames to be extracted 3782 * from startMs to endMs 3783 * 3784 * @return The frames as bitmaps in bitmap array 3785 **/ 3786 Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs, 3787 int thumbnailCount) { 3788 int[] rgb888 = null; 
3789 int thumbnailSize = 0; 3790 Bitmap tempBitmap = null; 3791 3792 /* Make width and height as even */ 3793 final int newWidth = (width + 1) & 0xFFFFFFFE; 3794 final int newHeight = (height + 1) & 0xFFFFFFFE; 3795 thumbnailSize = newWidth * newHeight * 4; 3796 3797 /* Create a temp bitmap for resized thumbnails */ 3798 if ((newWidth != width) || (newHeight != height)) { 3799 tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888); 3800 } 3801 int i = 0; 3802 int deltaTime = (int)(endMs - startMs) / thumbnailCount; 3803 Bitmap[] bitmaps = null; 3804 3805 try { 3806 // This may result in out of Memory Error 3807 rgb888 = new int[thumbnailSize * thumbnailCount]; 3808 bitmaps = new Bitmap[thumbnailCount]; 3809 } catch (Throwable e) { 3810 // Allocating to new size with Fixed count 3811 try { 3812 rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED]; 3813 bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED]; 3814 thumbnailCount = MAX_THUMBNAIL_PERMITTED; 3815 } catch (Throwable ex) { 3816 throw new RuntimeException("Memory allocation fails, thumbnail count too large: " 3817 + thumbnailCount); 3818 } 3819 } 3820 IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize); 3821 nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount, 3822 startMs, endMs); 3823 3824 for (; i < thumbnailCount; i++) { 3825 bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); 3826 tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize); 3827 tmpBuffer.rewind(); 3828 3829 if ((newWidth == width) && (newHeight == height)) { 3830 bitmaps[i].copyPixelsFromBuffer(tmpBuffer); 3831 } else { 3832 /* Copy the out rgb buffer to temp bitmap */ 3833 tempBitmap.copyPixelsFromBuffer(tmpBuffer); 3834 3835 /* Create a canvas to resize */ 3836 final Canvas canvas = new Canvas(bitmaps[i]); 3837 canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight), 3838 new Rect(0, 0, width, height), sResizePaint); 3839 } 3840 } 3841 3842 if 
(tempBitmap != null) { 3843 tempBitmap.recycle(); 3844 } 3845 3846 return bitmaps; 3847 } 3848 3849 /** 3850 * This method generates the audio graph 3851 * 3852 * @param uniqueId The unique id 3853 * @param inFileName The inputFile 3854 * @param OutAudiGraphFileName output filename 3855 * @param frameDuration The each frame duration 3856 * @param audioChannels The number of audio channels 3857 * @param samplesCount Total number of samples count 3858 * @param listener ExtractAudioWaveformProgressListener reference 3859 * @param isVideo The flag to indicate if the file is video file or not 3860 * 3861 **/ 3862 void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName, 3863 int frameDuration, int audioChannels, int samplesCount, 3864 ExtractAudioWaveformProgressListener listener, boolean isVideo) { 3865 String tempPCMFileName; 3866 3867 mExtractAudioWaveformProgressListener = listener; 3868 3869 /** 3870 * In case of Video, first call will generate the PCM file to make the 3871 * audio graph 3872 */ 3873 if (isVideo) { 3874 tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm"); 3875 } else { 3876 tempPCMFileName = mAudioTrackPCMFilePath; 3877 } 3878 3879 /** 3880 * For Video item, generate the PCM 3881 */ 3882 if (isVideo) { 3883 nativeGenerateRawAudio(inFileName, tempPCMFileName); 3884 } 3885 3886 nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration, 3887 audioChannels, samplesCount); 3888 3889 /** 3890 * Once the audio graph file is generated, delete the pcm file 3891 */ 3892 if (isVideo) { 3893 new File(tempPCMFileName).delete(); 3894 } 3895 } 3896 3897 void clearPreviewSurface(Surface surface) { 3898 nativeClearSurface(surface); 3899 } 3900 3901 /** 3902 * Grab the semaphore which arbitrates access to the editor 3903 * 3904 * @throws InterruptedException 3905 */ 3906 private void lock() throws InterruptedException { 3907 if (Log.isLoggable(TAG, Log.DEBUG)) { 3908 Log.d(TAG, "lock: grabbing 
    /**
     * Release the semaphore which arbitrates access to the editor
     */
    private void unlock() {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "unlock: releasing semaphore");
        }
        mLock.release();
    }

    /** Native Methods */

    /** Probes the given file and returns its media properties. */
    native Properties getMediaProperties(String file) throws IllegalArgumentException,
            IllegalStateException, RuntimeException, Exception;

    /**
     * Get the version of ManualEdit.
     *
     * @return version of ManualEdit
     * @throws RuntimeException if an error occurred
     * @see Version
     */
    private static native Version getVersion() throws RuntimeException;

    /**
     * Returns the video thumbnail in an array of integers. Output format is
     * ARGB8888.
     *
     * @param pixelArray the array that receives the pixel values
     * @param width width of the video thumbnail
     * @param height height of the video thumbnail
     * @param timeMS desired time of the thumbnail in ms
     * @return actual time in ms of the thumbnail generated
     * @throws IllegalStateException if the class has not been initialized
     * @throws IllegalArgumentException if the pixelArray is not available or
     *             one of the dimensions is negative or zero or the time is
     *             negative
     * @throws RuntimeException on runtime errors in native code
     */
    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
            long timeMS);

    /** Batch variant of nativeGetPixels: fills pixelArray with nosofTN frames. */
    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);

    /**
     * Releases the JNI and cleans up the core native module. Should be called
     * only after init( )
     *
     * @throws IllegalStateException if the method could not be called
     */
    private native void release() throws IllegalStateException, RuntimeException;

    /*
     * Clear the preview surface
     */
    private native void nativeClearSurface(Surface surface);

    /**
     * Stops the encoding. This method should only be called after encoding has
     * started using method <code> startEncoding</code>
     *
     * @throws IllegalStateException if the method could not be called
     */
    private native void stopEncoding() throws IllegalStateException, RuntimeException;

    /** Initializes the native module with the project temp and library paths. */
    private native void _init(String tempPath, String libraryPath)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Starts storyboard preview playback on the given surface. */
    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
            IllegalStateException, RuntimeException;

    /** Pushes the current edit/clip/audio settings down to the native layer. */
    private native void nativePopulateSettings(EditSettings editSettings,
            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Renders one storyboard frame; returns the actual extracted time in ms. */
    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
            int surfaceWidth, int surfaceHeight)
            throws IllegalArgumentException,
            IllegalStateException, RuntimeException;

    /** Renders one frame of a single media item; returns the actual time in ms. */
    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Stops preview playback; returns the last rendered storyboard time in ms. */
    private native int nativeStopPreview();

    /** Generates the audio graph file from a PCM file. */
    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
            int frameDuration, int channels, int sampleCount);

    /** Extracts raw PCM audio from a media file. */
    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);

    /** Encodes the final clip described by the edit settings; 0 on success. */
    private native int nativeGenerateClip(EditSettings editSettings)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

}