MediaArtistNativeHelper.java revision 2bb13573d619e3371d06847d36db78a20b17dfab
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.media.videoeditor; 18 19import java.io.File; 20import java.io.IOException; 21import java.math.BigDecimal; 22import java.nio.IntBuffer; 23import java.util.Iterator; 24import java.util.List; 25import java.util.concurrent.Semaphore; 26 27import android.graphics.Bitmap; 28import android.graphics.BitmapFactory; 29import android.graphics.Canvas; 30import android.graphics.Paint; 31import android.graphics.Rect; 32import android.media.videoeditor.VideoEditor.ExportProgressListener; 33import android.media.videoeditor.VideoEditor.PreviewProgressListener; 34import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener; 35import android.util.Log; 36import android.util.Pair; 37import android.view.Surface; 38 39/** 40 *This class provide Native methods to be used by MediaArtist {@hide} 41 */ 42class MediaArtistNativeHelper { 43 private static final String TAG = "MediaArtistNativeHelper"; 44 45 static { 46 System.loadLibrary("videoeditor_jni"); 47 } 48 49 private static final int MAX_THUMBNAIL_PERMITTED = 8; 50 51 public static final int TASK_LOADING_SETTINGS = 1; 52 public static final int TASK_ENCODING = 2; 53 54 /** 55 * The resize paint 56 */ 57 private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG); 58 59 private final VideoEditor mVideoEditor; 60 61 private EditSettings mStoryBoardSettings; 62 
63 private String mOutputFilename; 64 65 private PreviewClipProperties mClipProperties = null; 66 67 private EditSettings mPreviewEditSettings; 68 69 private AudioSettings mAudioSettings = null; 70 71 private AudioTrack mAudioTrack = null; 72 73 private boolean mInvalidatePreviewArray = true; 74 75 private boolean mRegenerateAudio = true; 76 77 private String mExportFilename = null; 78 79 private int mProgressToApp; 80 81 /* 82 * Semaphore to control preview calls 83 */ 84 private final Semaphore mLock = new Semaphore(1, true); 85 86 private String mRenderPreviewOverlayFile; 87 private int mRenderPreviewRenderingMode; 88 89 private boolean mIsFirstProgress; 90 91 private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm"; 92 93 // Processing indication 94 public static final int PROCESSING_NONE = 0; 95 public static final int PROCESSING_AUDIO_PCM = 1; 96 public static final int PROCESSING_TRANSITION = 2; 97 public static final int PROCESSING_KENBURNS = 3; 98 public static final int PROCESSING_INTERMEDIATE1 = 11; 99 public static final int PROCESSING_INTERMEDIATE2 = 12; 100 public static final int PROCESSING_INTERMEDIATE3 = 13; 101 public static final int PROCESSING_EXPORT = 20; 102 103 private int mProcessingState; 104 private Object mProcessingObject; 105 private PreviewProgressListener mPreviewProgressListener; 106 private ExportProgressListener mExportProgressListener; 107 private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener; 108 private MediaProcessingProgressListener mMediaProcessingProgressListener; 109 private final String mProjectPath; 110 111 private long mPreviewProgress; 112 113 private String mAudioTrackPCMFilePath; 114 115 private int mTotalClips = 0; 116 117 private boolean mErrorFlagSet = false; 118 119 @SuppressWarnings("unused") 120 private int mManualEditContext; 121 122 /* Listeners */ 123 124 /** 125 * Interface definition for a listener to be invoked when there is an update 126 * in a running task. 
127 */ 128 public interface OnProgressUpdateListener { 129 /** 130 * Called when there is an update. 131 * 132 * @param taskId id of the task reporting an update. 133 * @param progress progress of the task [0..100]. 134 * @see BasicEdit#TASK_ENCODING 135 */ 136 public void OnProgressUpdate(int taskId, int progress); 137 } 138 139 /** Defines the version. */ 140 public final class Version { 141 142 /** Major version number */ 143 public int major; 144 145 /** Minor version number */ 146 public int minor; 147 148 /** Revision number */ 149 public int revision; 150 151 /** VIDEOEDITOR major version number */ 152 private static final int VIDEOEDITOR_MAJOR_VERSION = 0; 153 154 /** VIDEOEDITOR minor version number */ 155 private static final int VIDEOEDITOR_MINOR_VERSION = 0; 156 157 /** VIDEOEDITOR revision number */ 158 private static final int VIDEOEDITOR_REVISION_VERSION = 1; 159 160 /** Method which returns the current VIDEOEDITOR version */ 161 public Version getVersion() { 162 Version version = new Version(); 163 164 version.major = Version.VIDEOEDITOR_MAJOR_VERSION; 165 version.minor = Version.VIDEOEDITOR_MINOR_VERSION; 166 version.revision = Version.VIDEOEDITOR_REVISION_VERSION; 167 168 return version; 169 } 170 } 171 172 /** 173 * Defines output audio formats. 174 */ 175 public final class AudioFormat { 176 /** No audio present in output clip. Used to generate video only clip */ 177 public static final int NO_AUDIO = 0; 178 179 /** AMR Narrow Band. */ 180 public static final int AMR_NB = 1; 181 182 /** Advanced Audio Coding (AAC). */ 183 public static final int AAC = 2; 184 185 /** Advanced Audio Codec Plus (HE-AAC v1). */ 186 public static final int AAC_PLUS = 3; 187 188 /** Advanced Audio Codec Plus (HE-AAC v2). */ 189 public static final int ENHANCED_AAC_PLUS = 4; 190 191 /** MPEG layer 3 (MP3). */ 192 public static final int MP3 = 5; 193 194 /** Enhanced Variable RateCodec (EVRC). */ 195 public static final int EVRC = 6; 196 197 /** PCM (PCM). 
*/ 198 public static final int PCM = 7; 199 200 /** No transcoding. Output audio format is same as input audio format */ 201 public static final int NULL_AUDIO = 254; 202 203 /** Unsupported audio format. */ 204 public static final int UNSUPPORTED_AUDIO = 255; 205 } 206 207 /** 208 * Defines audio sampling frequencies. 209 */ 210 public final class AudioSamplingFrequency { 211 /** 212 * Default sampling frequency. Uses the default frequency for a specific 213 * audio format. For AAC the only supported (and thus default) sampling 214 * frequency is 16 kHz. For this audio format the sampling frequency in 215 * the OutputParams. 216 **/ 217 public static final int FREQ_DEFAULT = 0; 218 219 /** Audio sampling frequency of 8000 Hz. */ 220 public static final int FREQ_8000 = 8000; 221 222 /** Audio sampling frequency of 11025 Hz. */ 223 public static final int FREQ_11025 = 11025; 224 225 /** Audio sampling frequency of 12000 Hz. */ 226 public static final int FREQ_12000 = 12000; 227 228 /** Audio sampling frequency of 16000 Hz. */ 229 public static final int FREQ_16000 = 16000; 230 231 /** Audio sampling frequency of 22050 Hz. */ 232 public static final int FREQ_22050 = 22050; 233 234 /** Audio sampling frequency of 24000 Hz. */ 235 public static final int FREQ_24000 = 24000; 236 237 /** Audio sampling frequency of 32000 Hz. */ 238 public static final int FREQ_32000 = 32000; 239 240 /** Audio sampling frequency of 44100 Hz. */ 241 public static final int FREQ_44100 = 44100; 242 243 /** Audio sampling frequency of 48000 Hz. Not available for output file. */ 244 public static final int FREQ_48000 = 48000; 245 } 246 247 /** 248 * Defines the supported fixed audio and video bitrates. These values are 249 * for output audio video only. 250 */ 251 public final class Bitrate { 252 /** Variable bitrate. Means no bitrate regulation */ 253 public static final int VARIABLE = -1; 254 255 /** An undefined bitrate. 
*/ 256 public static final int UNDEFINED = 0; 257 258 /** A bitrate of 9.2 kbits/s. */ 259 public static final int BR_9_2_KBPS = 9200; 260 261 /** A bitrate of 12.2 kbits/s. */ 262 public static final int BR_12_2_KBPS = 12200; 263 264 /** A bitrate of 16 kbits/s. */ 265 public static final int BR_16_KBPS = 16000; 266 267 /** A bitrate of 24 kbits/s. */ 268 public static final int BR_24_KBPS = 24000; 269 270 /** A bitrate of 32 kbits/s. */ 271 public static final int BR_32_KBPS = 32000; 272 273 /** A bitrate of 48 kbits/s. */ 274 public static final int BR_48_KBPS = 48000; 275 276 /** A bitrate of 64 kbits/s. */ 277 public static final int BR_64_KBPS = 64000; 278 279 /** A bitrate of 96 kbits/s. */ 280 public static final int BR_96_KBPS = 96000; 281 282 /** A bitrate of 128 kbits/s. */ 283 public static final int BR_128_KBPS = 128000; 284 285 /** A bitrate of 192 kbits/s. */ 286 public static final int BR_192_KBPS = 192000; 287 288 /** A bitrate of 256 kbits/s. */ 289 public static final int BR_256_KBPS = 256000; 290 291 /** A bitrate of 288 kbits/s. */ 292 public static final int BR_288_KBPS = 288000; 293 294 /** A bitrate of 384 kbits/s. */ 295 public static final int BR_384_KBPS = 384000; 296 297 /** A bitrate of 512 kbits/s. */ 298 public static final int BR_512_KBPS = 512000; 299 300 /** A bitrate of 800 kbits/s. */ 301 public static final int BR_800_KBPS = 800000; 302 303 /** A bitrate of 2 Mbits/s. */ 304 public static final int BR_2_MBPS = 2000000; 305 306 /** A bitrate of 5 Mbits/s. */ 307 public static final int BR_5_MBPS = 5000000; 308 309 /** A bitrate of 8 Mbits/s. */ 310 public static final int BR_8_MBPS = 8000000; 311 } 312 313 /** 314 * Defines all supported file types. 315 */ 316 public final class FileType { 317 /** 3GPP file type. */ 318 public static final int THREE_GPP = 0; 319 320 /** MP4 file type. */ 321 public static final int MP4 = 1; 322 323 /** AMR file type. */ 324 public static final int AMR = 2; 325 326 /** MP3 audio file type. 
*/ 327 public static final int MP3 = 3; 328 329 /** PCM audio file type. */ 330 public static final int PCM = 4; 331 332 /** JPEG image file type. */ 333 public static final int JPG = 5; 334 335 /** GIF image file type. */ 336 public static final int GIF = 7; 337 338 /** PNG image file type. */ 339 public static final int PNG = 8; 340 341 /** M4V file type. */ 342 public static final int M4V = 10; 343 344 /** Unsupported file type. */ 345 public static final int UNSUPPORTED = 255; 346 } 347 348 /** 349 * Defines rendering types. Rendering can only be applied to files 350 * containing video streams. 351 **/ 352 public final class MediaRendering { 353 /** 354 * Resize to fit the output video with changing the aspect ratio if 355 * needed. 356 */ 357 public static final int RESIZING = 0; 358 359 /** 360 * Crop the input video to fit it with the output video resolution. 361 **/ 362 public static final int CROPPING = 1; 363 364 /** 365 * Resize to fit the output video resolution but maintain the aspect 366 * ratio. This framing type adds black borders if needed. 367 */ 368 public static final int BLACK_BORDERS = 2; 369 } 370 371 /** 372 * Defines the results. 373 */ 374 public final class Result { 375 /** No error. result OK */ 376 public static final int NO_ERROR = 0; 377 378 /** File not found */ 379 public static final int ERR_FILE_NOT_FOUND = 1; 380 381 /** 382 * In case of UTF8 conversion, the size of the converted path will be 383 * more than the corresponding allocated buffer. 384 */ 385 public static final int ERR_BUFFER_OUT_TOO_SMALL = 2; 386 387 /** Invalid file type. */ 388 public static final int ERR_INVALID_FILE_TYPE = 3; 389 390 /** Invalid effect kind. */ 391 public static final int ERR_INVALID_EFFECT_KIND = 4; 392 393 /** Invalid video effect. */ 394 public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5; 395 396 /** Invalid audio effect. */ 397 public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6; 398 399 /** Invalid video transition. 
*/ 400 public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7; 401 402 /** Invalid audio transition. */ 403 public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8; 404 405 /** Invalid encoding frame rate. */ 406 public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9; 407 408 /** External effect is called but this function is not set. */ 409 public static final int ERR_EXTERNAL_EFFECT_NULL = 10; 410 411 /** External transition is called but this function is not set. */ 412 public static final int ERR_EXTERNAL_TRANSITION_NULL = 11; 413 414 /** Begin time cut is larger than the video clip duration. */ 415 public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12; 416 417 /** Begin cut time is larger or equal than end cut. */ 418 public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13; 419 420 /** Two consecutive transitions are overlapping on one clip. */ 421 public static final int ERR_OVERLAPPING_TRANSITIONS = 14; 422 423 /** Internal error, type size mismatch. */ 424 public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15; 425 426 /** An input 3GPP file is invalid/corrupted. */ 427 public static final int ERR_INVALID_3GPP_FILE = 16; 428 429 /** A file contains an unsupported video format. */ 430 public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17; 431 432 /** A file contains an unsupported audio format. */ 433 public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18; 434 435 /** A file format is not supported. */ 436 public static final int ERR_AMR_EDITING_UNSUPPORTED = 19; 437 438 /** An input clip has an unexpectedly large Video AU. */ 439 public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20; 440 441 /** An input clip has an unexpectedly large Audio AU. */ 442 public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21; 443 444 /** An input clip has a corrupted Audio AU. */ 445 public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22; 446 447 /** The video encoder encountered an Access Unit error. 
*/ 448 public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23; 449 450 /** Unsupported video format for Video Editing. */ 451 public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24; 452 453 /** Unsupported H263 profile for Video Editing. */ 454 public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25; 455 456 /** Unsupported MPEG-4 profile for Video Editing. */ 457 public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26; 458 459 /** Unsupported MPEG-4 RVLC tool for Video Editing. */ 460 public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27; 461 462 /** Unsupported audio format for Video Editing. */ 463 public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28; 464 465 /** File contains no supported stream. */ 466 public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29; 467 468 /** File contains no video stream or an unsupported video stream. */ 469 public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30; 470 471 /** Internal error, clip analysis version mismatch. */ 472 public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31; 473 474 /** 475 * At least one of the clip analysis has been generated on another 476 * platform (WIN32, ARM, etc.). 477 */ 478 public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32; 479 480 /** Clips don't have the same video format (H263 or MPEG4). */ 481 public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33; 482 483 /** Clips don't have the same frame size. */ 484 public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34; 485 486 /** Clips don't have the same MPEG-4 time scale. */ 487 public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35; 488 489 /** Clips don't have the same use of MPEG-4 data partitioning. */ 490 public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36; 491 492 /** MP3 clips can't be assembled. 
*/ 493 public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37; 494 495 /** 496 * The input 3GPP file does not contain any supported audio or video 497 * track. 498 */ 499 public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38; 500 501 /** 502 * The Volume of the added audio track (AddVolume) must be strictly 503 * superior than zero. 504 */ 505 public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39; 506 507 /** 508 * The time at which an audio track is added can't be higher than the 509 * input video track duration.. 510 */ 511 public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40; 512 513 /** The audio track file format setting is undefined. */ 514 public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41; 515 516 /** The added audio track stream has an unsupported format. */ 517 public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42; 518 519 /** The audio mixing feature doesn't support the audio track type. */ 520 public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43; 521 522 /** The audio mixing feature doesn't support MP3 audio tracks. */ 523 public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44; 524 525 /** 526 * An added audio track limits the available features: uiAddCts must be 527 * 0 and bRemoveOriginal must be true. 528 */ 529 public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45; 530 531 /** 532 * An added audio track limits the available features: uiAddCts must be 533 * 0 and bRemoveOriginal must be true. 534 */ 535 public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46; 536 537 /** Input audio track is not of a type that can be mixed with output. */ 538 public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47; 539 540 /** Input audio track is not AMR-NB, so it can't be mixed with output. 
*/ 541 public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48; 542 543 /** 544 * An added EVRC audio track limit the available features: uiAddCts must 545 * be 0 and bRemoveOriginal must be true. 546 */ 547 public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49; 548 549 /** H263 profiles other than 0 are not supported. */ 550 public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51; 551 552 /** File contains no video stream or an unsupported video stream. */ 553 public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52; 554 555 /** Transcoding of the input file(s) is necessary. */ 556 public static final int WAR_TRANSCODING_NECESSARY = 53; 557 558 /** 559 * The size of the output file will exceed the maximum configured value. 560 */ 561 public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54; 562 563 /** The time scale is too big. */ 564 public static final int WAR_TIMESCALE_TOO_BIG = 55; 565 566 /** The year is out of range */ 567 public static final int ERR_CLOCK_BAD_REF_YEAR = 56; 568 569 /** The directory could not be opened */ 570 public static final int ERR_DIR_OPEN_FAILED = 57; 571 572 /** The directory could not be read */ 573 public static final int ERR_DIR_READ_FAILED = 58; 574 575 /** There are no more entries in the current directory */ 576 public static final int ERR_DIR_NO_MORE_ENTRY = 59; 577 578 /** The input parameter/s has error */ 579 public static final int ERR_PARAMETER = 60; 580 581 /** There is a state machine error */ 582 public static final int ERR_STATE = 61; 583 584 /** Memory allocation failed */ 585 public static final int ERR_ALLOC = 62; 586 587 /** Context is invalid */ 588 public static final int ERR_BAD_CONTEXT = 63; 589 590 /** Context creation failed */ 591 public static final int ERR_CONTEXT_FAILED = 64; 592 593 /** Invalid stream ID */ 594 public static final int ERR_BAD_STREAM_ID = 65; 595 596 /** Invalid option ID */ 597 public static final int ERR_BAD_OPTION_ID = 66; 598 599 /** The option is 
write only */ 600 public static final int ERR_WRITE_ONLY = 67; 601 602 /** The option is read only */ 603 public static final int ERR_READ_ONLY = 68; 604 605 /** The feature is not implemented in this version */ 606 public static final int ERR_NOT_IMPLEMENTED = 69; 607 608 /** The media type is not supported */ 609 public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70; 610 611 /** No data to be encoded */ 612 public static final int WAR_NO_DATA_YET = 71; 613 614 /** No data to be decoded */ 615 public static final int WAR_NO_MORE_STREAM = 72; 616 617 /** Time stamp is invalid */ 618 public static final int WAR_INVALID_TIME = 73; 619 620 /** No more data to be decoded */ 621 public static final int WAR_NO_MORE_AU = 74; 622 623 /** Semaphore timed out */ 624 public static final int WAR_TIME_OUT = 75; 625 626 /** Memory buffer is full */ 627 public static final int WAR_BUFFER_FULL = 76; 628 629 /** Server has asked for redirection */ 630 public static final int WAR_REDIRECT = 77; 631 632 /** Too many streams in input */ 633 public static final int WAR_TOO_MUCH_STREAMS = 78; 634 635 /** The file cannot be opened/ written into as it is locked */ 636 public static final int ERR_FILE_LOCKED = 79; 637 638 /** The file access mode is invalid */ 639 public static final int ERR_FILE_BAD_MODE_ACCESS = 80; 640 641 /** The file pointer points to an invalid location */ 642 public static final int ERR_FILE_INVALID_POSITION = 81; 643 644 /** Invalid string */ 645 public static final int ERR_STR_BAD_STRING = 94; 646 647 /** The input string cannot be converted */ 648 public static final int ERR_STR_CONV_FAILED = 95; 649 650 /** The string size is too large */ 651 public static final int ERR_STR_OVERFLOW = 96; 652 653 /** Bad string arguments */ 654 public static final int ERR_STR_BAD_ARGS = 97; 655 656 /** The string value is larger than maximum size allowed */ 657 public static final int WAR_STR_OVERFLOW = 98; 658 659 /** The string value is not present in this comparison 
operation */ 660 public static final int WAR_STR_NOT_FOUND = 99; 661 662 /** The thread is not started */ 663 public static final int ERR_THREAD_NOT_STARTED = 100; 664 665 /** Trancoding done warning */ 666 public static final int WAR_TRANSCODING_DONE = 101; 667 668 /** Unsupported mediatype */ 669 public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102; 670 671 /** Input file contains invalid/unsupported streams */ 672 public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103; 673 674 /** Invalid input file */ 675 public static final int ERR_INVALID_INPUT_FILE = 104; 676 677 /** Invalid output video format */ 678 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105; 679 680 /** Invalid output video frame size */ 681 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106; 682 683 /** Invalid output video frame rate */ 684 public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107; 685 686 /** Invalid output audio format */ 687 public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108; 688 689 /** Invalid video frame size for H.263 */ 690 public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109; 691 692 /** Invalid video frame rate for H.263 */ 693 public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110; 694 695 /** invalid playback duration */ 696 public static final int ERR_DURATION_IS_NULL = 111; 697 698 /** Invalid H.263 profile in file */ 699 public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112; 700 701 /** Invalid AAC sampling frequency */ 702 public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113; 703 704 /** Audio conversion failure */ 705 public static final int ERR_AUDIO_CONVERSION_FAILED = 114; 706 707 /** Invalid trim start and end times */ 708 public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115; 709 710 /** End time smaller than start time for trim */ 711 public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116; 712 713 /** Output file size 
is small */ 714 public static final int ERR_MAXFILESIZE_TOO_SMALL = 117; 715 716 /** Output video bitrate is too low */ 717 public static final int ERR_VIDEOBITRATE_TOO_LOW = 118; 718 719 /** Output audio bitrate is too low */ 720 public static final int ERR_AUDIOBITRATE_TOO_LOW = 119; 721 722 /** Output video bitrate is too high */ 723 public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120; 724 725 /** Output audio bitrate is too high */ 726 public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121; 727 728 /** Output file size is too small */ 729 public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122; 730 731 /** Unknown stream type */ 732 public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123; 733 734 /** Invalid metadata in input stream */ 735 public static final int WAR_READER_NO_METADATA = 124; 736 737 /** Invalid file reader info warning */ 738 public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125; 739 740 /** Warning to indicate the the writer is being stopped */ 741 public static final int WAR_WRITER_STOP_REQ = 131; 742 743 /** Video decoder failed to provide frame for transcoding */ 744 public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132; 745 746 /** Video deblocking filter is not implemented */ 747 public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133; 748 749 /** H.263 decoder profile not supported */ 750 public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134; 751 752 /** The input file contains unsupported H.263 profile */ 753 public static final int ERR_DECODER_H263_NOT_BASELINE = 135; 754 755 /** There is no more space to store the output file */ 756 public static final int ERR_NOMORE_SPACE_FOR_FILE = 136; 757 758 /** Internal error. */ 759 public static final int ERR_INTERNAL = 255; 760 } 761 762 /** 763 * Defines output video formats. 764 */ 765 public final class VideoFormat { 766 /** No video present in output clip. 
Used to generate audio only clip */ 767 public static final int NO_VIDEO = 0; 768 769 /** H263 baseline format. */ 770 public static final int H263 = 1; 771 772 /** MPEG4 video Simple Profile format. */ 773 public static final int MPEG4 = 2; 774 775 /** MPEG4 video Simple Profile format with support for EMP. */ 776 public static final int MPEG4_EMP = 3; 777 778 /** H264 video */ 779 public static final int H264 = 4; 780 781 /** No transcoding. Output video format is same as input video format */ 782 public static final int NULL_VIDEO = 254; 783 784 /** Unsupported video format. */ 785 public static final int UNSUPPORTED = 255; 786 } 787 788 /** Defines video profiles and levels. */ 789 public final class VideoProfile { 790 /** MPEG4, Simple Profile, Level 0. */ 791 public static final int MPEG4_SP_LEVEL_0 = 0; 792 793 /** MPEG4, Simple Profile, Level 0B. */ 794 public static final int MPEG4_SP_LEVEL_0B = 1; 795 796 /** MPEG4, Simple Profile, Level 1. */ 797 public static final int MPEG4_SP_LEVEL_1 = 2; 798 799 /** MPEG4, Simple Profile, Level 2. */ 800 public static final int MPEG4_SP_LEVEL_2 = 3; 801 802 /** MPEG4, Simple Profile, Level 3. */ 803 public static final int MPEG4_SP_LEVEL_3 = 4; 804 805 /** H263, Profile 0, Level 10. */ 806 public static final int H263_PROFILE_0_LEVEL_10 = 5; 807 808 /** H263, Profile 0, Level 20. */ 809 public static final int H263_PROFILE_0_LEVEL_20 = 6; 810 811 /** H263, Profile 0, Level 30. */ 812 public static final int H263_PROFILE_0_LEVEL_30 = 7; 813 814 /** H263, Profile 0, Level 40. */ 815 public static final int H263_PROFILE_0_LEVEL_40 = 8; 816 817 /** H263, Profile 0, Level 45. */ 818 public static final int H263_PROFILE_0_LEVEL_45 = 9; 819 820 /** MPEG4, Simple Profile, Level 4A. */ 821 public static final int MPEG4_SP_LEVEL_4A = 10; 822 823 /** MPEG4, Simple Profile, Level 0. */ 824 public static final int MPEG4_SP_LEVEL_5 = 11; 825 826 /** H264, Profile 0, Level 1. 
*/ 827 public static final int H264_PROFILE_0_LEVEL_1 = 12; 828 829 /** H264, Profile 0, Level 1b. */ 830 public static final int H264_PROFILE_0_LEVEL_1b = 13; 831 832 /** H264, Profile 0, Level 1.1 */ 833 public static final int H264_PROFILE_0_LEVEL_1_1 = 14; 834 835 /** H264, Profile 0, Level 1.2 */ 836 public static final int H264_PROFILE_0_LEVEL_1_2 = 15; 837 838 /** H264, Profile 0, Level 1.3 */ 839 public static final int H264_PROFILE_0_LEVEL_1_3 = 16; 840 841 /** H264, Profile 0, Level 2. */ 842 public static final int H264_PROFILE_0_LEVEL_2 = 17; 843 844 /** H264, Profile 0, Level 2.1 */ 845 public static final int H264_PROFILE_0_LEVEL_2_1 = 18; 846 847 /** H264, Profile 0, Level 2.2 */ 848 public static final int H264_PROFILE_0_LEVEL_2_2 = 19; 849 850 /** H264, Profile 0, Level 3. */ 851 public static final int H264_PROFILE_0_LEVEL_3 = 20; 852 853 /** H264, Profile 0, Level 3.1 */ 854 public static final int H264_PROFILE_0_LEVEL_3_1 = 21; 855 856 /** H264, Profile 0, Level 3.2 */ 857 public static final int H264_PROFILE_0_LEVEL_3_2 = 22; 858 859 /** H264, Profile 0, Level 4. */ 860 public static final int H264_PROFILE_0_LEVEL_4 = 23; 861 862 /** H264, Profile 0, Level 4.1 */ 863 public static final int H264_PROFILE_0_LEVEL_4_1 = 24; 864 865 /** H264, Profile 0, Level 4.2 */ 866 public static final int H264_PROFILE_0_LEVEL_4_2 = 25; 867 868 /** H264, Profile 0, Level 5. */ 869 public static final int H264_PROFILE_0_LEVEL_5 = 26; 870 871 /** H264, Profile 0, Level 5.1 */ 872 public static final int H264_PROFILE_0_LEVEL_5_1 = 27; 873 874 /** Profile out of range. */ 875 public static final int OUT_OF_RANGE = 255; 876 } 877 878 /** Defines video frame sizes. */ 879 public final class VideoFrameSize { 880 881 public static final int SIZE_UNDEFINED = -1; 882 883 /** SQCIF 128 x 96 pixels. */ 884 public static final int SQCIF = 0; 885 886 /** QQVGA 160 x 120 pixels. */ 887 public static final int QQVGA = 1; 888 889 /** QCIF 176 x 144 pixels. 
*/ 890 public static final int QCIF = 2; 891 892 /** QVGA 320 x 240 pixels. */ 893 public static final int QVGA = 3; 894 895 /** CIF 352 x 288 pixels. */ 896 public static final int CIF = 4; 897 898 /** VGA 640 x 480 pixels. */ 899 public static final int VGA = 5; 900 901 /** WVGA 800 X 480 pixels */ 902 public static final int WVGA = 6; 903 904 /** NTSC 720 X 480 pixels */ 905 public static final int NTSC = 7; 906 907 /** 640 x 360 */ 908 public static final int nHD = 8; 909 910 /** 854 x 480 */ 911 public static final int WVGA16x9 = 9; 912 913 /** 720p 1280 X 720 */ 914 public static final int V720p = 10; 915 916 /** 1080 x 720 */ 917 public static final int W720p = 11; 918 919 /** 1080 960 x 720 */ 920 public static final int S720p = 12; 921 } 922 923 /** 924 * Defines output video frame rates. 925 */ 926 public final class VideoFrameRate { 927 /** Frame rate of 5 frames per second. */ 928 public static final int FR_5_FPS = 0; 929 930 /** Frame rate of 7.5 frames per second. */ 931 public static final int FR_7_5_FPS = 1; 932 933 /** Frame rate of 10 frames per second. */ 934 public static final int FR_10_FPS = 2; 935 936 /** Frame rate of 12.5 frames per second. */ 937 public static final int FR_12_5_FPS = 3; 938 939 /** Frame rate of 15 frames per second. */ 940 public static final int FR_15_FPS = 4; 941 942 /** Frame rate of 20 frames per second. */ 943 public static final int FR_20_FPS = 5; 944 945 /** Frame rate of 25 frames per second. */ 946 public static final int FR_25_FPS = 6; 947 948 /** Frame rate of 30 frames per second. */ 949 public static final int FR_30_FPS = 7; 950 } 951 952 /** 953 * Defines Video Effect Types. 
954 */ 955 public static class VideoEffect { 956 957 public static final int NONE = 0; 958 959 public static final int FADE_FROM_BLACK = 8; 960 961 public static final int CURTAIN_OPENING = 9; 962 963 public static final int FADE_TO_BLACK = 16; 964 965 public static final int CURTAIN_CLOSING = 17; 966 967 public static final int EXTERNAL = 256; 968 969 public static final int BLACK_AND_WHITE = 257; 970 971 public static final int PINK = 258; 972 973 public static final int GREEN = 259; 974 975 public static final int SEPIA = 260; 976 977 public static final int NEGATIVE = 261; 978 979 public static final int FRAMING = 262; 980 981 public static final int TEXT = 263; 982 983 public static final int ZOOM_IN = 264; 984 985 public static final int ZOOM_OUT = 265; 986 987 public static final int FIFTIES = 266; 988 989 public static final int COLORRGB16 = 267; 990 991 public static final int GRADIENT = 268; 992 } 993 994 /** 995 * Defines the video transitions. 996 */ 997 public static class VideoTransition { 998 /** No transition */ 999 public static final int NONE = 0; 1000 1001 /** Cross fade transition */ 1002 public static final int CROSS_FADE = 1; 1003 1004 /** External transition. Currently not available. */ 1005 public static final int EXTERNAL = 256; 1006 1007 /** AlphaMagic transition. */ 1008 public static final int ALPHA_MAGIC = 257; 1009 1010 /** Slide transition. */ 1011 public static final int SLIDE_TRANSITION = 258; 1012 1013 /** Fade to black transition. */ 1014 public static final int FADE_BLACK = 259; 1015 } 1016 1017 /** 1018 * Defines settings for the AlphaMagic transition 1019 */ 1020 public static class AlphaMagicSettings { 1021 /** Name of the alpha file (JPEG file). */ 1022 public String file; 1023 1024 /** Blending percentage [0..100] 0 = no blending. */ 1025 public int blendingPercent; 1026 1027 /** Invert the default rotation direction of the AlphaMagic effect. 
 */
        public boolean invertRotation;

        // NOTE(review): presumably the pixel dimensions of the decoded alpha
        // file RGB data — confirm against native usage.
        public int rgbWidth;
        public int rgbHeight;
    }

    /** Defines the direction of the Slide transition. */
    public static final class SlideDirection {

        /** Right out, left in. */
        public static final int RIGHT_OUT_LEFT_IN = 0;

        /** Left out, right in. (Constant keeps its historical "RIGTH" typo; part of the API.) */
        public static final int LEFT_OUT_RIGTH_IN = 1;

        /** Top out, bottom in. */
        public static final int TOP_OUT_BOTTOM_IN = 2;

        /** Bottom out, top in. */
        public static final int BOTTOM_OUT_TOP_IN = 3;
    }

    /** Defines the Slide transition settings. */
    public static class SlideTransitionSettings {
        /**
         * Direction of the slide transition. See {@link SlideDirection
         * SlideDirection} for valid values.
         */
        public int direction;
    }

    /**
     * Defines the settings of a single clip.
     */
    public static class ClipSettings {

        /**
         * The path to the clip file.
         * <p>
         * File format of the clip, it can be:
         * <ul>
         * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
         * <li>JPG file
         * </ul>
         */
        public String clipPath;

        /** The path of the decoded file. This is used only for image files. */
        public String clipDecodedPath;

        /** The path of the original file. This is used only for image files. */
        public String clipOriginalPath;

        /** File type of the clip. See {@link FileType FileType} for valid values. */
        public int fileType;

        /** Begin of the cut in the clip in milliseconds. */
        public int beginCutTime;

        /**
         * End of the cut in the clip in milliseconds. Set both
         * <code>beginCutTime</code> and <code>endCutTime</code> to
         * <code>0</code> to get the full length of the clip without a cut. In
         * case of JPG clip, this is the duration of the JPEG file.
 */
        public int endCutTime;

        /** Begin of the cut in the clip in percentage of the file duration. */
        public int beginCutPercent;

        /**
         * End of the cut in the clip in percentage of the file duration. Set
         * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
         * <code>0</code> to get the full length of the clip without a cut.
         */
        public int endCutPercent;

        /** Enable panning and zooming. */
        public boolean panZoomEnabled;

        /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom. */
        public int panZoomPercentStart;

        /** Top left X coordinate at start of clip. */
        public int panZoomTopLeftXStart;

        /** Top left Y coordinate at start of clip. */
        public int panZoomTopLeftYStart;

        /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom. */
        public int panZoomPercentEnd;

        /** Top left X coordinate at end of clip. */
        public int panZoomTopLeftXEnd;

        /** Top left Y coordinate at end of clip. */
        public int panZoomTopLeftYEnd;

        /**
         * Set the media rendering. See {@link MediaRendering MediaRendering}
         * for valid values.
         */
        public int mediaRendering;

        // RGB width and height of the clip data.
        public int rgbWidth;
        public int rgbHeight;
    }

    /**
     * Defines settings for a transition.
     */
    public static class TransitionSettings {

        /** Duration of the transition in msec. */
        public int duration;

        /**
         * Transition type for video. See {@link VideoTransition
         * VideoTransition} for valid values.
         */
        public int videoTransitionType;

        /**
         * Transition type for audio. See {@link AudioTransition
         * AudioTransition} for valid values.
         */
        public int audioTransitionType;

        /**
         * Transition behaviour.
 See {@link TransitionBehaviour
         * TransitionBehaviour} for valid values.
         */
        public int transitionBehaviour;

        /**
         * Settings for AlphaMagic transition. Only needs to be set if
         * <code>videoTransitionType</code> is set to
         * <code>VideoTransition.ALPHA_MAGIC</code>. See
         * {@link AlphaMagicSettings AlphaMagicSettings}.
         */
        public AlphaMagicSettings alphaSettings;

        /**
         * Settings for the Slide transition. See
         * {@link SlideTransitionSettings SlideTransitionSettings}.
         */
        public SlideTransitionSettings slideSettings;
    }

    /** Defines the audio transition types. */
    public static final class AudioTransition {
        /** No audio transition. */
        public static final int NONE = 0;

        /** Cross-fade audio transition. */
        public static final int CROSS_FADE = 1;
    }

    /**
     * Defines transition behaviors.
     */
    public static final class TransitionBehaviour {

        /** The transition uses an increasing speed. */
        public static final int SPEED_UP = 0;

        /** The transition uses a linear (constant) speed. */
        public static final int LINEAR = 1;

        /** The transition uses a decreasing speed. */
        public static final int SPEED_DOWN = 2;

        /**
         * The transition uses a constant speed, but slows down in the middle
         * section.
         */
        public static final int SLOW_MIDDLE = 3;

        /**
         * The transition uses a constant speed, but increases speed in the
         * middle section.
         */
        public static final int FAST_MIDDLE = 4;
    }

    /**
     * Defines settings for the background music.
     */
    public static class BackgroundMusicSettings {

        /** Background music file. */
        public String file;

        /** File type. See {@link FileType FileType} for valid values. */
        public int fileType;

        /**
         * Insertion time in milliseconds, in the output video where the
         * background music must be inserted.
 */
        public long insertionTime;

        /**
         * Volume, as a percentage of the background music track, to use. If
         * this field is set to 100, the background music will replace the audio
         * from the video input file(s).
         */
        public int volumePercent;

        /**
         * Start time in milliseconds in the background music file from where
         * the background music should loop. Set both <code>beginLoop</code> and
         * <code>endLoop</code> to <code>0</code> to disable looping.
         */
        public long beginLoop;

        /**
         * End time in milliseconds in the background music file to where the
         * background music should loop. Set both <code>beginLoop</code> and
         * <code>endLoop</code> to <code>0</code> to disable looping.
         */
        public long endLoop;

        /** Whether audio ducking of the background music is enabled. */
        public boolean enableDucking;

        // NOTE(review): threshold that triggers ducking — units (percent vs.
        // dB) are not visible here; confirm against native implementation.
        public int duckingThreshold;

        /** Volume used for the background music while ducking is active. */
        public int lowVolume;

        /** Whether the background music loops. */
        public boolean isLooping;
    }

    /** Defines settings for an effect. */
    public static class AudioEffect {
        /** No audio effect. */
        public static final int NONE = 0;

        /** Fade-in effect. */
        public static final int FADE_IN = 8;

        /** Fade-out effect. */
        public static final int FADE_OUT = 16;
    }

    /** Defines the effect settings. */
    public static class EffectSettings {

        /** Start time of the effect in milliseconds. */
        public int startTime;

        /** Duration of the effect in milliseconds. */
        public int duration;

        /**
         * Video effect type. See {@link VideoEffect VideoEffect} for valid
         * values.
         */
        public int videoEffectType;

        /**
         * Audio effect type. See {@link AudioEffect AudioEffect} for valid
         * values.
         */
        public int audioEffectType;

        /**
         * Start time of the effect in percents of the duration of the clip. A
         * value of 0 percent means start time is from the beginning of the
         * clip.
 */
        public int startPercent;

        /**
         * Duration of the effect in percents of the duration of the clip.
         */
        public int durationPercent;

        /**
         * Framing file.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
         * this field is ignored.
         */
        public String framingFile;

        /**
         * Framing buffer.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
         * this field is ignored.
         */
        public int[] framingBuffer;

        /**
         * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5) or ARGB_8888 (6).
         */
        public int bitmapType;

        /** Width of the framing bitmap in pixels. */
        public int width;

        /** Height of the framing bitmap in pixels. */
        public int height;

        /**
         * Top left x coordinate. This coordinate is used to set the x
         * coordinate of the picture in the framing file when the framing file
         * is selected. The x coordinate is also used to set the location of the
         * text in the text effect.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
         * ignored.
         */
        public int topLeftX;

        /**
         * Top left y coordinate. This coordinate is used to set the y
         * coordinate of the picture in the framing file when the framing file
         * is selected. The y coordinate is also used to set the location of the
         * text in the text effect.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
         * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
         * ignored.
 */
        public int topLeftY;

        /**
         * Should the frame be resized or not. If this field is set to
         * <code>true</code> then the frame size is matched with the output
         * video size.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
         * this field is ignored.
         */
        public boolean framingResize;

        /**
         * Size to which the framing buffer needs to be resized.
         * This is valid only if <code>framingResize</code> is true.
         */
        public int framingScaledSize;

        /**
         * Text to insert in the video.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
         * field is ignored.
         */
        public String text;

        /**
         * Text attributes for the text to insert in the video.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
         * field is ignored. For more details about this field see the
         * integration guide.
         */
        public String textRenderingData;

        /** Width of the text buffer in pixels. */
        public int textBufferWidth;

        /** Height of the text buffer in pixels. */
        public int textBufferHeight;

        /**
         * Processing rate for the fifties effect. A high value (e.g. 30)
         * results in high effect strength.
         * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
         * this field is ignored.
         */
        public int fiftiesFrameRate;

        /**
         * RGB 16 color of the RGB16 and gradient color effect.
 * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
         * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
         * field is ignored.
         */
        public int rgb16InputColor;

        /**
         * Start alpha blending percentage. Used only when
         * <code>videoEffectType</code> is {@link VideoEffect#TEXT
         * VideoEffect.TEXT} or {@link VideoEffect#FRAMING
         * VideoEffect.FRAMING}; ignored otherwise.
         */
        public int alphaBlendingStartPercent;

        /**
         * Middle alpha blending percentage. Used only when
         * <code>videoEffectType</code> is {@link VideoEffect#TEXT
         * VideoEffect.TEXT} or {@link VideoEffect#FRAMING
         * VideoEffect.FRAMING}; ignored otherwise.
         */
        public int alphaBlendingMiddlePercent;

        /**
         * End alpha blending percentage. Used only when
         * <code>videoEffectType</code> is {@link VideoEffect#TEXT
         * VideoEffect.TEXT} or {@link VideoEffect#FRAMING
         * VideoEffect.FRAMING}; ignored otherwise.
         */
        public int alphaBlendingEndPercent;

        /**
         * Duration, in percentage of effect duration, of the fade-in phase.
         * Used only when <code>videoEffectType</code> is
         * {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}; ignored otherwise.
         */
        public int alphaBlendingFadeInTimePercent;

        /**
         * Duration, in percentage of effect duration of the fade-out phase.
 * <p>
         * This field is only used when the field <code>videoEffectType</code>
         * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
         * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
         * is ignored.
         */
        public int alphaBlendingFadeOutTimePercent;
    }

    /** Defines the clip properties for preview. */
    public static class PreviewClips {

        /**
         * The path to the clip file.
         * <p>
         * File format of the clip, it can be:
         * <ul>
         * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
         * <li>JPG file
         * </ul>
         */
        public String clipPath;

        /**
         * File type of the clip. See {@link FileType FileType} for valid
         * values.
         */
        public int fileType;

        /** Begin of the cut in the clip in milliseconds. */
        public long beginPlayTime;

        /** End of the cut in the clip in milliseconds. */
        public long endPlayTime;

        /**
         * Set the media rendering. See {@link MediaRendering MediaRendering}
         * for valid values.
         */
        public int mediaRendering;

    }

    /** Defines the audio settings.
 */
    public static class AudioSettings {

        /** PCM file path. */
        String pFile;

        // NOTE(review): identifier of this audio track — semantics inferred
        // from the name; confirm against callers.
        String Id;

        /** If true, the original audio track is not taken into account. */
        boolean bRemoveOriginal;

        /** Number of channels (1=mono, 2=stereo) of BGM clip. */
        int channels;

        /** Sampling audio frequency (8000 for amr, 16000 or more for aac) of BGM clip. */
        int Fs;

        /** Extended frequency for AAC+, eAAC+ streams of BGM clip. */
        int ExtendedFs;

        /** Time, in milliseconds, at which the added audio track is inserted. */
        long startMs;

        /** Begin cut time, in milliseconds, within the added audio track. */
        long beginCutTime;

        /** End cut time, in milliseconds, within the added audio track. */
        long endCutTime;

        /** File type. See {@link FileType FileType} for valid values. */
        int fileType;

        /** Volume, in percentage, of the added audio track. */
        int volume;

        /** Looping on/off. */
        boolean loop;

        // Audio mix and duck parameters.
        int ducking_threshold;

        int ducking_lowVolume;

        boolean bInDucking_enable;

        /** Path of the decoded PCM file. */
        String pcmFilePath;
    }

    /** Encapsulates preview clips and effect settings. */
    public static class PreviewSettings {

        public PreviewClips[] previewClipsArray;

        /** The effect settings. */
        public EffectSettings[] effectSettingsArray;

    }

    /** Encapsulates clip properties. */
    public static class PreviewClipProperties {

        public Properties[] clipProperties;

    }

    /** Defines the editing settings. */
    public static class EditSettings {

        /**
         * Array of clip settings. There is one <code>clipSetting</code> for
         * each clip.
         */
        public ClipSettings[] clipSettingsArray;

        /**
         * Array of transition settings. If there are n clips (and thus n
         * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
         * <code>transistionSettings</code> in
         * <code>transistionSettingsArray</code>.
 */
        public TransitionSettings[] transitionSettingsArray;

        /** The effect settings. */
        public EffectSettings[] effectSettingsArray;

        /**
         * Video frame rate of the output clip. See {@link VideoFrameRate
         * VideoFrameRate} for valid values.
         */
        public int videoFrameRate;

        /** Output file name. Must be an absolute path. */
        public String outputFile;

        /**
         * Size of the video frames in the output clip. See
         * {@link VideoFrameSize VideoFrameSize} for valid values.
         */
        public int videoFrameSize;

        /**
         * Format of the video stream in the output clip. See
         * {@link VideoFormat VideoFormat} for valid values.
         */
        public int videoFormat;

        /**
         * Format of the audio stream in the output clip. See
         * {@link AudioFormat AudioFormat} for valid values.
         */
        public int audioFormat;

        /**
         * Sampling frequency of the audio stream in the output clip. See
         * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
         * values.
         */
        public int audioSamplingFreq;

        /**
         * Maximum file size. By setting this you can set the maximum size of
         * the output clip. Set it to <code>0</code> to let the class ignore
         * this field.
         */
        public int maxFileSize;

        /**
         * Number of audio channels in output clip. Use <code>0</code> for none,
         * <code>1</code> for mono or <code>2</code> for stereo. None is only
         * allowed when the <code>audioFormat</code> field is set to
         * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
         * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
         * allowed when the <code>audioFormat</code> field is set to
         * {@link AudioFormat#AAC AudioFormat.AAC}.
         */
        public int audioChannels;

        /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
        public int videoBitrate;

        /**
         * Audio bitrate.
 See {@link Bitrate Bitrate} for valid values.
         */
        public int audioBitrate;

        /**
         * Background music settings. See {@link BackgroundMusicSettings
         * BackgroundMusicSettings} for valid values.
         */
        public BackgroundMusicSettings backgroundMusicSettings;

        /** Volume, as a percentage, of the primary audio track. */
        public int primaryTrackVolume;

    }

    /**
     * Defines the media properties.
     */
    public static class Properties {

        /** Duration of the media in milliseconds. */
        public int duration;

        /** File type. */
        public int fileType;

        /** Video format. */
        public int videoFormat;

        /** Duration of the video stream of the media in milliseconds. */
        public int videoDuration;

        /** Bitrate of the video stream of the media. */
        public int videoBitrate;

        /**
         * Width of the video frames or the width of the still picture in
         * pixels.
         */
        public int width;

        /**
         * Height of the video frames or the height of the still picture in
         * pixels.
         */
        public int height;

        /** Average frame rate of video in the media in frames per second. */
        public float averageFrameRate;

        /** Profile and level of the video in the media. */
        public int profileAndLevel;

        /** Audio format. */
        public int audioFormat;

        /** Duration of the audio stream of the media in milliseconds. */
        public int audioDuration;

        /** Bitrate of the audio stream of the media. */
        public int audioBitrate;

        /** Number of audio channels in the media. */
        public int audioChannels;

        /**
         * Sampling frequency of the audio stream in the media in samples per
         * second.
 */
        public int audioSamplingFrequency;

        /** Volume value of the audio track as percentage. */
        public int audioVolumeValue;

        // NOTE(review): identifier of the media — semantics inferred from the
        // name; confirm against callers.
        public String Id;
    }

    /**
     * Constructor.
     *
     * @param projectPath The path where the VideoEditor stores all files
     *            related to the project
     * @param veObj The video editor reference
     * @throws IllegalArgumentException if <code>veObj</code> is null
     */
    public MediaArtistNativeHelper(String projectPath, VideoEditor veObj) {
        mProjectPath = projectPath;
        if (veObj != null) {
            mVideoEditor = veObj;
        } else {
            // mVideoEditor is final, so it must be assigned before throwing.
            mVideoEditor = null;
            throw new IllegalArgumentException("video editor object is null");
        }
        if (mStoryBoardSettings == null)
            mStoryBoardSettings = new EditSettings();

        // Initialize the native editor engine for this project.
        _init(mProjectPath, "null");
        mAudioTrackPCMFilePath = null;
    }

    /**
     * @return The project path
     */
    String getProjectPath() {
        return mProjectPath;
    }

    /**
     * @return The audio track PCM file path
     */
    String getProjectAudioTrackPCMFilePath() {
        return mAudioTrackPCMFilePath;
    }

    /**
     * Invalidates the PCM file: deletes it from disk and clears the cached
     * path so it will be regenerated on next use.
     */
    void invalidatePcmFile() {
        if (mAudioTrackPCMFilePath != null) {
            new File(mAudioTrackPCMFilePath).delete();
            mAudioTrackPCMFilePath = null;
        }
    }

    /**
     * Progress callback invoked from native code; dispatches progress either
     * to the export listener or, scaled per processing phase, to the media
     * processing listener.
     */
    @SuppressWarnings("unused")
    private void onProgressUpdate(int taskId, int progress) {
        if (mProcessingState == PROCESSING_EXPORT) {
            if (mExportProgressListener != null) {
                // Only forward monotonically increasing progress to the app.
                if (mProgressToApp < progress) {
                    mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
                    /* record previous progress */
                    mProgressToApp = progress;
                }
            }
        }
        else {
            // Adapt progress depending on current state
            int actualProgress = 0;
            int action = 0;

            if (mProcessingState == PROCESSING_AUDIO_PCM) {
                action = MediaProcessingProgressListener.ACTION_DECODE;
            } else {
 action = MediaProcessingProgressListener.ACTION_ENCODE;
            }

            switch (mProcessingState) {
                case PROCESSING_AUDIO_PCM:
                    actualProgress = progress;
                    break;
                case PROCESSING_TRANSITION:
                    actualProgress = progress;
                    break;
                case PROCESSING_KENBURNS:
                    actualProgress = progress;
                    break;
                case PROCESSING_INTERMEDIATE1:
                    // First of three passes: native progress maps into [0, 25).
                    if ((progress == 0) && (mProgressToApp != 0)) {
                        mProgressToApp = 0;
                    }
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = progress/4;
                    }
                    break;
                case PROCESSING_INTERMEDIATE2:
                    // Second pass maps into [25, 50).
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = 25 + progress/4;
                    }
                    break;
                case PROCESSING_INTERMEDIATE3:
                    // Final pass maps into [50, 100].
                    if ((progress != 0) || (mProgressToApp != 0)) {
                        actualProgress = 50 + progress/2;
                    }
                    break;
                case PROCESSING_NONE:
                    // Fall through: no processing expected in this state.
                default:
                    Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
                    return;
            }
            if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {

                mProgressToApp = actualProgress;

                if (mMediaProcessingProgressListener != null) {
                    // Send the progress indication
                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
                            actualProgress);
                }
            }
            /* avoid 0 in next intermediate call */
            if (mProgressToApp == 0) {
                if (mMediaProcessingProgressListener != null) {
                    /*
                     * Send the progress indication
                     */
                    mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
                            actualProgress);
                }
                mProgressToApp = 1;
            }
        }
    }

    /**
     * Preview progress callback invoked from native code; forwards preview
     * progress and any overlay update to the registered
     * PreviewProgressListener.
     */
    @SuppressWarnings("unused")
    private void onPreviewProgressUpdate(int progress, boolean isFinished,
            boolean updateOverlay, String filename, int renderingMode) {
        if (mPreviewProgressListener != null) {
            if (mIsFirstProgress) {
                // Notify onStart exactly once per preview session.
                mPreviewProgressListener.onStart(mVideoEditor);
                mIsFirstProgress = false;
            }

            final
 VideoEditor.OverlayData overlayData;
            if (updateOverlay) {
                overlayData = new VideoEditor.OverlayData();
                if (filename != null) {
                    overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
                } else {
                    overlayData.setClear();
                }
            } else {
                overlayData = null;
            }

            mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);

            // Keep the last non-zero progress value for later queries.
            if (progress != 0) {
                mPreviewProgress = progress;
            }

            if (isFinished) {
                mPreviewProgressListener.onStop(mVideoEditor);
            }
        }
    }

    /**
     * Release the native helper object. Exceptions are logged and rethrown.
     */
    void releaseNativeHelper() {
        try {
            release();
        } catch (IllegalStateException ex) {
            Log.e(TAG, "Illegal State exeption caught in releaseNativeHelper");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "Runtime exeption caught in releaseNativeHelper");
            throw ex;
        }
    }

    /**
     * Audio graph extraction progress callback invoked from native code;
     * forwards non-zero progress to the waveform extraction listener.
     */
    @SuppressWarnings("unused")
    private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
        if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
            mExtractAudioWaveformProgressListener.onProgress(progress);
        }
    }

    /**
     * Populates the Effect Settings in EffectSettings.
     *
     * @param effects The reference of EffectColor
     *
     * @return The populated effect settings in EffectSettings reference
     */
    EffectSettings getEffectSettings(EffectColor effects) {
        EffectSettings effectSettings = new EffectSettings();
        effectSettings.startTime = (int)effects.getStartTime();
        effectSettings.duration = (int)effects.getDuration();
        effectSettings.videoEffectType = getEffectColorType(effects);
        effectSettings.audioEffectType = 0;
        effectSettings.startPercent = 0;
        effectSettings.durationPercent = 0;
        effectSettings.framingFile = null;
 effectSettings.topLeftX = 0;
        effectSettings.topLeftY = 0;
        effectSettings.framingResize = false;
        effectSettings.text = null;
        effectSettings.textRenderingData = null;
        effectSettings.textBufferWidth = 0;
        effectSettings.textBufferHeight = 0;
        // The fifties effect uses a fixed processing rate of 15.
        if (effects.getType() == EffectColor.TYPE_FIFTIES) {
            effectSettings.fiftiesFrameRate = 15;
        } else {
            effectSettings.fiftiesFrameRate = 0;
        }

        // Only color-based effects carry an input color.
        if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
                || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
            effectSettings.rgb16InputColor = effects.getColor();
        }

        effectSettings.alphaBlendingStartPercent = 0;
        effectSettings.alphaBlendingMiddlePercent = 0;
        effectSettings.alphaBlendingEndPercent = 0;
        effectSettings.alphaBlendingFadeInTimePercent = 0;
        effectSettings.alphaBlendingFadeOutTimePercent = 0;
        return effectSettings;
    }

    /**
     * Populates the Overlay Settings in EffectSettings.
     *
     * @param overlay The reference of OverlayFrame
     *
     * @return The populated overlay settings in EffectSettings reference
     */
    EffectSettings getOverlaySettings(OverlayFrame overlay) {
        EffectSettings effectSettings = new EffectSettings();
        Bitmap bitmap = null;

        effectSettings.startTime = (int)overlay.getStartTime();
        effectSettings.duration = (int)overlay.getDuration();
        // Overlays are implemented as FRAMING video effects.
        effectSettings.videoEffectType = VideoEffect.FRAMING;
        effectSettings.audioEffectType = 0;
        effectSettings.startPercent = 0;
        effectSettings.durationPercent = 0;
        effectSettings.framingFile = null;

        if ((bitmap = overlay.getBitmap()) != null) {
            effectSettings.framingFile = overlay.getFilename();

            if (effectSettings.framingFile == null) {
                // Persist the overlay bitmap so a file path is available.
                try {
                    (overlay).save(mProjectPath);
                } catch (IOException e) {
                    Log.e(TAG, "getOverlaySettings : File not found");
                }
                effectSettings.framingFile =
 overlay.getFilename();
            }
            // Map Bitmap.Config to the native bitmap type codes (4/5/6);
            // ALPHA_8 is not supported.
            if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
                effectSettings.bitmapType = 6;
            else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
                effectSettings.bitmapType = 5;
            else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
                effectSettings.bitmapType = 4;
            else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
                throw new RuntimeException("Bitmap config not supported");

            effectSettings.width = bitmap.getWidth();
            effectSettings.height = bitmap.getHeight();
            effectSettings.framingBuffer = new int[effectSettings.width];

            // Scan the bitmap one row at a time to find the minimum and
            // maximum alpha values across all pixels.
            int tmp = 0;
            short maxAlpha = 0;
            short minAlpha = (short)0xFF;
            short alpha = 0;
            while (tmp < effectSettings.height) {
                bitmap.getPixels(effectSettings.framingBuffer, 0,
                        effectSettings.width, 0, tmp,
                        effectSettings.width, 1);
                for (int i = 0; i < effectSettings.width; i++) {
                    alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
                    if (alpha > maxAlpha) {
                        maxAlpha = alpha;
                    }
                    if (alpha < minAlpha) {
                        minAlpha = alpha;
                    }
                }
                tmp += 1;
            }
            // Use the midpoint of min/max alpha, rescaled from [0, 255] to a
            // percentage, as the uniform blending level.
            alpha = (short)((maxAlpha + minAlpha) / 2);
            alpha = (short)((alpha * 100) / 256);
            effectSettings.alphaBlendingEndPercent = alpha;
            effectSettings.alphaBlendingMiddlePercent = alpha;
            effectSettings.alphaBlendingStartPercent = alpha;
            effectSettings.alphaBlendingFadeInTimePercent = 100;
            effectSettings.alphaBlendingFadeOutTimePercent = 100;
            // NOTE(review): the row buffer is dropped here — presumably the
            // native layer reads the framing data from framingFile instead;
            // confirm.
            effectSettings.framingBuffer = null;
        }

        effectSettings.topLeftX = 0;
        effectSettings.topLeftY = 0;

        effectSettings.framingResize = true;
        effectSettings.text = null;
        effectSettings.textRenderingData = null;
        effectSettings.textBufferWidth = 0;
        effectSettings.textBufferHeight = 0;
        effectSettings.fiftiesFrameRate = 0;
        effectSettings.rgb16InputColor = 0;
        int mediaItemHeight;
        int aspectRatio;
        if
 (overlay.getMediaItem() instanceof MediaImageItem) {
            if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
                // Ken Burns was applied: use the generated clip's dimensions.
                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
                aspectRatio = getAspectRatio(
                        ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
                        , mediaItemHeight);
            } else {
                // For image get the scaled height; aspect ratio remains the same.
                mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
                aspectRatio = overlay.getMediaItem().getAspectRatio();
            }
        } else {
            aspectRatio = overlay.getMediaItem().getAspectRatio();
            mediaItemHeight = overlay.getMediaItem().getHeight();
        }
        effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
        return effectSettings;
    }

    /**
     * Sets the audio regenerate flag.
     *
     * @param flag The boolean to set the audio regenerate flag
     */
    void setAudioflag(boolean flag) {
        // Force regeneration if the PCM file does not exist yet.
2109 if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) { 2110 flag = true; 2111 } 2112 mRegenerateAudio = flag; 2113 } 2114 2115 /** 2116 * Gets the audio regenerate flag 2117 * 2118 * @param return The boolean to get the audio regenerate flag 2119 * 2120 */ 2121 boolean getAudioflag() { 2122 return mRegenerateAudio; 2123 } 2124 2125 /** 2126 * Maps the average frame rate to one of the defined enum values 2127 * 2128 * @param averageFrameRate The average frame rate of video item 2129 * 2130 * @return The frame rate from one of the defined enum values 2131 */ 2132 int GetClosestVideoFrameRate(int averageFrameRate) { 2133 if (averageFrameRate >= 25) { 2134 return VideoFrameRate.FR_30_FPS; 2135 } else if (averageFrameRate >= 20) { 2136 return VideoFrameRate.FR_25_FPS; 2137 } else if (averageFrameRate >= 15) { 2138 return VideoFrameRate.FR_20_FPS; 2139 } else if (averageFrameRate >= 12) { 2140 return VideoFrameRate.FR_15_FPS; 2141 } else if (averageFrameRate >= 10) { 2142 return VideoFrameRate.FR_12_5_FPS; 2143 } else if (averageFrameRate >= 7) { 2144 return VideoFrameRate.FR_10_FPS; 2145 } else if (averageFrameRate >= 5) { 2146 return VideoFrameRate.FR_7_5_FPS; 2147 } else { 2148 return -1; 2149 } 2150 } 2151 2152 /** 2153 * Helper function to adjust the effect or overlay start time 2154 * depending on the begin and end boundary time of meddia item 2155 */ 2156 public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime, 2157 int endCutTime) { 2158 2159 int effectStartTime = 0; 2160 int effectDuration = 0; 2161 2162 /** 2163 * cbct -> clip begin cut time 2164 * cect -> clip end cut time 2165 **************************************** 2166 * | | 2167 * | cbct cect | 2168 * | <-1--> | | | 2169 * | <--|-2-> | | 2170 * | | <---3---> | | 2171 * | | <--|-4---> | 2172 * | | | <--5--> | 2173 * | <---|------6----|----> | 2174 * | | 2175 * < : effectStart 2176 * > : effectStart + effectDuration 2177 
**************************************** 2178 **/ 2179 2180 /** 1 & 5 */ 2181 /** 2182 * Effect falls out side the trim duration. In such a case effects shall 2183 * not be applied. 2184 */ 2185 if ((lEffect.startTime > endCutTime) 2186 || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) { 2187 2188 effectStartTime = 0; 2189 effectDuration = 0; 2190 2191 lEffect.startTime = effectStartTime; 2192 lEffect.duration = effectDuration; 2193 return; 2194 } 2195 2196 /** 2 */ 2197 if ((lEffect.startTime < beginCutTime) 2198 && ((lEffect.startTime + lEffect.duration) > beginCutTime) 2199 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2200 effectStartTime = 0; 2201 effectDuration = lEffect.duration; 2202 2203 effectDuration -= (beginCutTime - lEffect.startTime); 2204 lEffect.startTime = effectStartTime; 2205 lEffect.duration = effectDuration; 2206 return; 2207 } 2208 2209 /** 3 */ 2210 if ((lEffect.startTime >= beginCutTime) 2211 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) { 2212 effectStartTime = lEffect.startTime - beginCutTime; 2213 lEffect.startTime = effectStartTime; 2214 lEffect.duration = lEffect.duration; 2215 return; 2216 } 2217 2218 /** 4 */ 2219 if ((lEffect.startTime >= beginCutTime) 2220 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2221 effectStartTime = lEffect.startTime - beginCutTime; 2222 effectDuration = endCutTime - lEffect.startTime; 2223 lEffect.startTime = effectStartTime; 2224 lEffect.duration = effectDuration; 2225 return; 2226 } 2227 2228 /** 6 */ 2229 if ((lEffect.startTime < beginCutTime) 2230 && ((lEffect.startTime + lEffect.duration) > endCutTime)) { 2231 effectStartTime = 0; 2232 effectDuration = endCutTime - beginCutTime; 2233 lEffect.startTime = effectStartTime; 2234 lEffect.duration = effectDuration; 2235 return; 2236 } 2237 2238 } 2239 2240 /** 2241 * Generates the clip for preview or export 2242 * 2243 * @param editSettings The EditSettings reference for generating 2244 * a clip for 
preview or export 2245 * 2246 * @return error value 2247 */ 2248 public int generateClip(EditSettings editSettings) { 2249 int err = 0; 2250 2251 try { 2252 err = nativeGenerateClip(editSettings); 2253 } catch (IllegalArgumentException ex) { 2254 Log.e(TAG, "Illegal Argument exception in load settings"); 2255 return -1; 2256 } catch (IllegalStateException ex) { 2257 Log.e(TAG, "Illegal state exception in load settings"); 2258 return -1; 2259 } catch (RuntimeException ex) { 2260 Log.e(TAG, "Runtime exception in load settings"); 2261 return -1; 2262 } 2263 return err; 2264 } 2265 2266 /** 2267 * Init function to initialiZe the ClipSettings reference to 2268 * default values 2269 * 2270 * @param lclipSettings The ClipSettings reference 2271 */ 2272 void initClipSettings(ClipSettings lclipSettings) { 2273 lclipSettings.clipPath = null; 2274 lclipSettings.clipDecodedPath = null; 2275 lclipSettings.clipOriginalPath = null; 2276 lclipSettings.fileType = 0; 2277 lclipSettings.endCutTime = 0; 2278 lclipSettings.beginCutTime = 0; 2279 lclipSettings.beginCutPercent = 0; 2280 lclipSettings.endCutPercent = 0; 2281 lclipSettings.panZoomEnabled = false; 2282 lclipSettings.panZoomPercentStart = 0; 2283 lclipSettings.panZoomTopLeftXStart = 0; 2284 lclipSettings.panZoomTopLeftYStart = 0; 2285 lclipSettings.panZoomPercentEnd = 0; 2286 lclipSettings.panZoomTopLeftXEnd = 0; 2287 lclipSettings.panZoomTopLeftYEnd = 0; 2288 lclipSettings.mediaRendering = 0; 2289 } 2290 2291 2292 /** 2293 * Populates the settings for generating an effect clip 2294 * 2295 * @param lMediaItem The media item for which the effect clip 2296 * needs to be generated 2297 * @param lclipSettings The ClipSettings reference containing 2298 * clips data 2299 * @param e The EditSettings reference containing effect specific data 2300 * @param uniqueId The unique id used in the name of the output clip 2301 * @param clipNo Used for internal purpose 2302 * 2303 * @return The name and path of generated clip 2304 */ 2305 
String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings, 2306 EditSettings e,String uniqueId,int clipNo) { 2307 int err = 0; 2308 EditSettings editSettings = null; 2309 String EffectClipPath = null; 2310 2311 editSettings = new EditSettings(); 2312 2313 editSettings.clipSettingsArray = new ClipSettings[1]; 2314 editSettings.clipSettingsArray[0] = lclipSettings; 2315 2316 editSettings.backgroundMusicSettings = null; 2317 editSettings.transitionSettingsArray = null; 2318 editSettings.effectSettingsArray = e.effectSettingsArray; 2319 2320 EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_" 2321 + lMediaItem.getId() + uniqueId + ".3gp"); 2322 2323 File tmpFile = new File(EffectClipPath); 2324 if (tmpFile.exists()) { 2325 tmpFile.delete(); 2326 } 2327 2328 if (lMediaItem instanceof MediaVideoItem) { 2329 MediaVideoItem m = (MediaVideoItem)lMediaItem; 2330 2331 editSettings.audioFormat = AudioFormat.AAC; 2332 editSettings.audioChannels = 2; 2333 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2334 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2335 2336 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2337 //editSettings.videoFormat = VideoFormat.MPEG4; 2338 editSettings.videoFormat = VideoFormat.H264; 2339 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2340 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2341 m.getHeight()); 2342 } else { 2343 MediaImageItem m = (MediaImageItem)lMediaItem; 2344 editSettings.audioBitrate = Bitrate.BR_64_KBPS; 2345 editSettings.audioChannels = 2; 2346 editSettings.audioFormat = AudioFormat.AAC; 2347 editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2348 2349 editSettings.videoBitrate = Bitrate.BR_5_MBPS; 2350 editSettings.videoFormat = VideoFormat.H264; 2351 editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS; 2352 editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2353 
m.getScaledHeight()); 2354 } 2355 2356 editSettings.outputFile = EffectClipPath; 2357 2358 if (clipNo == 1) { 2359 mProcessingState = PROCESSING_INTERMEDIATE1; 2360 } else if (clipNo == 2) { 2361 mProcessingState = PROCESSING_INTERMEDIATE2; 2362 } 2363 mProcessingObject = lMediaItem; 2364 err = generateClip(editSettings); 2365 mProcessingState = PROCESSING_NONE; 2366 2367 if (err == 0) { 2368 lclipSettings.clipPath = EffectClipPath; 2369 lclipSettings.fileType = FileType.THREE_GPP; 2370 return EffectClipPath; 2371 } else { 2372 throw new RuntimeException("preview generation cannot be completed"); 2373 } 2374 } 2375 2376 2377 /** 2378 * Populates the settings for generating a Ken Burn effect clip 2379 * 2380 * @param m The media image item for which the Ken Burn effect clip 2381 * needs to be generated 2382 * @param e The EditSettings reference clip specific data 2383 * 2384 * @return The name and path of generated clip 2385 */ 2386 String generateKenBurnsClip(EditSettings e, MediaImageItem m) { 2387 String output = null; 2388 int err = 0; 2389 2390 e.backgroundMusicSettings = null; 2391 e.transitionSettingsArray = null; 2392 e.effectSettingsArray = null; 2393 output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp"); 2394 2395 File tmpFile = new File(output); 2396 if (tmpFile.exists()) { 2397 tmpFile.delete(); 2398 } 2399 2400 e.outputFile = output; 2401 e.audioBitrate = Bitrate.BR_64_KBPS; 2402 e.audioChannels = 2; 2403 e.audioFormat = AudioFormat.AAC; 2404 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2405 2406 e.videoBitrate = Bitrate.BR_5_MBPS; 2407 e.videoFormat = VideoFormat.H264; 2408 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2409 e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(), 2410 m.getScaledHeight()); 2411 mProcessingState = PROCESSING_KENBURNS; 2412 mProcessingObject = m; 2413 err = generateClip(e); 2414 // Reset the processing state and check for errors 2415 mProcessingState = PROCESSING_NONE; 
2416 if (err != 0) { 2417 throw new RuntimeException("preview generation cannot be completed"); 2418 } 2419 return output; 2420 } 2421 2422 2423 /** 2424 * Calculates the output resolution for transition clip 2425 * 2426 * @param m1 First media item associated with transition 2427 * @param m2 Second media item associated with transition 2428 * 2429 * @return The transition resolution 2430 */ 2431 private int getTransitionResolution(MediaItem m1, MediaItem m2) { 2432 int clip1Height = 0; 2433 int clip2Height = 0; 2434 int videoSize = 0; 2435 2436 if (m1 != null && m2 != null) { 2437 if (m1 instanceof MediaVideoItem) { 2438 clip1Height = m1.getHeight(); 2439 } else if (m1 instanceof MediaImageItem) { 2440 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2441 } 2442 if (m2 instanceof MediaVideoItem) { 2443 clip2Height = m2.getHeight(); 2444 } else if (m2 instanceof MediaImageItem) { 2445 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2446 } 2447 if (clip1Height > clip2Height) { 2448 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2449 } else { 2450 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2451 } 2452 } else if (m1 == null && m2 != null) { 2453 if (m2 instanceof MediaVideoItem) { 2454 clip2Height = m2.getHeight(); 2455 } else if (m2 instanceof MediaImageItem) { 2456 clip2Height = ((MediaImageItem)m2).getScaledHeight(); 2457 } 2458 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height); 2459 } else if (m1 != null && m2 == null) { 2460 if (m1 instanceof MediaVideoItem) { 2461 clip1Height = m1.getHeight(); 2462 } else if (m1 instanceof MediaImageItem) { 2463 clip1Height = ((MediaImageItem)m1).getScaledHeight(); 2464 } 2465 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height); 2466 } 2467 return videoSize; 2468 } 2469 2470 /** 2471 * Populates the settings for generating an transition clip 2472 * 2473 * @param m1 First media item associated with 
transition 2474 * @param m2 Second media item associated with transition 2475 * @param e The EditSettings reference containing 2476 * clip specific data 2477 * @param uniqueId The unique id used in the name of the output clip 2478 * @param t The Transition specific data 2479 * 2480 * @return The name and path of generated clip 2481 */ 2482 String generateTransitionClip(EditSettings e, String uniqueId, 2483 MediaItem m1, MediaItem m2,Transition t) { 2484 String outputFilename = null; 2485 int err = 0; 2486 2487 outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp"); 2488 e.outputFile = outputFilename; 2489 e.audioBitrate = Bitrate.BR_64_KBPS; 2490 e.audioChannels = 2; 2491 e.audioFormat = AudioFormat.AAC; 2492 e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000; 2493 2494 e.videoBitrate = Bitrate.BR_5_MBPS; 2495 e.videoFormat = VideoFormat.H264; 2496 e.videoFrameRate = VideoFrameRate.FR_30_FPS; 2497 e.videoFrameSize = getTransitionResolution(m1, m2); 2498 2499 if (new File(outputFilename).exists()) { 2500 new File(outputFilename).delete(); 2501 } 2502 mProcessingState = PROCESSING_INTERMEDIATE3; 2503 mProcessingObject = t; 2504 err = generateClip(e); 2505 // Reset the processing state and check for errors 2506 mProcessingState = PROCESSING_NONE; 2507 if (err != 0) { 2508 throw new RuntimeException("preview generation cannot be completed"); 2509 } 2510 return outputFilename; 2511 } 2512 2513 /** 2514 * Populates effects and overlays in EffectSettings structure 2515 * and also adjust the start time and duration of effects and overlays 2516 * w.r.t to total story board time 2517 * 2518 * @param m1 Media item associated with effect 2519 * @param effectSettings The EffectSettings reference containing 2520 * effect specific data 2521 * @param beginCutTime The begin cut time of the clip associated with effect 2522 * @param endCutTime The end cut time of the clip associated with effect 2523 * @param storyBoardTime The current story board time 2524 * 2525 
* @return The updated index 2526 */ 2527 private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i, 2528 int beginCutTime, int endCutTime, int storyBoardTime) { 2529 2530 if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0 2531 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2532 beginCutTime += m.getBeginTransition().getDuration(); 2533 endCutTime -= m.getEndTransition().getDuration(); 2534 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2535 && m.getEndTransition().getDuration() > 0) { 2536 endCutTime -= m.getEndTransition().getDuration(); 2537 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2538 && m.getBeginTransition().getDuration() > 0) { 2539 beginCutTime += m.getBeginTransition().getDuration(); 2540 } 2541 2542 final List<Effect> effects = m.getAllEffects(); 2543 final List<Overlay> overlays = m.getAllOverlays(); 2544 for (Effect effect : effects) { 2545 if (effect instanceof EffectColor) { 2546 effectSettings[i] = getEffectSettings((EffectColor)effect); 2547 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2548 effectSettings[i].startTime += storyBoardTime; 2549 i++; 2550 } 2551 } 2552 2553 for (Overlay overlay : overlays) { 2554 effectSettings[i] = getOverlaySettings((OverlayFrame)overlay); 2555 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime); 2556 effectSettings[i].startTime += storyBoardTime; 2557 i++; 2558 } 2559 return i; 2560 } 2561 2562 /** 2563 * Adjusts the media item boundaries for use in export or preview 2564 * 2565 * @param clipSettings The ClipSettings reference 2566 * @param clipProperties The Properties reference 2567 * @param m The media item 2568 */ 2569 private void adjustMediaItemBoundary(ClipSettings clipSettings, 2570 Properties clipProperties, MediaItem m) { 2571 if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0 2572 
&& m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) { 2573 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2574 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2575 } else if (m.getBeginTransition() == null && m.getEndTransition() != null 2576 && m.getEndTransition().getDuration() > 0) { 2577 clipSettings.endCutTime -= m.getEndTransition().getDuration(); 2578 } else if (m.getEndTransition() == null && m.getBeginTransition() != null 2579 && m.getBeginTransition().getDuration() > 0) { 2580 clipSettings.beginCutTime += m.getBeginTransition().getDuration(); 2581 } 2582 2583 clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime; 2584 2585 if (clipProperties.videoDuration != 0) { 2586 clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2587 } 2588 2589 if (clipProperties.audioDuration != 0) { 2590 clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime; 2591 } 2592 } 2593 2594 /** 2595 * Generates the transition if transition is present 2596 * and is in invalidated state 2597 * 2598 * @param transition The Transition reference 2599 * @param editSettings The EditSettings reference 2600 * @param clipPropertiesArray The clip Properties array 2601 * @param i The index in clip Properties array for current clip 2602 */ 2603 private void generateTransition(Transition transition, EditSettings editSettings, 2604 PreviewClipProperties clipPropertiesArray, int index) { 2605 if (!(transition.isGenerated())) { 2606 transition.generate(); 2607 } 2608 editSettings.clipSettingsArray[index] = new ClipSettings(); 2609 editSettings.clipSettingsArray[index].clipPath = transition.getFilename(); 2610 editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP; 2611 editSettings.clipSettingsArray[index].beginCutTime = 0; 2612 editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration(); 2613 
editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS; 2614 2615 try { 2616 clipPropertiesArray.clipProperties[index] = 2617 getMediaProperties(transition.getFilename()); 2618 } catch (Exception e) { 2619 throw new IllegalArgumentException("Unsupported file or file not found"); 2620 } 2621 2622 clipPropertiesArray.clipProperties[index].Id = null; 2623 clipPropertiesArray.clipProperties[index].audioVolumeValue = 100; 2624 clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration(); 2625 if (clipPropertiesArray.clipProperties[index].videoDuration != 0) { 2626 clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration(); 2627 } 2628 2629 if (clipPropertiesArray.clipProperties[index].audioDuration != 0) { 2630 clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration(); 2631 } 2632 } 2633 2634 /** 2635 * Sets the volume for current media item in clip properties array 2636 * 2637 * @param m The media item 2638 * @param clipProperties The clip properties array reference 2639 * @param i The index in clip Properties array for current clip 2640 */ 2641 private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties, 2642 int index) { 2643 if (m instanceof MediaVideoItem) { 2644 final boolean videoMuted = ((MediaVideoItem)m).isMuted(); 2645 if (videoMuted == false) { 2646 mClipProperties.clipProperties[index].audioVolumeValue = 2647 ((MediaVideoItem)m).getVolume(); 2648 } else { 2649 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2650 } 2651 } else if (m instanceof MediaImageItem) { 2652 mClipProperties.clipProperties[index].audioVolumeValue = 0; 2653 } 2654 } 2655 2656 /** 2657 * Checks for odd size image width and height 2658 * 2659 * @param m The media item 2660 * @param clipProperties The clip properties array reference 2661 * @param i The index in clip Properties array for current clip 2662 */ 2663 private void checkOddSizeImage(MediaItem 
m, PreviewClipProperties clipProperties, int index) { 2664 if (m instanceof MediaImageItem) { 2665 int width = mClipProperties.clipProperties[index].width; 2666 int height = mClipProperties.clipProperties[index].height; 2667 2668 if ((width % 2) != 0) { 2669 width -= 1; 2670 } 2671 if ((height % 2) != 0) { 2672 height -= 1; 2673 } 2674 mClipProperties.clipProperties[index].width = width; 2675 mClipProperties.clipProperties[index].height = height; 2676 } 2677 } 2678 2679 /** 2680 * Populates the media item properties and calculates the maximum 2681 * height among all the clips 2682 * 2683 * @param m The media item 2684 * @param i The index in clip Properties array for current clip 2685 * @param maxHeight The max height from the clip properties 2686 * 2687 * @return Updates the max height if current clip's height is greater 2688 * than all previous clips height 2689 */ 2690 private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) { 2691 mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings(); 2692 if (m instanceof MediaVideoItem) { 2693 mPreviewEditSettings.clipSettingsArray[index] = 2694 ((MediaVideoItem)m).getVideoClipProperties(); 2695 if (((MediaVideoItem)m).getHeight() > maxHeight) { 2696 maxHeight = ((MediaVideoItem)m).getHeight(); 2697 } 2698 } else if (m instanceof MediaImageItem) { 2699 mPreviewEditSettings.clipSettingsArray[index] = 2700 ((MediaImageItem)m).getImageClipProperties(); 2701 if (((MediaImageItem)m).getScaledHeight() > maxHeight) { 2702 maxHeight = ((MediaImageItem)m).getScaledHeight(); 2703 } 2704 } 2705 /** + Handle the image files here */ 2706 if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) { 2707 mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath = 2708 ((MediaImageItem)m).getDecodedImageFileName(); 2709 2710 mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath = 2711 mPreviewEditSettings.clipSettingsArray[index].clipPath; 2712 } 2713 return maxHeight; 2714 } 

    /**
     * Populates the background music track properties.
     *
     * Exactly one background track is supported: if the list has a single
     * entry it becomes the active audio track, otherwise all background
     * music state is cleared.
     *
     * @param mediaBGMList The background music list
     */
    private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {

        if (mediaBGMList.size() == 1) {
            mAudioTrack = mediaBGMList.get(0);
        } else {
            mAudioTrack = null;
        }

        if (mAudioTrack != null) {
            mAudioSettings = new AudioSettings();
            Properties mAudioProperties = new Properties();
            mAudioSettings.pFile = null;
            mAudioSettings.Id = mAudioTrack.getId();
            try {
                mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
            } catch (Exception e) {
                throw new IllegalArgumentException("Unsupported file or file not found");
            }
            mAudioSettings.bRemoveOriginal = false;
            mAudioSettings.channels = mAudioProperties.audioChannels;
            mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
            mAudioSettings.loop = mAudioTrack.isLooping();
            mAudioSettings.ExtendedFs = 0;
            mAudioSettings.pFile = mAudioTrack.getFilename();
            mAudioSettings.startMs = mAudioTrack.getStartTime();
            mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
            mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
            if (mAudioTrack.isMuted()) {
                mAudioSettings.volume = 0;
            } else {
                mAudioSettings.volume = mAudioTrack.getVolume();
            }
            mAudioSettings.fileType = mAudioProperties.fileType;
            mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
            mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
            mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
            // NOTE(review): String.format over a concatenated path misbehaves
            // if mProjectPath contains '%' — confirm and simplify.
            mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
            mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;

            mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
            mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
            mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
            mPreviewEditSettings.backgroundMusicSettings.insertionTime =
                mAudioTrack.getStartTime();
            mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
            mPreviewEditSettings.backgroundMusicSettings.beginLoop =
                mAudioTrack.getBoundaryBeginTime();
            mPreviewEditSettings.backgroundMusicSettings.endLoop =
                mAudioTrack.getBoundaryEndTime();
            mPreviewEditSettings.backgroundMusicSettings.enableDucking =
                mAudioTrack.isDuckingEnabled();
            mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
                mAudioTrack.getDuckingThreshhold();
            mPreviewEditSettings.backgroundMusicSettings.lowVolume =
                mAudioTrack.getDuckedTrackVolume();
            mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
            mPreviewEditSettings.primaryTrackVolume = 100;
            mProcessingState = PROCESSING_AUDIO_PCM;
            mProcessingObject = mAudioTrack;
        } else {
            // No (or more than one) background track: clear all BGM state.
            mAudioSettings = null;
            mPreviewEditSettings.backgroundMusicSettings = null;
            mAudioTrackPCMFilePath = null;
        }
    }

    /**
     * Counts all the effects and overlays across all media items in the list.
     * Ken Burns effects are excluded because they are rendered into the clip
     * itself rather than applied at preview time.
     *
     * @param mediaItemsList The media item list
     *
     * @return The total number of effects and overlays
     */
    private int getTotalEffects(List<MediaItem> mediaItemsList) {
        int totalEffects = 0;
        final Iterator<MediaItem> it = mediaItemsList.iterator();
        while (it.hasNext()) {
            final MediaItem t = it.next();
            totalEffects += t.getAllEffects().size();
            totalEffects += t.getAllOverlays().size();
            final Iterator<Effect> ef = t.getAllEffects().iterator();
            while (ef.hasNext()) {
                final Effect e = ef.next();
                if (e instanceof EffectKenBurns) {
                    totalEffects--;
                }
            }
        }
        return totalEffects;
    }

    /**
     * This function is responsible for forming the clip settings
     * array and clip properties array including transition clips
     * and effect settings for preview purposes or export.
     *
     * Runs only when the preview array has been invalidated; on success it
     * pushes the assembled settings to native code and clears the
     * invalidation flag.
     *
     * @param mediaItemsList The media item list
     * @param mediaTransitionList The transitions list
     * @param mediaBGMList The background music list
     * @param listener The MediaProcessingProgressListener
     */
    void previewStoryBoard(List<MediaItem> mediaItemsList,
            List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
            MediaProcessingProgressListener listener) {
        if (mInvalidatePreviewArray) {
            int previewIndex = 0;
            int totalEffects = 0;
            int storyBoardTime = 0;
            int maxHeight = 0;
            int beginCutTime = 0;
            int endCutTime = 0;
            int effectIndex = 0;
            Transition lTransition = null;
            MediaItem lMediaItem = null;
            mPreviewEditSettings = new EditSettings();
            mClipProperties = new PreviewClipProperties();
            mTotalClips = 0;

            // Each non-zero-duration transition occupies its own clip slot.
            mTotalClips = mediaItemsList.size();
            for (Transition transition : mediaTransitionList) {
                if (transition.getDuration() > 0) {
                    mTotalClips++;
                }
            }

            totalEffects = getTotalEffects(mediaItemsList);

            mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
            mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
            mClipProperties.clipProperties = new Properties[mTotalClips];

            /** record the call back progress listener */
            mMediaProcessingProgressListener = listener;
            mProgressToApp = 0;

            if (mediaItemsList.size() > 0) {
                for (int i = 0; i < mediaItemsList.size(); i++) {
                    /* Get the Media Item from the list */
                    lMediaItem = mediaItemsList.get(i);
                    if (lMediaItem instanceof MediaVideoItem) {
                        beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
                        endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
                    } else if (lMediaItem instanceof MediaImageItem) {
                        beginCutTime = 0;
                        endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
                    }
                    /* Get the transition associated with Media Item */
                    lTransition = lMediaItem.getBeginTransition();
                    if (lTransition != null && (lTransition.getDuration() > 0)) {
                        /* generate transition clip */
                        generateTransition(lTransition, mPreviewEditSettings,
                                mClipProperties, previewIndex);
                        storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
                        previewIndex++;
                    }
                    /* Populate media item properties */
                    maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
                    /* Get the clip properties of the media item. */
                    if (lMediaItem instanceof MediaImageItem) {
                        int tmpCnt = 0;
                        boolean bEffectKbPresent = false;
                        final List<Effect> effectList = lMediaItem.getAllEffects();
                        /**
                         * Check if Ken Burns effect is present
                         */
                        while (tmpCnt < effectList.size()) {
                            if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
                                bEffectKbPresent = true;
                                break;
                            }
                            tmpCnt++;
                        }

                        if (bEffectKbPresent) {
                            // Ken Burns images: probe the pre-generated clip.
                            try {
                                mClipProperties.clipProperties[previewIndex]
                                    = getMediaProperties(
                                            ((MediaImageItem)lMediaItem).getGeneratedImageClip());
                            } catch (Exception e) {
                                throw new IllegalArgumentException(
                                        "Unsupported file or file not found");
                            }
                        } else {
                            // Plain images: probe the scaled image and override
                            // width/height with the scaled dimensions.
                            try {
                                mClipProperties.clipProperties[previewIndex]
                                    = getMediaProperties(
                                            ((MediaImageItem)lMediaItem).getScaledImageFileName());
                            } catch (Exception e) {
                                throw new IllegalArgumentException(
                                        "Unsupported file or file not found");
                            }
                            mClipProperties.clipProperties[previewIndex].width =
                                    ((MediaImageItem)lMediaItem).getScaledWidth();
                            mClipProperties.clipProperties[previewIndex].height =
                                    ((MediaImageItem)lMediaItem).getScaledHeight();
                        }
                    } else {
                        try {
                            mClipProperties.clipProperties[previewIndex]
                                = getMediaProperties(lMediaItem.getFilename());
                        } catch (Exception e) {
                            throw new IllegalArgumentException(
                                    "Unsupported file or file not found");
                        }
                    }
                    mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
                    checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
                    adjustVolume(lMediaItem, mClipProperties, previewIndex);

                    /*
                     * Adjust media item start time and end time w.r.t to begin
                     * and end transitions associated with media item
                     */

                    adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
                            mClipProperties.clipProperties[previewIndex], lMediaItem);

                    /*
                     * Get all the effects and overlays for that media item and
                     * adjust start time and duration of effects
                     */

                    effectIndex = populateEffects(lMediaItem,
                            mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
                            endCutTime, storyBoardTime);
                    storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
                    previewIndex++;

                    /* Check if there is any end transition at last media item */

                    if (i == (mediaItemsList.size() - 1)) {
                        lTransition = lMediaItem.getEndTransition();
                        if (lTransition != null && (lTransition.getDuration() > 0)) {
                            generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
                                    previewIndex);
                            break;
                        }
                    }
                }
            }
            if (!mErrorFlagSet) {
                mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
                        .getAspectRatio(), maxHeight);
                populateBackgroundMusicProperties(mediaBGMList);

                /** call to native populate settings */
                try {
                    nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
                } catch (IllegalArgumentException ex) {
                    Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
                    throw ex;
                } catch (IllegalStateException ex) {
                    Log.e(TAG, "Illegal state exception in nativePopulateSettings");
                    throw ex;
                } catch (RuntimeException ex) {
                    Log.e(TAG, "Runtime exception in nativePopulateSettings");
                    throw ex;
                }
                mInvalidatePreviewArray = false;
                mProcessingState = PROCESSING_NONE;
            }
            if (mErrorFlagSet) {
                mErrorFlagSet = false;
                throw new RuntimeException("preview generation cannot be completed");
            }
        }
    } /* END of previewStoryBoard */

    /**
     * This function is responsible for starting the preview.
     *
     * Requires previewStoryBoard() to have run (mInvalidatePreviewArray
     * false); otherwise this is a no-op.
     *
     * @param surface The surface on which preview has to be displayed
     * @param fromMs The time in ms from which preview has to be started
     * @param toMs The time in ms till preview has to be played
     * @param loop To loop the preview or not
     * @param callbackAfterFrameCount Indicates after how many frames
     *        the callback is needed
     * @param listener The PreviewProgressListener
     */
    void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
            int callbackAfterFrameCount, PreviewProgressListener listener) {
        mPreviewProgress = fromMs;
        mIsFirstProgress = true;
        mPreviewProgressListener = listener;

        if (!mInvalidatePreviewArray) {
            try {
                /** Modify the image files names to rgb image files. */
                for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
                        clipCnt++) {
                    if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType ==
                            FileType.JPG) {
                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
                            mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
                    }
                }
                nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
                nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
            } catch (IllegalArgumentException ex) {
                Log.e(TAG, "Illegal argument exception in nativeStartPreview");
                throw ex;
            } catch (IllegalStateException ex) {
                Log.e(TAG, "Illegal state exception in nativeStartPreview");
                throw ex;
            } catch (RuntimeException ex) {
                Log.e(TAG, "Runtime exception in nativeStartPreview");
                throw ex;
            }
        }
    }

    /**
     * This function is responsible for stopping the preview.
     *
     * @return the last reported preview progress position in ms
     */
    long stopPreview() {
        nativeStopPreview();
        return mPreviewProgress;
    }

    /**
     * This function is responsible for rendering a single frame
     * from the complete story board on the surface.
     *
     * @param surface The surface on which frame has to be rendered
     * @param time The time in ms at which the frame has to be rendered
     * @param surfaceWidth The surface width
     * @param surfaceHeight The surface height
     * @param overlayData The overlay data, filled from the native-reported
     *        overlay file (or cleared when there is none)
     *
     * @return The actual time from the story board at which the frame was
     *         extracted and rendered
     * @throws RuntimeException if the preview array has not been generated
     */
    long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
            int surfaceHeight, VideoEditor.OverlayData overlayData) {
        if (mInvalidatePreviewArray) {
            throw new RuntimeException("Call generate preview first");
        }

        long timeMs = 0;
        try {
            // Point image clips at their decoded RGB files (same swap as doPreview).
            for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
                    clipCnt++) {
                if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
                        mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
                }
            }

            // Reset the render preview frame params that shall be set by native.
            mRenderPreviewOverlayFile = null;
            mRenderPreviewRenderingMode = MediaRendering.RESIZING;

            nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);

            timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);

            if (mRenderPreviewOverlayFile != null) {
                overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
                        mRenderPreviewRenderingMode);
            } else {
                overlayData.setClear();
            }
        } catch (IllegalArgumentException ex) {
            Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
            throw ex;
        } catch (IllegalStateException ex) {
            Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
            throw ex;
        }

        return timeMs;
    }

    // Callback target invoked from native code with the overlay file and
    // rendering mode for the frame just rendered.
    private void previewFrameEditInfo(String filename, int renderingMode) {
        mRenderPreviewOverlayFile = filename;
        mRenderPreviewRenderingMode = renderingMode;
    }


    /**
     * This function is responsible for rendering a single frame
     * from a single media item on the surface.
     *
     * @param surface The surface on which frame has to be rendered
     * @param filepath The file path for which the frame needs to be displayed
     * @param time The time in ms at which the frame has to be rendered
     * @param framewidth The frame width
     * @param frameheight The frame height
     *
     * @return The actual time from the media item at which the frame was
     *         extracted and rendered
     */
    long renderMediaItemPreviewFrame(Surface surface, String filepath,
            long time, int framewidth, int frameheight) {
        long timeMs = 0;
        try {
            timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
                    frameheight, 0, 0, time);
        } catch (IllegalArgumentException ex) {
            Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
            throw ex;
        } catch (IllegalStateException ex) {
            Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
            throw ex;
        }

        return timeMs;
    }

    /**
     * This function sets the flag to invalidate the preview array
     * and for generating the preview again. The update is done under the
     * semaphore lock so it does not race with an in-flight operation.
     */
    void setGeneratePreview(boolean isRequired) {
        boolean semAcquiredDone = false;
        try {
            lock();
            semAcquiredDone = true;
            mInvalidatePreviewArray = isRequired;
        } catch (InterruptedException ex) {
            // NOTE(review): message mentions renderMediaItemPreviewFrame but this
            // is setGeneratePreview — looks like a copy/paste; confirm. Also the
            // interrupt status is not restored here.
            Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
        } finally {
            if (semAcquiredDone) {
                unlock();
            }
        }
    }

    /**
     * @return Returns the current status of preview invalidation
     * flag
     */
    boolean getGeneratePreview() {
        return mInvalidatePreviewArray;
    }

    /**
     * Calculates the aspect ratio from width and height, rounding the ratio
     * to 3 decimal places before bucketing it into the nearest known
     * MediaProperties.ASPECT_RATIO_* constant.
     *
     * @param w The width of media item
     * @param h The height of media item
     *
     * @return The calculated aspect ratio
     */
    int getAspectRatio(int w, int h) {
        double apRatio = (double)(w) / (double)(h);
        BigDecimal bd = new BigDecimal(apRatio);
        bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
        apRatio = bd.doubleValue();
        int var = MediaProperties.ASPECT_RATIO_16_9;
        if (apRatio >= 1.7) {
            var = MediaProperties.ASPECT_RATIO_16_9;
        } else if (apRatio >= 1.6) {
            var =
MediaProperties.ASPECT_RATIO_5_3; 3179 } else if (apRatio >= 1.5) { 3180 var = MediaProperties.ASPECT_RATIO_3_2; 3181 } else if (apRatio > 1.3) { 3182 var = MediaProperties.ASPECT_RATIO_4_3; 3183 } else if (apRatio >= 1.2) { 3184 var = MediaProperties.ASPECT_RATIO_11_9; 3185 } 3186 return var; 3187 } 3188 3189 /** 3190 * Maps the file type used in native layer 3191 * to file type used in JAVA layer 3192 * 3193 * @param fileType The file type in native layer 3194 * 3195 * @return The File type in JAVA layer 3196 */ 3197 int getFileType(int fileType) { 3198 int retValue = -1; 3199 switch (fileType) { 3200 case FileType.UNSUPPORTED: 3201 retValue = MediaProperties.FILE_UNSUPPORTED; 3202 break; 3203 case FileType.THREE_GPP: 3204 retValue = MediaProperties.FILE_3GP; 3205 break; 3206 case FileType.MP4: 3207 retValue = MediaProperties.FILE_MP4; 3208 break; 3209 case FileType.JPG: 3210 retValue = MediaProperties.FILE_JPEG; 3211 break; 3212 case FileType.PNG: 3213 retValue = MediaProperties.FILE_PNG; 3214 break; 3215 case FileType.MP3: 3216 retValue = MediaProperties.FILE_MP3; 3217 break; 3218 case FileType.M4V: 3219 retValue = MediaProperties.FILE_M4V; 3220 break; 3221 3222 default: 3223 retValue = -1; 3224 } 3225 return retValue; 3226 } 3227 3228 /** 3229 * Maps the video codec type used in native layer 3230 * to video codec type used in JAVA layer 3231 * 3232 * @param codecType The video codec type in native layer 3233 * 3234 * @return The video codec type in JAVA layer 3235 */ 3236 int getVideoCodecType(int codecType) { 3237 int retValue = -1; 3238 switch (codecType) { 3239 case VideoFormat.H263: 3240 retValue = MediaProperties.VCODEC_H263; 3241 break; 3242 case VideoFormat.H264: 3243 retValue = MediaProperties.VCODEC_H264BP; 3244 break; 3245 case VideoFormat.MPEG4: 3246 retValue = MediaProperties.VCODEC_MPEG4; 3247 break; 3248 case VideoFormat.UNSUPPORTED: 3249 3250 default: 3251 retValue = -1; 3252 } 3253 return retValue; 3254 } 3255 3256 /** 3257 * Maps the audio 
codec type used in native layer 3258 * to audio codec type used in JAVA layer 3259 * 3260 * @param audioType The audio codec type in native layer 3261 * 3262 * @return The audio codec type in JAVA layer 3263 */ 3264 int getAudioCodecType(int codecType) { 3265 int retValue = -1; 3266 switch (codecType) { 3267 case AudioFormat.AMR_NB: 3268 retValue = MediaProperties.ACODEC_AMRNB; 3269 break; 3270 case AudioFormat.AAC: 3271 retValue = MediaProperties.ACODEC_AAC_LC; 3272 break; 3273 case AudioFormat.MP3: 3274 retValue = MediaProperties.ACODEC_MP3; 3275 break; 3276 3277 default: 3278 retValue = -1; 3279 } 3280 return retValue; 3281 } 3282 3283 /** 3284 * Returns the frame rate as integer 3285 * 3286 * @param fps The fps as enum 3287 * 3288 * @return The frame rate as integer 3289 */ 3290 int getFrameRate(int fps) { 3291 int retValue = -1; 3292 switch (fps) { 3293 case VideoFrameRate.FR_5_FPS: 3294 retValue = 5; 3295 break; 3296 case VideoFrameRate.FR_7_5_FPS: 3297 retValue = 8; 3298 break; 3299 case VideoFrameRate.FR_10_FPS: 3300 retValue = 10; 3301 break; 3302 case VideoFrameRate.FR_12_5_FPS: 3303 retValue = 13; 3304 break; 3305 case VideoFrameRate.FR_15_FPS: 3306 retValue = 15; 3307 break; 3308 case VideoFrameRate.FR_20_FPS: 3309 retValue = 20; 3310 break; 3311 case VideoFrameRate.FR_25_FPS: 3312 retValue = 25; 3313 break; 3314 case VideoFrameRate.FR_30_FPS: 3315 retValue = 30; 3316 break; 3317 3318 default: 3319 retValue = -1; 3320 } 3321 return retValue; 3322 } 3323 3324 /** 3325 * Maps the file type used in JAVA layer 3326 * to file type used in native layer 3327 * 3328 * @param fileType The file type in JAVA layer 3329 * 3330 * @return The File type in native layer 3331 */ 3332 int getMediaItemFileType(int fileType) { 3333 int retValue = -1; 3334 3335 switch (fileType) { 3336 case MediaProperties.FILE_UNSUPPORTED: 3337 retValue = FileType.UNSUPPORTED; 3338 break; 3339 case MediaProperties.FILE_3GP: 3340 retValue = FileType.THREE_GPP; 3341 break; 3342 case 
MediaProperties.FILE_MP4: 3343 retValue = FileType.MP4; 3344 break; 3345 case MediaProperties.FILE_JPEG: 3346 retValue = FileType.JPG; 3347 break; 3348 case MediaProperties.FILE_PNG: 3349 retValue = FileType.PNG; 3350 break; 3351 case MediaProperties.FILE_M4V: 3352 retValue = FileType.M4V; 3353 break; 3354 3355 default: 3356 retValue = -1; 3357 } 3358 return retValue; 3359 3360 } 3361 3362 /** 3363 * Maps the rendering mode used in native layer 3364 * to rendering mode used in JAVA layer 3365 * 3366 * @param renderingMode The rendering mode in JAVA layer 3367 * 3368 * @return The rendering mode in native layer 3369 */ 3370 int getMediaItemRenderingMode(int renderingMode) { 3371 int retValue = -1; 3372 switch (renderingMode) { 3373 case MediaItem.RENDERING_MODE_BLACK_BORDER: 3374 retValue = MediaRendering.BLACK_BORDERS; 3375 break; 3376 case MediaItem.RENDERING_MODE_STRETCH: 3377 retValue = MediaRendering.RESIZING; 3378 break; 3379 case MediaItem.RENDERING_MODE_CROPPING: 3380 retValue = MediaRendering.CROPPING; 3381 break; 3382 3383 default: 3384 retValue = -1; 3385 } 3386 return retValue; 3387 } 3388 3389 /** 3390 * Maps the transition behavior used in JAVA layer 3391 * to transition behavior used in native layer 3392 * 3393 * @param transitionType The transition behavior in JAVA layer 3394 * 3395 * @return The transition behavior in native layer 3396 */ 3397 int getVideoTransitionBehaviour(int transitionType) { 3398 int retValue = -1; 3399 switch (transitionType) { 3400 case Transition.BEHAVIOR_SPEED_UP: 3401 retValue = TransitionBehaviour.SPEED_UP; 3402 break; 3403 case Transition.BEHAVIOR_SPEED_DOWN: 3404 retValue = TransitionBehaviour.SPEED_DOWN; 3405 break; 3406 case Transition.BEHAVIOR_LINEAR: 3407 retValue = TransitionBehaviour.LINEAR; 3408 break; 3409 case Transition.BEHAVIOR_MIDDLE_SLOW: 3410 retValue = TransitionBehaviour.SLOW_MIDDLE; 3411 break; 3412 case Transition.BEHAVIOR_MIDDLE_FAST: 3413 retValue = TransitionBehaviour.FAST_MIDDLE; 3414 break; 3415 
3416 default: 3417 retValue = -1; 3418 } 3419 return retValue; 3420 } 3421 3422 /** 3423 * Maps the transition slide direction used in JAVA layer 3424 * to transition slide direction used in native layer 3425 * 3426 * @param slideDirection The transition slide direction 3427 * in JAVA layer 3428 * 3429 * @return The transition slide direction in native layer 3430 */ 3431 int getSlideSettingsDirection(int slideDirection) { 3432 int retValue = -1; 3433 switch (slideDirection) { 3434 case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN: 3435 retValue = SlideDirection.RIGHT_OUT_LEFT_IN; 3436 break; 3437 case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN: 3438 retValue = SlideDirection.LEFT_OUT_RIGTH_IN; 3439 break; 3440 case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN: 3441 retValue = SlideDirection.TOP_OUT_BOTTOM_IN; 3442 break; 3443 case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN: 3444 retValue = SlideDirection.BOTTOM_OUT_TOP_IN; 3445 break; 3446 3447 default: 3448 retValue = -1; 3449 } 3450 return retValue; 3451 } 3452 3453 /** 3454 * Maps the effect color type used in JAVA layer 3455 * to effect color type used in native layer 3456 * 3457 * @param effect The EffectColor reference 3458 * 3459 * @return The color effect value from native layer 3460 */ 3461 private int getEffectColorType(EffectColor effect) { 3462 int retValue = -1; 3463 switch (effect.getType()) { 3464 case EffectColor.TYPE_COLOR: 3465 if (effect.getColor() == EffectColor.GREEN) { 3466 retValue = VideoEffect.GREEN; 3467 } else if (effect.getColor() == EffectColor.PINK) { 3468 retValue = VideoEffect.PINK; 3469 } else if (effect.getColor() == EffectColor.GRAY) { 3470 retValue = VideoEffect.BLACK_AND_WHITE; 3471 } else { 3472 retValue = VideoEffect.COLORRGB16; 3473 } 3474 break; 3475 case EffectColor.TYPE_GRADIENT: 3476 retValue = VideoEffect.GRADIENT; 3477 break; 3478 case EffectColor.TYPE_SEPIA: 3479 retValue = VideoEffect.SEPIA; 3480 break; 3481 case EffectColor.TYPE_NEGATIVE: 3482 retValue = 
VideoEffect.NEGATIVE; 3483 break; 3484 case EffectColor.TYPE_FIFTIES: 3485 retValue = VideoEffect.FIFTIES; 3486 break; 3487 3488 default: 3489 retValue = -1; 3490 } 3491 return retValue; 3492 } 3493 3494 /** 3495 * Calculates video resolution for output clip 3496 * based on clip's height and aspect ratio of storyboard 3497 * 3498 * @param aspectRatio The aspect ratio of story board 3499 * @param height The height of clip 3500 * 3501 * @return The video resolution 3502 */ 3503 private int findVideoResolution(int aspectRatio, int height) { 3504 final Pair<Integer, Integer>[] resolutions; 3505 final Pair<Integer, Integer> maxResolution; 3506 int retValue = VideoFrameSize.SIZE_UNDEFINED; 3507 switch (aspectRatio) { 3508 case MediaProperties.ASPECT_RATIO_3_2: 3509 if (height == MediaProperties.HEIGHT_480) 3510 retValue = VideoFrameSize.NTSC; 3511 else if (height == MediaProperties.HEIGHT_720) 3512 retValue = VideoFrameSize.W720p; 3513 break; 3514 case MediaProperties.ASPECT_RATIO_16_9: 3515 if (height == MediaProperties.HEIGHT_480) 3516 retValue = VideoFrameSize.WVGA16x9; 3517 else if (height == MediaProperties.HEIGHT_720) 3518 retValue = VideoFrameSize.V720p; 3519 break; 3520 case MediaProperties.ASPECT_RATIO_4_3: 3521 if (height == MediaProperties.HEIGHT_480) 3522 retValue = VideoFrameSize.VGA; 3523 if (height == MediaProperties.HEIGHT_720) 3524 retValue = VideoFrameSize.S720p; 3525 break; 3526 case MediaProperties.ASPECT_RATIO_5_3: 3527 if (height == MediaProperties.HEIGHT_480) 3528 retValue = VideoFrameSize.WVGA; 3529 break; 3530 case MediaProperties.ASPECT_RATIO_11_9: 3531 if (height == MediaProperties.HEIGHT_144) 3532 retValue = VideoFrameSize.QCIF; 3533 break; 3534 } 3535 if (retValue == VideoFrameSize.SIZE_UNDEFINED) { 3536 resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio()); 3537 // Get the highest resolution 3538 maxResolution = resolutions[resolutions.length - 1]; 3539 retValue = 
findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
        }

        return retValue;
    }

    /**
     * Exports the storyboard as a movie file.
     *
     * @param filePath The output file path
     * @param projectDir The output project directory
     * @param height The height of the clip
     * @param bitrate The bitrate at which the movie should be exported
     * @param mediaItemsList The media items list
     * @param mediaTransitionList The transitions list
     * @param mediaBGMList The background track list
     * @param listener The ExportProgressListener
     *
     * @throws IllegalArgumentException if {@code bitrate} is not one of the
     *         supported MediaProperties.BITRATE_* values
     * @throws RuntimeException if clip generation fails in the native layer
     */
    void export(String filePath, String projectDir, int height, int bitrate,
            List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
            List<AudioTrack> mediaBGMList, ExportProgressListener listener) {

        int outBitrate = 0;
        mExportFilename = filePath;
        // Regenerate the preview data so the export works from current settings.
        previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
        mExportProgressListener = listener;

        mProgressToApp = 0;

        // Map the public bitrate constants onto native Bitrate values.
        // NOTE(review): 28K and 40K are mapped UP to 32/48 kbps — presumably
        // because native has no exact match; confirm against the native enum.
        switch (bitrate) {
            case MediaProperties.BITRATE_28K:
                outBitrate = Bitrate.BR_32_KBPS;
                break;
            case MediaProperties.BITRATE_40K:
                outBitrate = Bitrate.BR_48_KBPS;
                break;
            case MediaProperties.BITRATE_64K:
                outBitrate = Bitrate.BR_64_KBPS;
                break;
            case MediaProperties.BITRATE_96K:
                outBitrate = Bitrate.BR_96_KBPS;
                break;
            case MediaProperties.BITRATE_128K:
                outBitrate = Bitrate.BR_128_KBPS;
                break;
            case MediaProperties.BITRATE_192K:
                outBitrate = Bitrate.BR_192_KBPS;
                break;
            case MediaProperties.BITRATE_256K:
                outBitrate = Bitrate.BR_256_KBPS;
                break;
            case MediaProperties.BITRATE_384K:
                outBitrate = Bitrate.BR_384_KBPS;
                break;
            case MediaProperties.BITRATE_512K:
                outBitrate = Bitrate.BR_512_KBPS;
                break;
            case MediaProperties.BITRATE_800K:
                outBitrate = Bitrate.BR_800_KBPS;
                break;
            case MediaProperties.BITRATE_2M:
                outBitrate = Bitrate.BR_2_MBPS;
                break;
            case MediaProperties.BITRATE_5M:
                outBitrate = Bitrate.BR_5_MBPS;
                break;
            case MediaProperties.BITRATE_8M:
                outBitrate = Bitrate.BR_8_MBPS;
                break;
            default:
                throw new IllegalArgumentException("Argument Bitrate incorrect");
        }
        // Fixed output parameters for export: 30 fps H.264 with stereo AAC @ 32 kHz.
        mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
        mPreviewEditSettings.outputFile = mOutputFilename = filePath;

        int aspectRatio = mVideoEditor.getAspectRatio();
        mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
        mPreviewEditSettings.videoFormat = VideoFormat.H264;
        mPreviewEditSettings.audioFormat = AudioFormat.AAC;
        mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
        mPreviewEditSettings.maxFileSize = 0;
        mPreviewEditSettings.audioChannels = 2;
        mPreviewEditSettings.videoBitrate = outBitrate;
        mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;

        // Transitions are disabled for export: one NONE transition per clip boundary.
        mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
        for (int index = 0; index < mTotalClips - 1; index++) {
            mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
            mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
                VideoTransition.NONE;
            mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
                AudioTransition.NONE;
        }
        // For export, JPEG clips use the ORIGINAL image path (unlike preview,
        // which uses the decoded RGB path).
        for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
            if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
                mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
                    mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
            }
        }
        nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);

        int err = 0;
        try {
            mProcessingState = PROCESSING_EXPORT;
            mProcessingObject = null;
            err = generateClip(mPreviewEditSettings);
            mProcessingState = PROCESSING_NONE;
        } catch (IllegalArgumentException ex) {
            Log.e(TAG, "IllegalArgument for generateClip");
            throw ex;
        } catch (IllegalStateException ex) {
            Log.e(TAG, "IllegalStateExceptiont for generateClip");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "RuntimeException for generateClip");
            throw ex;
        }

        if (err != 0) {
            Log.e(TAG, "RuntimeException for generateClip");
            throw new RuntimeException("generateClip failed with error=" + err);
        }

        mExportProgressListener = null;
    }

    /**
     * Stops the export process and deletes the partially written output file.
     *
     * NOTE(review): the {@code filename} parameter is ignored — the method
     * always deletes {@code mExportFilename} (set by export()); confirm
     * whether the parameter can be removed at the call sites.
     *
     * @param filename The input file name for which export has to be stopped
     */
    void stop(String filename) {
        try {
            stopEncoding();
            new File(mExportFilename).delete();
        } catch (IllegalStateException ex) {
            Log.e(TAG, "Illegal state exception in unload settings");
            throw ex;
        } catch (RuntimeException ex) {
            Log.e(TAG, "Runtime exception in unload settings");
            throw ex;
        }
    }

    /**
     * Extracts a frame from the input file and returns the frame as a bitmap.
     *
     * @param inputFile The input file
     * @param width The width of the output frame
     * @param height The height of the output frame
     * @param timeMS The time in ms at which the frame has to be extracted
     *
     * @return The extracted frame, scaled to width x height if the even-ized
     *         native dimensions differed from the requested ones
     * @throws IllegalArgumentException if inputFile is null
     */
    Bitmap getPixels(String inputFile, int width, int height, long timeMS) {
        if (inputFile == null) {
            throw new IllegalArgumentException();
        }

        int newWidth = 0;
        int newHeight = 0;
        Bitmap tempBitmap = null;

        /* Round width and height up to even values (native requirement,
         * presumably for YUV alignment — TODO confirm). */
        newWidth = (width + 1) & 0xFFFFFFFE;
        newHeight = (height + 1) & 0xFFFFFFFE;

        /* Create a temp bitmap for resized thumbnails */
        if ((newWidth != width) || (newHeight != height)) {
            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
        }

        // NOTE(review): ARGB_8888 needs one int per pixel, so "* 4" appears to
        // over-allocate by 4x; the native fill layout is not visible here, so
        // it is left unchanged — confirm against the JNI implementation.
        IntBuffer rgb888 = IntBuffer.allocate(newWidth * newHeight * 4);
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        nativeGetPixels(inputFile, rgb888.array(), newWidth, newHeight, timeMS);

        if ((newWidth == width) && (newHeight == height)) {
            bitmap.copyPixelsFromBuffer(rgb888);
        } else {
            /* Copy into the temp bitmap, then scale down to the requested size */
            tempBitmap.copyPixelsFromBuffer(rgb888);

            /* Create a canvas to resize */
            final Canvas canvas = new Canvas(bitmap);
            canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
                    new Rect(0, 0, width, height), sResizePaint);
        }

        if (tempBitmap != null) {
            tempBitmap.recycle();
        }
        return bitmap;
    }

    /**
     * Extracts a list of frames from the input file and returns the frames in
     * a bitmap array.
     *
     * @param filename The input file
     * @param width The width of the output frames
     * @param height The height of the output frames
     * @param startMs The starting time in ms
     * @param endMs The end time in ms
     * @param thumbnailCount The number of frames to be extracted from startMs
     *            to endMs (may be clamped to MAX_THUMBNAIL_PERMITTED when the
     *            full buffer does not fit in memory)
     *
     * @return The frames as bitmaps in a bitmap array
     **/
    public Bitmap[] getPixelsList(String filename, int width, int height, long startMs, long endMs,
            int thumbnailCount) {
        int[] rgb888 = null;
        int thumbnailSize = 0;
        int newWidth = 0;
        int newHeight = 0;
        Bitmap tempBitmap = null;

        /* Round width and height up to even values (same as getPixels) */
        newWidth = (width + 1) & 0xFFFFFFFE;
        newHeight = (height + 1) & 0xFFFFFFFE;
        // NOTE(review): same "* 4" over-allocation question as getPixels; this
        // stride is also used when slicing rgb888 below, and must match what
        // native writes — do not change one without the other.
        thumbnailSize = newWidth * newHeight * 4;

        /* Create a temp bitmap for resized thumbnails */
        if ((newWidth != width) || (newHeight != height)) {
            tempBitmap = Bitmap.createBitmap(newWidth, newHeight, Bitmap.Config.ARGB_8888);
        }
        int i = 0;
        int deltaTime = (int)(endMs - startMs) / thumbnailCount;
        Bitmap[] bitmaps = null;

        try {
            // This may result in an OutOfMemoryError for large counts.
            rgb888 = new int[thumbnailSize * thumbnailCount];
            bitmaps = new Bitmap[thumbnailCount];
        } catch (Throwable e) {
            // Retry with a fixed, smaller thumbnail count.
            try {
                System.gc();
                rgb888 = new int[thumbnailSize * MAX_THUMBNAIL_PERMITTED];
                bitmaps = new Bitmap[MAX_THUMBNAIL_PERMITTED];
                thumbnailCount = MAX_THUMBNAIL_PERMITTED;
            } catch (Throwable ex) {
                throw new RuntimeException("Memory allocation fails, thumbnail count too large: "+thumbnailCount);
            }
        }
        IntBuffer tmpBuffer = IntBuffer.allocate(thumbnailSize);
        nativeGetPixelsList(filename, rgb888, newWidth, newHeight, deltaTime, thumbnailCount,
                startMs, endMs);

        for (; i < thumbnailCount; i++) {
            bitmaps[i] = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            // Slice the i-th thumbnail out of the flat native buffer.
            tmpBuffer.put(rgb888, (i * thumbnailSize), thumbnailSize);
            tmpBuffer.rewind();

            if ((newWidth == width) && (newHeight == height)) {
                bitmaps[i].copyPixelsFromBuffer(tmpBuffer);
            } else {
                /* Copy the out rgb buffer to temp bitmap */
                tempBitmap.copyPixelsFromBuffer(tmpBuffer);

                /* Create a canvas to resize */
                final Canvas canvas = new Canvas(bitmaps[i]);
                canvas.drawBitmap(tempBitmap, new Rect(0, 0, newWidth, newHeight),
                        new Rect(0, 0, width, height), sResizePaint);
            }
        }

        if (tempBitmap != null) {
            tempBitmap.recycle();
        }
        return bitmaps;
    }

    /**
     * Generates the audio graph.
     *
     * @param uniqueId The unique id used to name the temporary PCM file
     * @param inFileName The input file
     * @param OutAudiGraphFileName The output file name
     * @param frameDuration The duration of each frame
     * @param audioChannels The number of audio channels
     * @param samplesCount Total number of samples
     * @param listener ExtractAudioWaveformProgressListener reference
     * @param isVideo true if the input file is a video file (a temporary PCM
     *            file is generated and deleted afterwards); false to reuse the
     *            audio track's existing PCM file
     **/
    void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
            int frameDuration, int audioChannels, int samplesCount,
            ExtractAudioWaveformProgressListener listener, boolean isVideo) {
        String tempPCMFileName;

        mExtractAudioWaveformProgressListener = listener;

        /**
         * For video files the PCM is first extracted to a temporary file;
         * for audio tracks the pre-existing PCM file is used directly.
         */
        if (isVideo) {
            tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
        } else {
            tempPCMFileName = mAudioTrackPCMFilePath;
        }

        /**
         * For a video item, generate the PCM
         */
        if (isVideo) {
            nativeGenerateRawAudio(inFileName, tempPCMFileName);
        }

        nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
                audioChannels, samplesCount);

        /**
         * Once the audio graph file is generated, delete the temporary PCM file
         */
        if (isVideo) {
            new File(tempPCMFileName).delete();
        }
    }

    // Clears the given preview surface in the native layer.
    void clearPreviewSurface(Surface surface) {
        nativeClearSurface(surface);
    }

    /**
     * Grab the semaphore which arbitrates access to the editor
     *
     * @throws InterruptedException
     */
    void lock() throws InterruptedException {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "lock: grabbing semaphore", new Throwable());
        }
        mLock.acquire();
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "lock: grabbed semaphore");
        }
    }

    /**
     * Release the semaphore which arbitrates access to the editor
     */
    void unlock() {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "unlock: releasing semaphore");
        }
        mLock.release();
    }

    /** Native Methods */
    /**
     * Returns the media properties of the given file as reported by the
     * native layer.
     */
    native Properties getMediaProperties(String file) throws IllegalArgumentException,
            IllegalStateException, RuntimeException, Exception;

    /**
     * Get the version of ManualEdit.
     *
     * @return version of ManualEdit
     * @throws RuntimeException if an error occurred
     * @see Version
     */
    private static native Version getVersion() throws RuntimeException;

    /**
     * Returns the video thumbnail in an array of integers. Output format is
     * ARGB8888.
     *
     * @param pixelArray the array that receives the pixel values
     * @param width width of the video thumbnail
     * @param height height of the video thumbnail
     * @param timeMS desired time of the thumbnail in ms
     * @return actual time in ms of the thumbnail generated
     * @throws IllegalStateException if the class has not been initialized
     * @throws IllegalArgumentException if the pixelArray is not available or
     *             one of the dimensions is negative or zero or the time is
     *             negative
     * @throws RuntimeException on runtime errors in native code
     */
    private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
            long timeMS);

    /**
     * Batch variant of nativeGetPixels: extracts {@code nosofTN} thumbnails
     * between startTimeMs and endTimeMs, {@code timeMS} apart, into pixelArray.
     */
    private native int nativeGetPixelsList(String fileName, int[] pixelArray, int width, int height,
            int timeMS, int nosofTN, long startTimeMs, long endTimeMs);

    /**
     * Releases the JNI and cleans up the core native module. Should be called
     * only after init( )
     *
     * @throws IllegalStateException if the method could not be called
     */
    private native void release() throws IllegalStateException, RuntimeException;

    /*
     * Clear the preview surface
     */
    private native void nativeClearSurface(Surface surface);

    /**
     * Stops the encoding. This method should only be called after encoding has
     * started using method <code> startEncoding</code>
     *
     * @throws IllegalStateException if the method could not be called
     */
    private native void stopEncoding() throws IllegalStateException, RuntimeException;

    /** Initializes the native module with the project's temp and library paths. */
    private native void _init(String tempPath, String libraryPath)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Starts native preview playback on the given surface; see doPreview(). */
    private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
            int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
            IllegalStateException, RuntimeException;

    /** Pushes the edit, clip-property and audio settings down to native. */
    private native void nativePopulateSettings(EditSettings editSettings,
            PreviewClipProperties mProperties, AudioSettings mAudioSettings)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Renders one storyboard frame at timeMs; returns the actual frame time. */
    private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
            int surfaceWidth, int surfaceHeight)
            throws IllegalArgumentException,
            IllegalStateException, RuntimeException;

    /** Renders one frame of a single media item; returns the actual frame time. */
    private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
            int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

    /** Stops the native preview started by nativeStartPreview(). */
    private native void nativeStopPreview();

    /** Generates the audio-graph file from a PCM file; see generateAudioGraph(). */
    private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
            int frameDuration, int channels, int sampleCount);

    /** Extracts raw PCM audio from the given input file. */
    private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);

    /** Generates (encodes) the final clip from the given settings; see export(). */
    private native int nativeGenerateClip(EditSettings editSettings)
            throws IllegalArgumentException, IllegalStateException, RuntimeException;

}