M4VSS3GPP_EditVideo.c revision d67e9da63323d965760337913f86de28024531a4
/*
 * Copyright (C) 2004-2011 NXP Software
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 ******************************************************************************
 * @file    M4VSS3GPP_EditVideo.c
 * @brief   Video Studio Service 3GPP edit API implementation.
 * @note    Implements the per-step video processing of the edit engine:
 *          direct AU copy (read/write), re-encode (decode/encode, begin-cut)
 *          and transition blending between two clips.
 ******************************************************************************
 */

/****************/
/*** Includes ***/
/****************/

#include "NXPSW_CompilerSwitches.h"
/**
 * Our header */
#include "M4VSS3GPP_API.h"
#include "M4VSS3GPP_InternalTypes.h"
#include "M4VSS3GPP_InternalFunctions.h"
#include "M4VSS3GPP_InternalConfig.h"
#include "M4VSS3GPP_ErrorCodes.h"

// StageFright encoders require %16 resolution
#include "M4ENCODER_common.h"
/**
 * OSAL headers */
#include "M4OSA_Memory.h" /**< OSAL memory management */
#include "M4OSA_Debug.h"  /**< OSAL debug management */

/**
 * component includes */
#include "M4VFL_transition.h" /**< video effects */

/*for transition behaviour*/
#include <math.h>

/************************************************************************/
/* Static local functions                                               */
/************************************************************************/

static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
    M4VSS3GPP_InternalEditContext *pC );
static M4OSA_Void
M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
                                M4OSA_UInt8 uiClipNumber );
static M4OSA_ERR
M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,/*M4OSA_UInt8 uiClip1orClip2,*/
                               M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut );
static M4OSA_ERR
M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
                              M4VIFI_ImagePlane *pPlaneOut );

static M4OSA_Void
M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
                             M4SYS_AccessUnit *pAU );
static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
                                                   M4OSA_UInt8 uiCts );
static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
                                            M4OSA_UInt32 uiCtsSec );
static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
                                            M4OSA_UInt32 *pCtsSec );
static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
                                              M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight );

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
 * @brief    One step of video processing.
 * @note     Dispatches on pC->Vstate (set by M4VSS3GPP_intCheckVideoMode):
 *           READ_WRITE/AFTER_CUT copies one input AU straight to the writer,
 *           DECODE_ENCODE/BEGIN_CUT decodes up to the target CTS and
 *           re-encodes one frame, TRANSITION decodes both clips and encodes
 *           one blended frame. Returns early (M4NO_ERROR) while a clip is
 *           still in DECODE_UP_TO state, with the output time frozen.
 * @param    pC    (IN/OUT) Internal edit context
 * @return   M4NO_ERROR, a reader/writer/encoder error, or the warnings
 *           M4VSS3GPP_WAR_SWITCH_CLIP / M4VSS3GPP_WAR_EDITING_DONE.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
{
    M4OSA_ERR err;
    M4OSA_Int32 iCts, iNextCts;
    M4ENCODER_FrameMode FrameMode;
    M4OSA_Bool bSkipFrame;
    M4OSA_UInt16 offset;

    /**
     * Check if we reached end cut. Decorrelate input and output encoding
     * timestamp to handle encoder prefetch
     */
    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
    {
        /* Re-adjust video to precise cut time */
        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
        if ( pC->iInOutTimeOffset < 0 ) {
            pC->iInOutTimeOffset = 0;
        }

        /**
        * Video is done for this clip */
        err = M4VSS3GPP_intReachedEndOfVideo(pC);

        /* RC: to know when a file has been processed */
        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
                err);
        }

        return err;
    }

    /* Don't change the states if we are in decodeUpTo() */
    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
        && (( pC->pC2 == M4OSA_NULL)
        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
    {
        /**
        * Check Video Mode, depending on the current output CTS */
        err = M4VSS3GPP_intCheckVideoMode(
            pC); /**< This function change the pC->Vstate variable! */

        if (M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
                err);
            return err;
        }
    }


    switch( pC->Vstate )
    {
        /* _________________ */
        /*|                 |*/
        /*| READ_WRITE MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_READ_WRITE:
        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
            {
                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");

                bSkipFrame = M4OSA_FALSE;

                /**
                * If we were decoding the clip, we must jump to be sure
                * to get to the good position. */
                if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
                {
                    /**
                    * Jump to target video time (tc = to-T) */
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
                    err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo:\
                            READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
                            err);
                        return err;
                    }

                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream,
                        &pC->pC1->VideoAU);

                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo:\
                            READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
                            err);
                        return err;
                    }

                    M4OSA_TRACE2_3("A .... read : cts = %.0f + %ld [ 0x%x ]",
                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                        pC->pC1->VideoAU.m_size);

                    /* This frame has been already written in BEGIN CUT step -> skip it */
                    if( pC->pC1->VideoAU.m_CTS == iCts
                        && pC->pC1->iVideoRenderCts >= iCts )
                    {
                        bSkipFrame = M4OSA_TRUE;
                    }
                }

                /* This frame has been already written in BEGIN CUT step -> skip it */
                if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
                    && (pC->pC1->VideoAU.m_CTS
                    + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
                {
                    bSkipFrame = M4OSA_TRUE;
                }

                /**
                * Remember the clip reading state */
                pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                // Rounding is to compensate reader imprecision (m_CTS is actually an integer)
                iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
                iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;

                /* Avoid to write a last frame of duration 0 */
                if( iNextCts > pC->pC1->iEndTime )
                    iNextCts = pC->pC1->iEndTime;

                /**
                * If the AU is good to be written, write it, else just skip it */
                if( ( M4OSA_FALSE == bSkipFrame)
                    && (( pC->pC1->VideoAU.m_CTS >= iCts)
                    && (pC->pC1->VideoAU.m_CTS < iNextCts)
                    && (pC->pC1->VideoAU.m_size > 0)) )
                {
                    /**
                    * Get the output AU to write into */
                    err = pC->ShellAPI.pWriterDataFcts->pStartAU(
                        pC->ewc.p3gpWriterContext,
                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                        &pC->ewc.WriterVideoAU);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            pWriterDataFcts->pStartAU(Video) returns 0x%x!",
                            err);
                        return err;
                    }

                    /**
                    * Copy the input AU to the output AU */
                    pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
                        (M4OSA_Time)pC->pC1->iVoffset;
                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
                    offset = 0;
                    /* for h.264 stream do not read the 1st 4 bytes as they are header
                     indicators */
                    if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
                        == M4DA_StreamTypeVideoMpeg4Avc )
                        offset = 4;

                    pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
                    if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
                    {
                        M4OSA_TRACE1_2(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
                            MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
                            pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
                        return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
                    }

                    M4OSA_memcpy((M4OSA_MemAddr8)pC->ewc.WriterVideoAU.dataAddress,
                        (pC->pC1->VideoAU.m_dataAddress + offset),
                        (pC->ewc.WriterVideoAU.size));

                    /**
                    * Update time info for the Counter Time System to be equal to the bit
                    -stream time*/
                    M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
                    M4OSA_TRACE2_2("B ---- write : cts = %lu [ 0x%x ]",
                        pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);

                    /**
                    * Write the AU */
                    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
                        pC->ewc.p3gpWriterContext,
                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                        &pC->ewc.WriterVideoAU);

                    if( M4NO_ERROR != err )
                    {
                        /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                        file size is reached
                        The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                        is returned*/
                        if( M4WAR_WRITER_STOP_REQ == err )
                        {
                            M4OSA_TRACE1_0(
                                "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                            return M4VSS3GPP_WAR_EDITING_DONE;
                        }
                        else
                        {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                                pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
                                err);
                            return err;
                        }
                    }

                    /**
                    * Read next AU for next step */
                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream,
                        &pC->pC1->VideoAU);

                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                            err);
                        return err;
                    }

                    M4OSA_TRACE2_3("C .... read : cts = %.0f + %ld [ 0x%x ]",
                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                        pC->pC1->VideoAU.m_size);
                }
                else
                {
                    /**
                    * Decide wether to read or to increment time increment */
                    if( ( pC->pC1->VideoAU.m_size == 0)
                        || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
                    {
                        /*Increment time by the encoding period (NO_MORE_AU or reader in advance */
                        // Decorrelate input and output encoding timestamp to handle encoder prefetch
                        pC->ewc.dInputVidCts += pC->dOutputFrameDuration;

                        /* Switch (from AFTER_CUT) to normal mode because time is
                        no more frozen */
                        pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
                    }
                    else
                    {
                        /* In other cases (reader late), just let the reader catch up
                        pC->ewc.dVTo */
                        err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                            pC->pC1->pReaderContext,
                            (M4_StreamHandler *)pC->pC1->pVideoStream,
                            &pC->pC1->VideoAU);

                        if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                        {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                                m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                                err);
                            return err;
                        }

                        M4OSA_TRACE2_3("D .... read : cts = %.0f + %ld [ 0x%x ]",
                            pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                            pC->pC1->VideoAU.m_size);
                    }
                }
            }
            break;

        /* ____________________ */
        /*|                    |*/
        /*| DECODE_ENCODE MODE |*/
        /*|   BEGIN_CUT MODE   |*/
        /*|____________________|*/

        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
            {
                M4OSA_TRACE3_0(
                    "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");

                /**
                * Decode the video up to the target time
                (will jump to the previous RAP if needed ) */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
                        err);
                    return err;
                }

                /* If the decoding is not completed, do one more step with time frozen */
                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
                {
                    return M4NO_ERROR;
                }

                /**
                * Reset the video pre-processing error before calling the encoder */
                pC->ewc.VppError = M4NO_ERROR;

                M4OSA_TRACE2_0("E ++++ encode AU");

                /**
                * Encode the frame(rendering,filtering and writing will be done
                in encoder callbacks)*/
                /* BEGIN_CUT forces an I-frame so the stream can restart cleanly at the cut */
                if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
                    FrameMode = M4ENCODER_kIFrame;
                else
                    FrameMode = M4ENCODER_kNormalFrame;

                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                    pC->ewc.dInputVidCts, FrameMode);
                /**
                * Check if we had a VPP error... */
                if( M4NO_ERROR != pC->ewc.VppError )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                        pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                    {
#endif //M4VSS_SUPPORT_OMX_CODECS

                        return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    }

#endif //M4VSS_SUPPORT_OMX_CODECS

                }
                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                {
                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                    }
                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                    file size is reached
                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                    is returned*/
                    else if( M4WAR_WRITER_STOP_REQ == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                        return M4VSS3GPP_WAR_EDITING_DONE;
                    }
                    else
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                            err);
                        return err;
                    }
                }

                /**
                * Increment time by the encoding period (for begin cut, do not increment to not
                loose P-frames) */
                if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
                {
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
                }
            }
            break;

        /* _________________ */
        /*|                 |*/
        /*| TRANSITION MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_TRANSITION:
            {
                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");

                /* Don't decode more than needed */
                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
                {
                    /**
                    * Decode the clip1 video up to the target time
                    (will jump to the previous RAP if needed */
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
                        (M4OSA_Int32)pC->ewc.dInputVidCts);
                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
                            err);
                        return err;
                    }

                    /* If the decoding is not completed, do one more step with time frozen */
                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
                    {
                        return M4NO_ERROR;
                    }
                }

                /* Don't decode more than needed */
                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
                {
                    /**
                    * Decode the clip2 video up to the target time
                    (will jump to the previous RAP if needed) */
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
                        (M4OSA_Int32)pC->ewc.dInputVidCts);
                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
                            err);
                        return err;
                    }

                    /* If the decoding is not completed, do one more step with time frozen */
                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
                    {
                        return M4NO_ERROR;
                    }
                }

                /**
                * Reset the video pre-processing error before calling the encoder */
                pC->ewc.VppError = M4NO_ERROR;

                M4OSA_TRACE2_0("F **** blend AUs");

                /**
                * Encode the frame (rendering, filtering and writing will be done
                in encoder callbacks */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                    pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);

                /**
                * If encode returns a process frame error, it is likely to be a VPP error */
                if( M4NO_ERROR != pC->ewc.VppError )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                        pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                    {

#endif //M4VSS_SUPPORT_OMX_CODECS

                        return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    }

#endif //M4VSS_SUPPORT_OMX_CODECS

                }
                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                {
                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                    }

                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                    file size is reached
                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
                    returned*/
                    else if( M4WAR_WRITER_STOP_REQ == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                        return M4VSS3GPP_WAR_EDITING_DONE;
                    }
                    else
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                            err);
                        return err;
                    }
                }

                /**
                * Increment time by the encoding period */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
            }
            break;

        /* ____________ */
        /*|            |*/
        /*| ERROR CASE |*/
        /*|____________|*/

        default:
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
                returning M4VSS3GPP_ERR_INTERNAL_STATE",
                pC->Vstate);
            return M4VSS3GPP_ERR_INTERNAL_STATE;
    }

    /**
    * Return with no error */
    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
    return M4NO_ERROR;
}
M4VSS3GPP_ERR_INTERNAL_STATE", 627 pC->Vstate); 628 return M4VSS3GPP_ERR_INTERNAL_STATE; 629 } 630 631 /** 632 * Return with no error */ 633 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR"); 634 return M4NO_ERROR; 635} 636 637/** 638 ****************************************************************************** 639 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode() 640 * @brief Check which video process mode we must use, depending on the output CTS. 641 * @param pC (IN/OUT) Internal edit context 642 ****************************************************************************** 643 */ 644static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 645 M4VSS3GPP_InternalEditContext *pC ) 646{ 647 M4OSA_ERR err; 648 // Decorrelate input and output encoding timestamp to handle encoder prefetch 649 const M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 650 /**< Transition duration */ 651 const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 652 653 M4OSA_Int32 iTmp; 654 655 const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate; 656 657 /** 658 * Check if Clip1 is on its begin cut, or in an effect zone */ 659 M4VSS3GPP_intCheckVideoEffects(pC, 1); 660 661 /** 662 * Check if we are in the transition with next clip */ 663 if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) ) 664 { 665 /** 666 * We are in a transition */ 667 pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION; 668 pC->bTransitionEffect = M4OSA_TRUE; 669 670 /** 671 * Open second clip for transition, if not yet opened */ 672 if( M4OSA_NULL == pC->pC2 ) 673 { 674 err = M4VSS3GPP_intOpenClip(pC, &pC->pC2, 675 &pC->pClipList[pC->uiCurrentClip + 1]); 676 677 if( M4NO_ERROR != err ) 678 { 679 M4OSA_TRACE1_1( 680 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!", 681 err); 682 return err; 683 } 684 685 /** 686 * Add current video output CTS to the clip offset 687 * (audio output CTS is not yet at the transition, so audio 688 * offset can't be 
updated yet). */ 689 // Decorrelate input and output encoding timestamp to handle encoder prefetch 690 pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts; 691 692 /** 693 * 2005-03-24: BugFix for audio-video synchro: 694 * Update transition duration due to the actual video transition beginning time. 695 * It will avoid desynchronization when doing the audio transition. */ 696 // Decorrelate input and output encoding timestamp to handle encoder prefetch 697 iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\ 698 - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset); 699 if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration) 700 /**< Test in case of a very short transition */ 701 { 702 pC->pTransitionList[pC-> 703 uiCurrentClip].uiTransitionDuration -= iTmp; 704 705 /** 706 * Don't forget to also correct the total duration used for the progress bar 707 * (it was computed with the original transition duration). */ 708 pC->ewc.iOutputDuration += iTmp; 709 } 710 /**< No "else" here because it's hard predict the effect of 0 duration transition...*/ 711 } 712 713 /** 714 * Check effects for clip2 */ 715 M4VSS3GPP_intCheckVideoEffects(pC, 2); 716 } 717 else 718 { 719 /** 720 * We are not in a transition */ 721 pC->bTransitionEffect = M4OSA_FALSE; 722 723 /* If there is an effect we go to decode/encode mode */ 724 if ((pC->nbActiveEffects > 0) ||(pC->nbActiveEffects1 > 0)) 725 { 726 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 727 } 728 /* We do a begin cut, except if already done (time is not progressing because we want 729 to catch all P-frames after the cut) */ 730 else if( M4OSA_TRUE == pC->bClip1AtBeginCut ) 731 { 732 if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) { 733 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 734 pC->bEncodeTillEoF = M4OSA_TRUE; 735 } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 736 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) { 737 
pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 738 } else { 739 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 740 } 741 } 742 /* Else we are in default copy/paste mode */ 743 else 744 { 745 if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 746 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) 747 { 748 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 749 } 750 else if( pC->bIsMMS == M4OSA_TRUE ) 751 { 752 M4OSA_UInt32 currentBitrate; 753 M4OSA_ERR err = M4NO_ERROR; 754 755 /* Do we need to reencode the video to downgrade the bitrate or not ? */ 756 /* Let's compute the cirrent bitrate of the current edited clip */ 757 err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption( 758 pC->pC1->pReaderContext, 759 M4READER_kOptionID_Bitrate, ¤tBitrate); 760 761 if( err != M4NO_ERROR ) 762 { 763 M4OSA_TRACE1_1( 764 "M4VSS3GPP_intCheckVideoMode:\ 765 Error when getting next bitrate of edited clip: 0x%x", 766 err); 767 return err; 768 } 769 770 /* Remove audio bitrate */ 771 currentBitrate -= 12200; 772 773 /* Test if we go into copy/paste mode or into decode/encode mode */ 774 if( currentBitrate > pC->uiMMSVideoBitrate ) 775 { 776 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 777 } 778 else 779 { 780 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 781 } 782 } 783 else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) && 784 (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) && 785 pC->bEncodeTillEoF == M4OSA_FALSE) 786 { 787 /** 788 * Test if we go into copy/paste mode or into decode/encode mode 789 * If an external effect has been applied on the current clip 790 * then continue to be in decode/encode mode till end of 791 * clip to avoid H.264 distortion. 
792 */ 793 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 794 } 795 } 796 } 797 798 /** 799 * Check if we create an encoder */ 800 if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate) 801 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 802 == previousVstate)) /**< read mode */ 803 && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate) 804 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate) 805 || (M4VSS3GPP_kEditVideoState_TRANSITION 806 == pC->Vstate)) /**< encode mode */ 807 && pC->bIsMMS == M4OSA_FALSE ) 808 { 809 /** 810 * Create the encoder */ 811 err = M4VSS3GPP_intCreateVideoEncoder(pC); 812 813 if( M4NO_ERROR != err ) 814 { 815 M4OSA_TRACE1_1( 816 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 817 err); 818 return err; 819 } 820 } 821 else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL ) 822 { 823 /** 824 * Create the encoder */ 825 err = M4VSS3GPP_intCreateVideoEncoder(pC); 826 827 if( M4NO_ERROR != err ) 828 { 829 M4OSA_TRACE1_1( 830 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 831 err); 832 return err; 833 } 834 } 835 836 /** 837 * When we go from filtering to read/write, we must act like a begin cut, 838 * because the last filtered image may be different than the original image. 
*/ 839 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 840 || (M4VSS3GPP_kEditVideoState_TRANSITION 841 == previousVstate)) /**< encode mode */ 842 && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */ 843 && (pC->bEncodeTillEoF == M4OSA_FALSE) ) 844 { 845 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 846 } 847 848 /** 849 * Check if we destroy an encoder */ 850 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 851 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 852 || (M4VSS3GPP_kEditVideoState_TRANSITION 853 == previousVstate)) /**< encode mode */ 854 && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) 855 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 856 == pC->Vstate)) /**< read mode */ 857 && pC->bIsMMS == M4OSA_FALSE ) 858 { 859 /** 860 * Destroy the previously created encoder */ 861 err = M4VSS3GPP_intDestroyVideoEncoder(pC); 862 863 if( M4NO_ERROR != err ) 864 { 865 M4OSA_TRACE1_1( 866 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!", 867 err); 868 return err; 869 } 870 } 871 872 /** 873 * Return with no error */ 874 M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR"); 875 return M4NO_ERROR; 876} 877 878/****************************************************************************** 879 * M4OSA_ERR M4VSS3GPP_intStartAU() 880 * @brief StartAU writer-like interface used for the VSS 3GPP only 881 * @note 882 * @param pContext: (IN) It is the VSS 3GPP context in our case 883 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 884 * @param pAU: (IN/OUT) Access Unit to be prepared. 
885 * @return M4NO_ERROR: there is no error 886 ****************************************************************************** 887 */ 888M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext, 889 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 890{ 891 M4OSA_ERR err; 892 M4OSA_UInt32 uiMaxAuSize; 893 894 /** 895 * Given context is actually the VSS3GPP context */ 896 M4VSS3GPP_InternalEditContext *pC = 897 (M4VSS3GPP_InternalEditContext *)pContext; 898 899 /** 900 * Get the output AU to write into */ 901 err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext, 902 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 903 904 if( M4NO_ERROR != err ) 905 { 906 M4OSA_TRACE1_1( 907 "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!", 908 err); 909 return err; 910 } 911 912 /** 913 * Return */ 914 M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR"); 915 return M4NO_ERROR; 916} 917 918/****************************************************************************** 919 * M4OSA_ERR M4VSS3GPP_intProcessAU() 920 * @brief ProcessAU writer-like interface used for the VSS 3GPP only 921 * @note 922 * @param pContext: (IN) It is the VSS 3GPP context in our case 923 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 
/******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intProcessAU()
 * @brief   ProcessAU writer-like interface used for the VSS 3GPP only
 * @note    Thin shell handed to the encoder: records the encoded AU's CTS as
 *          the current output video time, patches the bit-stream time info,
 *          then forwards the AU to the real writer's pProcessAU.
 * @param   pContext: (IN) It is the VSS 3GPP context in our case
 * @param   streamID: (IN) Id of the stream to which the Access Unit is related.
 * @param   pAU:      (IN/OUT) Access Unit to be written
 * @return  M4NO_ERROR: there is no error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
    M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
{
    M4OSA_ERR err;

    /**
    * Given context is actually the VSS3GPP context */
    M4VSS3GPP_InternalEditContext *pC =
        (M4VSS3GPP_InternalEditContext *)pContext;

    /**
    * Fix the encoded AU time */
    // Decorrelate input and output encoding timestamp to handle encoder prefetch
    pC->ewc.dOutputVidCts = pAU->CTS;
    /**
    * Update time info for the Counter Time System to be equal to the bit-stream time */
    M4VSS3GPP_intUpdateTimeInfo(pC, pAU);

    /**
    * Write the AU */
    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);

    if( M4NO_ERROR != err )
    {
        M4OSA_TRACE1_1(
            "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
            err);
        return err;
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intVPP()
 * @brief   We implement our own VideoPreProcessing function
 * @note    It is called by the video encoder
 * @param   pContext    (IN) VPP context, which actually is the VSS 3GPP context in our case
 * @param   pPlaneIn    (IN)
 * @param   pPlaneOut   (IN/OUT) Pointer to an array of 3 planes that will contain the output
 *                      YUV420 image
 * @return  M4NO_ERROR: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
    M4VIFI_ImagePlane *pPlaneOut )
{
    M4OSA_ERR err;
    M4_MediaTime t;
M4VIFI_ImagePlane *pTmp = M4OSA_NULL; 983 M4VIFI_ImagePlane pTemp1[3],pTemp2[3]; 984 M4OSA_UInt32 i =0; 985 /** 986 * VPP context is actually the VSS3GPP context */ 987 M4VSS3GPP_InternalEditContext *pC = 988 (M4VSS3GPP_InternalEditContext *)pContext; 989 pTemp1[0].pac_data = pTemp2[0].pac_data = M4OSA_NULL; 990 /** 991 * Reset VPP error remembered in context */ 992 pC->ewc.VppError = M4NO_ERROR; 993 994 /** 995 * At the end of the editing, we may be called when no more clip is loaded. 996 * (because to close the encoder properly it must be stepped one or twice...) */ 997 if( M4OSA_NULL == pC->pC1 ) 998 { 999 /** 1000 * We must fill the input of the encoder with a dummy image, because 1001 * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */ 1002 M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[0].pac_data, 1003 pPlaneOut[0].u_stride * pPlaneOut[0].u_height, 0); 1004 M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[1].pac_data, 1005 pPlaneOut[1].u_stride * pPlaneOut[1].u_height, 0); 1006 M4OSA_memset((M4OSA_MemAddr8)pPlaneOut[2].pac_data, 1007 pPlaneOut[2].u_stride * pPlaneOut[2].u_height, 0); 1008 1009 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)"); 1010 return M4NO_ERROR; 1011 } 1012 1013 /** 1014 **************** Transition case ****************/ 1015 if( M4OSA_TRUE == pC->bTransitionEffect ) 1016 { 1017 if (M4OSA_NULL == pTemp1[0].pac_data) 1018 { 1019 err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth, 1020 pC->ewc.uiVideoHeight); 1021 if (M4NO_ERROR != err) 1022 { 1023 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \ 1024 returning M4NO_ERROR", err); 1025 pC->ewc.VppError = err; 1026 return M4NO_ERROR; /**< Return no error to the encoder core 1027 (else it may leak in some situations...) 
*/ 1028 } 1029 } 1030 if (M4OSA_NULL == pTemp2[0].pac_data) 1031 { 1032 err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth, 1033 pC->ewc.uiVideoHeight); 1034 if (M4NO_ERROR != err) 1035 { 1036 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \ 1037 returning M4NO_ERROR", err); 1038 pC->ewc.VppError = err; 1039 return M4NO_ERROR; /**< Return no error to the encoder core 1040 (else it may leak in some situations...) */ 1041 } 1042 } 1043 /** 1044 * We need two intermediate planes */ 1045 if( M4OSA_NULL == pC->yuv1[0].pac_data ) 1046 { 1047 err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth, 1048 pC->ewc.uiVideoHeight); 1049 1050 if( M4NO_ERROR != err ) 1051 { 1052 M4OSA_TRACE1_1( 1053 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1054 returning M4NO_ERROR", 1055 err); 1056 pC->ewc.VppError = err; 1057 return 1058 M4NO_ERROR; /**< Return no error to the encoder core 1059 (else it may leak in some situations...) */ 1060 } 1061 } 1062 1063 if( M4OSA_NULL == pC->yuv2[0].pac_data ) 1064 { 1065 err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth, 1066 pC->ewc.uiVideoHeight); 1067 1068 if( M4NO_ERROR != err ) 1069 { 1070 M4OSA_TRACE1_1( 1071 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\ 1072 returning M4NO_ERROR", 1073 err); 1074 pC->ewc.VppError = err; 1075 return 1076 M4NO_ERROR; /**< Return no error to the encoder core 1077 (else it may leak in some situations...) 
*/ 1078 } 1079 } 1080 1081 /** 1082 * Allocate new temporary plane if needed */ 1083 if( M4OSA_NULL == pC->yuv3[0].pac_data ) 1084 { 1085 err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth, 1086 pC->ewc.uiVideoHeight); 1087 1088 if( M4NO_ERROR != err ) 1089 { 1090 M4OSA_TRACE1_1( 1091 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1092 returning M4NO_ERROR", 1093 err); 1094 pC->ewc.VppError = err; 1095 return 1096 M4NO_ERROR; /**< Return no error to the encoder core 1097 (else it may leak in some situations...) */ 1098 } 1099 } 1100 1101 /** 1102 * Compute the time in the clip1 base: t = to - Offset */ 1103 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1104 t = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1105 1106 /** 1107 * Render Clip1 */ 1108 if( pC->pC1->isRenderDup == M4OSA_FALSE ) 1109 { 1110 if(pC->nbActiveEffects > 0) 1111 { 1112 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt, 1113 &t, pTemp1, 1114 M4OSA_TRUE); 1115 if (M4NO_ERROR != err) 1116 { 1117 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \ 1118 returning M4NO_ERROR", err); 1119 pC->ewc.VppError = err; 1120 return M4NO_ERROR; /**< Return no error to the encoder core 1121 (else it may leak in some situations...) */ 1122 } 1123 pC->bIssecondClip = M4OSA_FALSE; 1124 err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp1 ,pC->yuv1 ); 1125 if (M4NO_ERROR != err) 1126 { 1127 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \ 1128 returning M4NO_ERROR", err); 1129 pC->ewc.VppError = err; 1130 return M4NO_ERROR; /**< Return no error to the encoder core 1131 (else it may leak in some situations...) 
*/ 1132 } 1133 pC->pC1->lastDecodedPlane = pTemp1; 1134 } 1135 else 1136 { 1137 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt, 1138 &t, pC->yuv1, 1139 M4OSA_TRUE); 1140 if (M4NO_ERROR != err) 1141 { 1142 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \ 1143 returning M4NO_ERROR", err); 1144 pC->ewc.VppError = err; 1145 return M4NO_ERROR; /**< Return no error to the encoder core 1146 (else it may leak in some situations...) */ 1147 } 1148 pC->pC1->lastDecodedPlane = pC->yuv1; 1149 } 1150 pC->pC1->iVideoRenderCts = (M4OSA_Int32)t; 1151 } 1152 else 1153 { 1154 /* Copy last decoded plane to output plane */ 1155 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[0].pac_data, 1156 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[0].pac_data, 1157 (pTmp[0].u_height * pTmp[0].u_width)); 1158 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[1].pac_data, 1159 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[1].pac_data, 1160 (pTmp[1].u_height * pTmp[1].u_width)); 1161 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[2].pac_data, 1162 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[2].pac_data, 1163 (pTmp[2].u_height * pTmp[2].u_width)); 1164 pC->pC1->lastDecodedPlane = pTmp; 1165 } 1166 1167 /** 1168 * Compute the time in the clip2 base: t = to - Offset */ 1169 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1170 t = pC->ewc.dInputVidCts - pC->pC2->iVoffset; 1171 /** 1172 * Render Clip2 */ 1173 if( pC->pC2->isRenderDup == M4OSA_FALSE ) 1174 { 1175 if(pC->nbActiveEffects1 > 0) 1176 { 1177 err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt, 1178 &t, pTemp2, 1179 M4OSA_TRUE); 1180 if (M4NO_ERROR != err) 1181 { 1182 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \ 1183 returning M4NO_ERROR", err); 1184 pC->ewc.VppError = err; 1185 return M4NO_ERROR; /**< Return no error to the encoder core 1186 (else it may leak in some situations...) 
*/ 1187 } 1188 1189 pC->bIssecondClip = M4OSA_TRUE; 1190 err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp2 ,pC->yuv2); 1191 if (M4NO_ERROR != err) 1192 { 1193 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \ 1194 returning M4NO_ERROR", err); 1195 pC->ewc.VppError = err; 1196 return M4NO_ERROR; /**< Return no error to the encoder core 1197 (else it may leak in some situations...) */ 1198 } 1199 pC->pC2->lastDecodedPlane = pTemp2; 1200 } 1201 else 1202 { 1203 err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt, 1204 &t, pC->yuv2, 1205 M4OSA_TRUE); 1206 if (M4NO_ERROR != err) 1207 { 1208 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \ 1209 returning M4NO_ERROR", err); 1210 pC->ewc.VppError = err; 1211 return M4NO_ERROR; /**< Return no error to the encoder core 1212 (else it may leak in some situations...) */ 1213 } 1214 pC->pC2->lastDecodedPlane = pC->yuv2; 1215 } 1216 pC->pC2->iVideoRenderCts = (M4OSA_Int32)t; 1217 } 1218 else 1219 { 1220 /* Copy last decoded plane to output plane */ 1221 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[0].pac_data, 1222 (M4OSA_MemAddr8)pC->pC2->lastDecodedPlane[0].pac_data, 1223 (pTmp[0].u_height * pTmp[0].u_width)); 1224 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[1].pac_data, 1225 (M4OSA_MemAddr8)pC->pC2->lastDecodedPlane[1].pac_data, 1226 (pTmp[1].u_height * pTmp[1].u_width)); 1227 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[2].pac_data, 1228 (M4OSA_MemAddr8)pC->pC2->lastDecodedPlane[2].pac_data, 1229 (pTmp[2].u_height * pTmp[2].u_width)); 1230 pC->pC2->lastDecodedPlane = pTmp; 1231 } 1232 1233 1234 pTmp = pPlaneOut; 1235 err = M4VSS3GPP_intVideoTransition(pC, pTmp); 1236 1237 if( M4NO_ERROR != err ) 1238 { 1239 M4OSA_TRACE1_1( 1240 "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\ 1241 returning M4NO_ERROR", 1242 err); 1243 pC->ewc.VppError = err; 1244 return M4NO_ERROR; /**< Return no error to the encoder core 1245 (else it may leak in some situations...) 
*/ 1246 } 1247 for (i=0; i < 3; i++) 1248 { 1249 if (pTemp2[i].pac_data != M4OSA_NULL) 1250 { 1251 M4OSA_free((M4OSA_MemAddr32)pTemp2[i].pac_data); 1252 pTemp2[i].pac_data = M4OSA_NULL; 1253 } 1254 1255 1256 if (pTemp1[i].pac_data != M4OSA_NULL) 1257 { 1258 M4OSA_free((M4OSA_MemAddr32)pTemp1[i].pac_data); 1259 pTemp1[i].pac_data = M4OSA_NULL; 1260 } 1261 } 1262 } 1263 /** 1264 **************** No Transition case ****************/ 1265 else 1266 { 1267 /** 1268 * Check if there is a filter */ 1269 if( pC->nbActiveEffects > 0 ) 1270 { 1271 /** 1272 * If we do modify the image, we need an intermediate image plane */ 1273 if( M4OSA_NULL == pC->yuv1[0].pac_data ) 1274 { 1275 err = 1276 M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth, 1277 pC->ewc.uiVideoHeight); 1278 1279 if( M4NO_ERROR != err ) 1280 { 1281 M4OSA_TRACE1_1( 1282 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 returns 0x%x,\ 1283 returning M4NO_ERROR", 1284 err); 1285 pC->ewc.VppError = err; 1286 return 1287 M4NO_ERROR; /**< Return no error to the encoder core 1288 (else it may leak in some situations...) 
*/ 1289 } 1290 } 1291 /** 1292 * The image is rendered in the intermediate image plane */ 1293 pTmp = pC->yuv1; 1294 } 1295 else 1296 { 1297 /** 1298 * No filter, the image is directly rendered in pPlaneOut */ 1299 pTmp = pPlaneOut; 1300 } 1301 1302 /** 1303 * Compute the time in the clip base: t = to - Offset */ 1304 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1305 t = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1306 1307 if( pC->pC1->isRenderDup == M4OSA_FALSE ) 1308 { 1309 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1310 pC->pC1->pViDecCtxt, &t, pTmp, M4OSA_TRUE); 1311 1312 if( M4NO_ERROR != err ) 1313 { 1314 M4OSA_TRACE1_1( 1315 "M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender returns 0x%x,\ 1316 returning M4NO_ERROR", 1317 err); 1318 pC->ewc.VppError = err; 1319 return 1320 M4NO_ERROR; /**< Return no error to the encoder core 1321 (else it may leak in some situations...) */ 1322 } 1323 pC->pC1->lastDecodedPlane = pTmp; 1324 pC->pC1->iVideoRenderCts = (M4OSA_Int32)t; 1325 } 1326 else 1327 { 1328 /* Copy last decoded plane to output plane */ 1329 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[0].pac_data, 1330 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[0].pac_data, 1331 (pTmp[0].u_height * pTmp[0].u_width)); 1332 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[1].pac_data, 1333 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[1].pac_data, 1334 (pTmp[1].u_height * pTmp[1].u_width)); 1335 M4OSA_memcpy((M4OSA_MemAddr8)pTmp[2].pac_data, 1336 (M4OSA_MemAddr8)pC->pC1->lastDecodedPlane[2].pac_data, 1337 (pTmp[2].u_height * pTmp[2].u_width)); 1338 pC->pC1->lastDecodedPlane = pTmp; 1339 } 1340 1341 M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", t); 1342 1343 /** 1344 * Apply the clip1 effect */ 1345 // if (pC->iClip1ActiveEffect >= 0) 1346 if( pC->nbActiveEffects > 0 ) 1347 { 1348 err = M4VSS3GPP_intApplyVideoEffect(pC,/*1,*/ pC->yuv1, pPlaneOut); 1349 1350 if( M4NO_ERROR != err ) 1351 { 1352 M4OSA_TRACE1_1( 1353 "M4VSS3GPP_intVPP: 
M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x,\ 1354 returning M4NO_ERROR", 1355 err); 1356 pC->ewc.VppError = err; 1357 return 1358 M4NO_ERROR; /**< Return no error to the encoder core 1359 (else it may leak in some situations...) */ 1360 } 1361 } 1362 } 1363 1364 /** 1365 * Return */ 1366 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR"); 1367 return M4NO_ERROR; 1368} 1369 1370/** 1371 ****************************************************************************** 1372 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect() 1373 * @brief Apply video effect from pPlaneIn to pPlaneOut 1374 * @param pC (IN/OUT) Internal edit context 1375 * @param uiClip1orClip2 (IN/OUT) 1 for first clip, 2 for second clip 1376 * @param pInputPlanes (IN) Input raw YUV420 image 1377 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image 1378 * @return M4NO_ERROR: No error 1379 ****************************************************************************** 1380 */ 1381static M4OSA_ERR 1382M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC, 1383 M4VIFI_ImagePlane *pPlaneIn, 1384 M4VIFI_ImagePlane *pPlaneOut ) 1385{ 1386 M4OSA_ERR err; 1387 1388 M4VSS3GPP_ClipContext *pClip; 1389 M4VSS3GPP_EffectSettings *pFx; 1390 M4VFL_CurtainParam curtainParams; 1391 M4VSS3GPP_ExternalProgress extProgress; 1392 1393 M4OSA_Double VideoEffectTime; 1394 M4OSA_Double PercentageDone; 1395 M4OSA_Int32 tmp; 1396 1397 M4VIFI_ImagePlane *pPlaneTempIn; 1398 M4VIFI_ImagePlane *pPlaneTempOut; 1399 M4OSA_UInt8 i; 1400 M4OSA_UInt8 NumActiveEffects =0; 1401 1402 1403 pClip = pC->pC1; 1404 if (pC->bIssecondClip == M4OSA_TRUE) 1405 { 1406 NumActiveEffects = pC->nbActiveEffects1; 1407 } 1408 else 1409 { 1410 NumActiveEffects = pC->nbActiveEffects; 1411 } 1412 1413 /** 1414 * Allocate temporary plane if needed RC */ 1415 if (M4OSA_NULL == pC->yuv4[0].pac_data && NumActiveEffects > 1) 1416 { 1417 err = M4VSS3GPP_intAllocateYUV420(pC->yuv4, pC->ewc.uiVideoWidth, 1418 pC->ewc.uiVideoHeight); 1419 1420 if( 
M4NO_ERROR != err ) 1421 { 1422 M4OSA_TRACE1_1( 1423 "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\ 1424 returning M4NO_ERROR", 1425 err); 1426 pC->ewc.VppError = err; 1427 return 1428 M4NO_ERROR; /**< Return no error to the encoder core 1429 (else it may leak in some situations...) */ 1430 } 1431 } 1432 1433 if (NumActiveEffects % 2 == 0) 1434 { 1435 pPlaneTempIn = pPlaneIn; 1436 pPlaneTempOut = pC->yuv4; 1437 } 1438 else 1439 { 1440 pPlaneTempIn = pPlaneIn; 1441 pPlaneTempOut = pPlaneOut; 1442 } 1443 1444 for (i=0; i<NumActiveEffects; i++) 1445 { 1446 if (pC->bIssecondClip == M4OSA_TRUE) 1447 { 1448 1449 1450 pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]); 1451 /* Compute how far from the beginning of the effect we are, in clip-base time. */ 1452 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1453 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) + 1454 pC->pTransitionList[pC->uiCurrentClip]. 1455 uiTransitionDuration- pFx->uiStartTime; 1456 } 1457 else 1458 { 1459 pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]); 1460 /* Compute how far from the beginning of the effect we are, in clip-base time. */ 1461 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1462 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime; 1463 } 1464 1465 1466 1467 /* To calculate %, substract timeIncrement because effect should finish on the last frame*/ 1468 /* which is presented from CTS = eof-timeIncrement till CTS = eof */ 1469 PercentageDone = VideoEffectTime 1470 / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/); 1471 1472 if( PercentageDone < 0.0 ) 1473 PercentageDone = 0.0; 1474 1475 if( PercentageDone > 1.0 ) 1476 PercentageDone = 1.0; 1477 1478 switch( pFx->VideoEffectType ) 1479 { 1480 case M4VSS3GPP_kVideoEffectType_FadeFromBlack: 1481 /** 1482 * Compute where we are in the effect (scale is 0->1024). 
*/ 1483 tmp = (M4OSA_Int32)(PercentageDone * 1024); 1484 1485 /** 1486 * Apply the darkening effect */ 1487 err = 1488 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn, 1489 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL); 1490 1491 if( M4NO_ERROR != err ) 1492 { 1493 M4OSA_TRACE1_1( 1494 "M4VSS3GPP_intApplyVideoEffect:\ 1495 M4VFL_modifyLumaWithScale returns error 0x%x,\ 1496 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", 1497 err); 1498 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 1499 } 1500 break; 1501 1502 case M4VSS3GPP_kVideoEffectType_CurtainOpening: 1503 /** 1504 * Compute where we are in the effect (scale is 0->height). 1505 * It is done with floats because tmp x height can be very large 1506 (with long clips).*/ 1507 curtainParams.nb_black_lines = 1508 (M4OSA_UInt16)(( 1.0 - PercentageDone) 1509 * pPlaneTempIn[0].u_height); 1510 /** 1511 * The curtain is hanged on the ceiling */ 1512 curtainParams.top_is_black = 1; 1513 1514 /** 1515 * Apply the curtain effect */ 1516 err = M4VFL_applyCurtain((M4ViComImagePlane *)pPlaneTempIn, 1517 (M4ViComImagePlane *)pPlaneTempOut, &curtainParams, 1518 M4OSA_NULL); 1519 1520 if( M4NO_ERROR != err ) 1521 { 1522 M4OSA_TRACE1_1( 1523 "M4VSS3GPP_intApplyVideoEffect: M4VFL_applyCurtain returns error 0x%x,\ 1524 returning M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR", 1525 err); 1526 return M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR; 1527 } 1528 break; 1529 1530 case M4VSS3GPP_kVideoEffectType_FadeToBlack: 1531 /** 1532 * Compute where we are in the effect (scale is 0->1024) */ 1533 tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024); 1534 1535 /** 1536 * Apply the darkening effect */ 1537 err = 1538 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn, 1539 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL); 1540 1541 if( M4NO_ERROR != err ) 1542 { 1543 M4OSA_TRACE1_1( 1544 "M4VSS3GPP_intApplyVideoEffect:\ 1545 M4VFL_modifyLumaWithScale returns error 0x%x,\ 1546 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", 1547 err); 1548 
return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 1549 } 1550 break; 1551 1552 case M4VSS3GPP_kVideoEffectType_CurtainClosing: 1553 /** 1554 * Compute where we are in the effect (scale is 0->height) */ 1555 curtainParams.nb_black_lines = 1556 (M4OSA_UInt16)(PercentageDone * pPlaneTempIn[0].u_height); 1557 1558 /** 1559 * The curtain is hanged on the ceiling */ 1560 curtainParams.top_is_black = 1; 1561 1562 /** 1563 * Apply the curtain effect */ 1564 err = M4VFL_applyCurtain((M4ViComImagePlane *)pPlaneTempIn, 1565 (M4ViComImagePlane *)pPlaneTempOut, &curtainParams, 1566 M4OSA_NULL); 1567 1568 if( M4NO_ERROR != err ) 1569 { 1570 M4OSA_TRACE1_1( 1571 "M4VSS3GPP_intApplyVideoEffect: M4VFL_applyCurtain returns error 0x%x,\ 1572 returning M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR", 1573 err); 1574 return M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR; 1575 } 1576 break; 1577 1578 default: 1579 if( pFx->VideoEffectType 1580 >= M4VSS3GPP_kVideoEffectType_External ) 1581 { 1582 M4OSA_UInt32 Cts = 0; 1583 M4OSA_Int32 nextEffectTime; 1584 1585 /** 1586 * Compute where we are in the effect (scale is 0->1000) */ 1587 tmp = (M4OSA_Int32)(PercentageDone * 1000); 1588 1589 /** 1590 * Set the progress info provided to the external function */ 1591 extProgress.uiProgress = (M4OSA_UInt32)tmp; 1592 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1593 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts; 1594 extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset; 1595 extProgress.bIsLast = M4OSA_FALSE; 1596 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1597 nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \ 1598 + pC->dOutputFrameDuration); 1599 if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) 1600 { 1601 extProgress.bIsLast = M4OSA_TRUE; 1602 } 1603 1604 err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt, 1605 pPlaneTempIn, pPlaneTempOut, &extProgress, 1606 pFx->VideoEffectType 1607 - 
M4VSS3GPP_kVideoEffectType_External); 1608 1609 if( M4NO_ERROR != err ) 1610 { 1611 M4OSA_TRACE1_1( 1612 "M4VSS3GPP_intApplyVideoEffect: \ 1613 External video effect function returns 0x%x!", 1614 err); 1615 return err; 1616 } 1617 break; 1618 } 1619 else 1620 { 1621 M4OSA_TRACE1_1( 1622 "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\ 1623 returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE", 1624 pFx->VideoEffectType); 1625 return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE; 1626 } 1627 } 1628 /** 1629 * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */ 1630 if (((i % 2 == 0) && (NumActiveEffects % 2 == 0)) 1631 || ((i % 2 != 0) && (NumActiveEffects % 2 != 0))) 1632 { 1633 pPlaneTempIn = pC->yuv4; 1634 pPlaneTempOut = pPlaneOut; 1635 } 1636 else 1637 { 1638 pPlaneTempIn = pPlaneOut; 1639 pPlaneTempOut = pC->yuv4; 1640 } 1641 } 1642 1643 /** 1644 * Return */ 1645 M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR"); 1646 return M4NO_ERROR; 1647} 1648 1649/** 1650 ****************************************************************************** 1651 * M4OSA_ERR M4VSS3GPP_intVideoTransition() 1652 * @brief Apply video transition effect pC1+pC2->pPlaneOut 1653 * @param pC (IN/OUT) Internal edit context 1654 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image 1655 * @return M4NO_ERROR: No error 1656 ****************************************************************************** 1657 */ 1658static M4OSA_ERR 1659M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC, 1660 M4VIFI_ImagePlane *pPlaneOut ) 1661{ 1662 M4OSA_ERR err; 1663 M4OSA_Int32 iProgress; 1664 M4VSS3GPP_ExternalProgress extProgress; 1665 M4VIFI_ImagePlane *pPlane; 1666 M4OSA_Int32 i; 1667 const M4OSA_Int32 iDur = (M4OSA_Int32)pC-> 1668 pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 1669 1670 /** 1671 * Compute how far from the end cut we are, in clip-base time. 
1672 * It is done with integers because the offset and begin cut have been rounded already. */ 1673 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1674 iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts + 1675 ((M4OSA_Double)pC->pC1->iVoffset); 1676 /** 1677 * We must remove the duration of one frame, else we would almost never reach the end 1678 * (It's kind of a "pile and intervals" issue). */ 1679 iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration; 1680 1681 if( iProgress < 0 ) /**< Sanity checks */ 1682 { 1683 iProgress = 0; 1684 } 1685 1686 /** 1687 * Compute where we are in the transition, on a base 1000 */ 1688 iProgress = ( ( iDur - iProgress) * 1000) / iDur; 1689 1690 /** 1691 * Sanity checks */ 1692 if( iProgress < 0 ) 1693 { 1694 iProgress = 0; 1695 } 1696 else if( iProgress > 1000 ) 1697 { 1698 iProgress = 1000; 1699 } 1700 1701 switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour ) 1702 { 1703 case M4VSS3GPP_TransitionBehaviour_SpeedUp: 1704 iProgress = ( iProgress * iProgress) / 1000; 1705 break; 1706 1707 case M4VSS3GPP_TransitionBehaviour_Linear: 1708 /*do nothing*/ 1709 break; 1710 1711 case M4VSS3GPP_TransitionBehaviour_SpeedDown: 1712 iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000)); 1713 break; 1714 1715 case M4VSS3GPP_TransitionBehaviour_SlowMiddle: 1716 if( iProgress < 500 ) 1717 { 1718 iProgress = (M4OSA_Int32)(sqrt(iProgress * 500)); 1719 } 1720 else 1721 { 1722 iProgress = 1723 (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500)) 1724 / 500) + 500); 1725 } 1726 break; 1727 1728 case M4VSS3GPP_TransitionBehaviour_FastMiddle: 1729 if( iProgress < 500 ) 1730 { 1731 iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500); 1732 } 1733 else 1734 { 1735 iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500); 1736 } 1737 break; 1738 1739 default: 1740 /*do nothing*/ 1741 break; 1742 } 1743 1744 switch( 
pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType ) 1745 { 1746 case M4VSS3GPP_kVideoTransitionType_CrossFade: 1747 /** 1748 * Apply the transition effect */ 1749 err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL, 1750 (M4ViComImagePlane *)pC->yuv1, 1751 (M4ViComImagePlane *)pC->yuv2, 1752 (M4ViComImagePlane *)pPlaneOut, iProgress); 1753 1754 if( M4NO_ERROR != err ) 1755 { 1756 M4OSA_TRACE1_1( 1757 "M4VSS3GPP_intVideoTransition:\ 1758 M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\ 1759 returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR", 1760 err); 1761 return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR; 1762 } 1763 break; 1764 1765 case M4VSS3GPP_kVideoTransitionType_None: 1766 /** 1767 * This is a stupid-non optimized version of the None transition... 1768 * We copy the YUV frame */ 1769 if( iProgress < 500 ) /**< first half of transition */ 1770 { 1771 pPlane = pC->yuv1; 1772 } 1773 else /**< second half of transition */ 1774 { 1775 pPlane = pC->yuv2; 1776 } 1777 /** 1778 * Copy the input YUV frames */ 1779 i = 3; 1780 1781 while( i-- > 0 ) 1782 { 1783 M4OSA_memcpy((M4OSA_MemAddr8)pPlaneOut[i].pac_data, 1784 (M4OSA_MemAddr8)pPlane[i].pac_data, 1785 pPlaneOut[i].u_stride * pPlaneOut[i].u_height); 1786 } 1787 break; 1788 1789 default: 1790 if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType 1791 >= M4VSS3GPP_kVideoTransitionType_External ) 1792 { 1793 /** 1794 * Set the progress info provided to the external function */ 1795 extProgress.uiProgress = (M4OSA_UInt32)iProgress; 1796 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1797 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts; 1798 extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset; 1799 1800 err = pC->pTransitionList[pC-> 1801 uiCurrentClip].ExtVideoTransitionFct( 1802 pC->pTransitionList[pC-> 1803 uiCurrentClip].pExtVideoTransitionFctCtxt, 1804 pC->yuv1, pC->yuv2, pPlaneOut, &extProgress, 1805 pC->pTransitionList[pC-> 1806 
uiCurrentClip].VideoTransitionType 1807 - M4VSS3GPP_kVideoTransitionType_External); 1808 1809 if( M4NO_ERROR != err ) 1810 { 1811 M4OSA_TRACE1_1( 1812 "M4VSS3GPP_intVideoTransition:\ 1813 External video transition function returns 0x%x!", 1814 err); 1815 return err; 1816 } 1817 break; 1818 } 1819 else 1820 { 1821 M4OSA_TRACE1_1( 1822 "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\ 1823 returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE", 1824 pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType); 1825 return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE; 1826 } 1827 } 1828 1829 /** 1830 * Return */ 1831 M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR"); 1832 return M4NO_ERROR; 1833} 1834 1835/** 1836 ****************************************************************************** 1837 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo() 1838 * @brief Update bit stream time info by Counter Time System to be compliant with 1839 * players using bit stream time info 1840 * @note H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops 1841 * (GOV, see the standard) 1842 * @param pC (IN/OUT) returns time updated video AU, 1843 * the offset between system and video time (MPEG4 only) 1844 * and the state of the current clip (MPEG4 only) 1845 * @return nothing 1846 ****************************************************************************** 1847 */ 1848static M4OSA_Void 1849M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 1850 M4SYS_AccessUnit *pAU ) 1851{ 1852 M4OSA_UInt8 uiTmp; 1853 M4OSA_UInt32 uiCts = 0; 1854 M4OSA_MemAddr8 pTmp; 1855 M4OSA_UInt32 uiAdd; 1856 M4OSA_UInt32 uiCurrGov; 1857 M4OSA_Int8 iDiff; 1858 1859 M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1; 1860 M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset); 1861 1862 /** 1863 * Set H263 time counter from system time */ 1864 if( M4SYS_kH263 == pAU->stream->streamType ) 1865 { 1866 uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5) 
1867 % M4VSS3GPP_EDIT_H263_MODULO_TIME); 1868 M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress), 1869 uiTmp); 1870 } 1871 /* 1872 * Set MPEG4 GOV time counter regarding video and system time */ 1873 else if( M4SYS_kMPEG_4 == pAU->stream->streamType ) 1874 { 1875 /* 1876 * If GOV. 1877 * beware of little/big endian! */ 1878 /* correction: read 8 bits block instead of one 32 bits block */ 1879 M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress); 1880 M4OSA_UInt32 temp32 = 0; 1881 1882 temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8)) 1883 + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8) 1884 + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16) 1885 + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24); 1886 1887 M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32, 1888 *(pAU->dataAddress)); 1889 1890 if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 ) 1891 { 1892 pTmp = 1893 (M4OSA_MemAddr8)(pAU->dataAddress 1894 + 1); /**< Jump to the time code (just after the 32 bits header) */ 1895 uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset); 1896 1897 switch( pClipCtxt->bMpeg4GovState ) 1898 { 1899 case M4OSA_FALSE: /*< INIT */ 1900 { 1901 /* video time = ceil (system time + offset) */ 1902 uiCts = ( uiAdd + 999) / 1000; 1903 1904 /* offset update */ 1905 ( *pOffset) += (( uiCts * 1000) - uiAdd); 1906 1907 /* Save values */ 1908 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 1909 1910 /* State to 'first' */ 1911 pClipCtxt->bMpeg4GovState = M4OSA_TRUE; 1912 } 1913 break; 1914 1915 case M4OSA_TRUE: /*< UPDATE */ 1916 { 1917 /* Get current Gov value */ 1918 M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov); 1919 1920 /* video time = floor or ceil (system time + offset) */ 1921 uiCts = (uiAdd / 1000); 1922 iDiff = (M4OSA_Int8)(uiCurrGov 1923 - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts 1924 + pClipCtxt->uiMpeg4PrevGovValueSet); 1925 1926 /* ceiling */ 1927 if( iDiff > 0 ) 1928 { 1929 uiCts += (M4OSA_UInt32)(iDiff); 1930 1931 /* offset update */ 1932 ( *pOffset) += (( 
uiCts * 1000) - uiAdd); 1933 } 1934 1935 /* Save values */ 1936 pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov; 1937 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 1938 } 1939 break; 1940 } 1941 1942 M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts); 1943 } 1944 } 1945 return; 1946} 1947 1948/** 1949 ****************************************************************************** 1950 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects() 1951 * @brief Check which video effect must be applied at the current time 1952 ****************************************************************************** 1953 */ 1954static M4OSA_Void 1955M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 1956 M4OSA_UInt8 uiClipNumber ) 1957{ 1958 M4OSA_UInt8 uiClipIndex; 1959 M4OSA_UInt8 uiFxIndex, i; 1960 M4VSS3GPP_ClipContext *pClip; 1961 M4VSS3GPP_EffectSettings *pFx; 1962 M4OSA_Int32 Off, BC, EC; 1963 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1964 M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 1965 1966 uiClipIndex = pC->uiCurrentClip; 1967 pClip = pC->pC1; 1968 /** 1969 * Shortcuts for code readability */ 1970 Off = pClip->iVoffset; 1971 BC = pClip->iActualVideoBeginCut; 1972 EC = pClip->iEndTime; 1973 1974 i = 0; 1975 1976 for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ ) 1977 { 1978 /** Shortcut, reverse order because of priority between effects(EndEffect always clean )*/ 1979 pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]); 1980 1981 if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType ) 1982 { 1983 /** 1984 * Check if there is actually a video effect */ 1985 1986 if(uiClipNumber ==1) 1987 { 1988 /**< Are we after the start time of the effect? 1989 * or Are we into the effect duration? 
1990 */ 1991 if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) && 1992 (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 1993 /** 1994 * Set the active effect(s) */ 1995 pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex; 1996 1997 /** 1998 * Update counter of active effects */ 1999 i++; 2000 2001 /** 2002 * For all external effects set this flag to true. */ 2003 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2004 { 2005 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2006 } 2007 2008 /** 2009 * The third effect has the highest priority, then the 2010 * second one, then the first one. Hence, as soon as we 2011 * found an active effect, we can get out of this loop. 2012 */ 2013 } 2014 } 2015 else 2016 { 2017 /**< Are we into the effect duration? */ 2018 if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2019 >= (M4OSA_Int32)(pFx->uiStartTime)) 2020 && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2021 <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 2022 /** 2023 * Set the active effect(s) */ 2024 pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex; 2025 2026 /** 2027 * Update counter of active effects */ 2028 i++; 2029 2030 /** 2031 * For all external effects set this flag to true. */ 2032 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2033 { 2034 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2035 } 2036 2037 /** 2038 * The third effect has the highest priority, then the second one, then the first one. 
2039 * Hence, as soon as we found an active effect, we can get out of this loop */ 2040 } 2041 } 2042 } 2043 } 2044 2045 if(1==uiClipNumber) 2046 { 2047 /** 2048 * Save number of active effects */ 2049 pC->nbActiveEffects = i; 2050 } 2051 else 2052 { 2053 pC->nbActiveEffects1 = i; 2054 } 2055 2056 /** 2057 * Change the absolut time to clip related time */ 2058 t -= Off; 2059 2060 /** 2061 * Check if we are on the begin cut (for clip1 only) */ 2062 if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) ) 2063 { 2064 pC->bClip1AtBeginCut = M4OSA_TRUE; 2065 } 2066 else 2067 { 2068 pC->bClip1AtBeginCut = M4OSA_FALSE; 2069 } 2070 2071 return; 2072} 2073 2074/** 2075 ****************************************************************************** 2076 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder() 2077 * @brief Creates the video encoder 2078 * @note 2079 ****************************************************************************** 2080 */ 2081M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2082{ 2083 M4OSA_ERR err; 2084 M4ENCODER_AdvancedParams EncParams; 2085 2086 /** 2087 * Simulate a writer interface with our specific function */ 2088 pC->ewc.OurWriterDataInterface.pProcessAU = 2089 M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific, 2090 but it follow the writer interface */ 2091 pC->ewc.OurWriterDataInterface.pStartAU = 2092 M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific, 2093 but it follow the writer interface */ 2094 pC->ewc.OurWriterDataInterface.pWriterContext = 2095 (M4WRITER_Context) 2096 pC; /**< We give the internal context as writer context */ 2097 2098 /** 2099 * Get the encoder interface, if not already done */ 2100 if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts ) 2101 { 2102 err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI, 2103 pC->ewc.VideoStreamType); 2104 M4OSA_TRACE1_1( 2105 "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x", 2106 err); 2107 M4ERR_CHECK_RETURN(err); 2108 
} 2109 2110 /** 2111 * Set encoder shell parameters according to VSS settings */ 2112 2113 /* Common parameters */ 2114 EncParams.InputFormat = M4ENCODER_kIYUV420; 2115 EncParams.FrameWidth = pC->ewc.uiVideoWidth; 2116 EncParams.FrameHeight = pC->ewc.uiVideoHeight; 2117 EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale; 2118 2119 if( pC->bIsMMS == M4OSA_FALSE ) 2120 { 2121 /* No strict regulation in video editor */ 2122 /* Because of the effects and transitions we should allow more flexibility */ 2123 /* Also it prevents to drop important frames (with a bad result on sheduling and 2124 block effetcs) */ 2125 EncParams.bInternalRegulation = M4OSA_FALSE; 2126 // Variable framerate is not supported by StageFright encoders 2127 EncParams.FrameRate = M4ENCODER_k30_FPS; 2128 } 2129 else 2130 { 2131 /* In case of MMS mode, we need to enable bitrate regulation to be sure */ 2132 /* to reach the targeted output file size */ 2133 EncParams.bInternalRegulation = M4OSA_TRUE; 2134 EncParams.FrameRate = pC->MMSvideoFramerate; 2135 } 2136 2137 /** 2138 * Other encoder settings (defaults) */ 2139 EncParams.uiHorizontalSearchRange = 0; /* use default */ 2140 EncParams.uiVerticalSearchRange = 0; /* use default */ 2141 EncParams.bErrorResilience = M4OSA_FALSE; /* no error resilience */ 2142 EncParams.uiIVopPeriod = 0; /* use default */ 2143 EncParams.uiMotionEstimationTools = 0; /* M4V_MOTION_EST_TOOLS_ALL */ 2144 EncParams.bAcPrediction = M4OSA_TRUE; /* use AC prediction */ 2145 EncParams.uiStartingQuantizerValue = 10; /* initial QP = 10 */ 2146 EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */ 2147 2148 switch ( pC->ewc.VideoStreamType ) 2149 { 2150 case M4SYS_kH263: 2151 2152 EncParams.Format = M4ENCODER_kH263; 2153 2154 EncParams.uiStartingQuantizerValue = 10; 2155 EncParams.uiRateFactor = 1; /* default */ 2156 2157 EncParams.bErrorResilience = M4OSA_FALSE; 2158 EncParams.bDataPartitioning = M4OSA_FALSE; 2159 break; 2160 2161 case M4SYS_kMPEG_4: 2162 2163 
EncParams.Format = M4ENCODER_kMPEG4; 2164 2165 EncParams.uiStartingQuantizerValue = 8; 2166 EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration 2167 * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5); 2168 2169 if( EncParams.uiRateFactor == 0 ) 2170 EncParams.uiRateFactor = 1; /* default */ 2171 2172 if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning ) 2173 { 2174 EncParams.bErrorResilience = M4OSA_FALSE; 2175 EncParams.bDataPartitioning = M4OSA_FALSE; 2176 } 2177 else 2178 { 2179 EncParams.bErrorResilience = M4OSA_TRUE; 2180 EncParams.bDataPartitioning = M4OSA_TRUE; 2181 } 2182 break; 2183 2184 case M4SYS_kH264: 2185 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264"); 2186 2187 EncParams.Format = M4ENCODER_kH264; 2188 2189 EncParams.uiStartingQuantizerValue = 10; 2190 EncParams.uiRateFactor = 1; /* default */ 2191 2192 EncParams.bErrorResilience = M4OSA_FALSE; 2193 EncParams.bDataPartitioning = M4OSA_FALSE; 2194 //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS; 2195 break; 2196 2197 default: 2198 M4OSA_TRACE1_1( 2199 "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x", 2200 pC->ewc.VideoStreamType); 2201 return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT; 2202 } 2203 2204 /* In case of EMP we overwrite certain parameters */ 2205 if( M4OSA_TRUE == pC->ewc.bActivateEmp ) 2206 { 2207 EncParams.uiHorizontalSearchRange = 15; /* set value */ 2208 EncParams.uiVerticalSearchRange = 15; /* set value */ 2209 EncParams.bErrorResilience = M4OSA_FALSE; /* no error resilience */ 2210 EncParams.uiIVopPeriod = 15; /* one I frame every 15 frames */ 2211 EncParams.uiMotionEstimationTools = 1; /* M4V_MOTION_EST_TOOLS_NO_4MV */ 2212 EncParams.bAcPrediction = M4OSA_FALSE; /* no AC prediction */ 2213 EncParams.uiStartingQuantizerValue = 10; /* initial QP = 10 */ 2214 EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */ 2215 } 2216 2217 if( pC->bIsMMS == M4OSA_FALSE ) 2218 { 2219 /* Compute max bitrate depending on input files bitrates and 
transitions */ 2220 if( pC->Vstate == M4VSS3GPP_kEditVideoState_TRANSITION ) 2221 { 2222#if 0 2223 /* Max of the two blended files */ 2224 if( pC->pC1->pSettings->ClipProperties.uiVideoBitrate 2225 > pC->pC2->pSettings->ClipProperties.uiVideoBitrate ) 2226 EncParams.Bitrate = 2227 pC->pC1->pSettings->ClipProperties.uiVideoBitrate; 2228 else 2229 EncParams.Bitrate = 2230 pC->pC2->pSettings->ClipProperties.uiVideoBitrate; 2231#endif 2232 EncParams.Bitrate = pC->ewc.uiVideoBitrate; 2233 } 2234 else 2235 { 2236 EncParams.Bitrate = pC->ewc.uiVideoBitrate; 2237 } 2238 } 2239 else 2240 { 2241 EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */ 2242 EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */ 2243 } 2244 2245 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit"); 2246 /** 2247 * Init the video encoder (advanced settings version of the encoder Open function) */ 2248 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext, 2249 &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC, 2250 pC->ShellAPI.pCurrentVideoEncoderExternalAPI, 2251 pC->ShellAPI.pCurrentVideoEncoderUserData); 2252 2253 if( M4NO_ERROR != err ) 2254 { 2255 M4OSA_TRACE1_1( 2256 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x", 2257 err); 2258 return err; 2259 } 2260 2261 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2262 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen"); 2263 2264 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext, 2265 &pC->ewc.WriterVideoAU, &EncParams); 2266 2267 if( M4NO_ERROR != err ) 2268 { 2269 M4OSA_TRACE1_1( 2270 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x", 2271 err); 2272 return err; 2273 } 2274 2275 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2276 M4OSA_TRACE1_0( 2277 "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart"); 2278 2279 if( M4OSA_NULL != 
pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart ) 2280 { 2281 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart( 2282 pC->ewc.pEncContext); 2283 2284 if( M4NO_ERROR != err ) 2285 { 2286 M4OSA_TRACE1_1( 2287 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x", 2288 err); 2289 return err; 2290 } 2291 } 2292 2293 pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning; 2294 2295 /** 2296 * Return */ 2297 M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR"); 2298 return M4NO_ERROR; 2299} 2300 2301/** 2302 ****************************************************************************** 2303 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder() 2304 * @brief Destroy the video encoder 2305 * @note 2306 ****************************************************************************** 2307 */ 2308M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2309{ 2310 M4OSA_ERR err = M4NO_ERROR; 2311 2312 if( M4OSA_NULL != pC->ewc.pEncContext ) 2313 { 2314 if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState ) 2315 { 2316 if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL ) 2317 { 2318 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop( 2319 pC->ewc.pEncContext); 2320 2321 if( M4NO_ERROR != err ) 2322 { 2323 M4OSA_TRACE1_1( 2324 "M4VSS3GPP_intDestroyVideoEncoder:\ 2325 pVideoEncoderGlobalFcts->pFctStop returns 0x%x", 2326 err); 2327 /* Well... how the heck do you handle a failed cleanup? */ 2328 } 2329 } 2330 2331 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2332 } 2333 2334 /* Has the encoder actually been opened? Don't close it if that's not the case. */ 2335 if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState ) 2336 { 2337 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose( 2338 pC->ewc.pEncContext); 2339 2340 if( M4NO_ERROR != err ) 2341 { 2342 M4OSA_TRACE1_1( 2343 "M4VSS3GPP_intDestroyVideoEncoder:\ 2344 pVideoEncoderGlobalFcts->pFctClose returns 0x%x", 2345 err); 2346 /* Well... 
how the heck do you handle a failed cleanup? */ 2347 } 2348 2349 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2350 } 2351 2352 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup( 2353 pC->ewc.pEncContext); 2354 2355 if( M4NO_ERROR != err ) 2356 { 2357 M4OSA_TRACE1_1( 2358 "M4VSS3GPP_intDestroyVideoEncoder:\ 2359 pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!", 2360 err); 2361 /**< We do not return the error here because we still have stuff to free */ 2362 } 2363 2364 pC->ewc.encoderState = M4VSS3GPP_kNoEncoder; 2365 /** 2366 * Reset variable */ 2367 pC->ewc.pEncContext = M4OSA_NULL; 2368 } 2369 2370 M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err); 2371 return err; 2372} 2373 2374/** 2375 ****************************************************************************** 2376 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter() 2377 * @brief Modify the time counter of the given H263 video AU 2378 * @note 2379 * @param pAuDataBuffer (IN/OUT) H263 Video AU to modify 2380 * @param uiCts (IN) New time counter value 2381 * @return nothing 2382 ****************************************************************************** 2383 */ 2384static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 2385 M4OSA_UInt8 uiCts ) 2386{ 2387 /* 2388 * The H263 time counter is 8 bits located on the "x" below: 2389 * 2390 * |--------|--------|--------|--------| 2391 * ???????? ???????? ??????xx xxxxxx?? 
2392 */ 2393 2394 /** 2395 * Write the 2 bits on the third byte */ 2396 pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3); 2397 2398 /** 2399 * Write the 6 bits on the fourth byte */ 2400 pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3); 2401 2402 return; 2403} 2404 2405/** 2406 ****************************************************************************** 2407 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov() 2408 * @brief Modify the time info from Group Of VOP video AU 2409 * @note 2410 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify 2411 * @param uiCtsSec (IN) New GOV time info in second unit 2412 * @return nothing 2413 ****************************************************************************** 2414 */ 2415static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2416 M4OSA_UInt32 uiCtsSec ) 2417{ 2418 /* 2419 * The MPEG-4 time code length is 18 bits: 2420 * 2421 * hh mm marker ss 2422 * xxxxx|xxx xxx 1 xxxx xx ?????? 2423 * |----- ---|--- - ----|-- ------| 2424 */ 2425 M4OSA_UInt8 uiHh; 2426 M4OSA_UInt8 uiMm; 2427 M4OSA_UInt8 uiSs; 2428 M4OSA_UInt8 uiTmp; 2429 2430 /** 2431 * Write the 2 last bits ss */ 2432 uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */ 2433 pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F)); 2434 2435 if( uiCtsSec < 60 ) 2436 { 2437 /** 2438 * Write the 3 last bits of mm, the marker bit (0x10 */ 2439 pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2)); 2440 2441 /** 2442 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2443 pAuDataBuffer[0] = 0; 2444 } 2445 else 2446 { 2447 /** 2448 * Write the 3 last bits of mm, the marker bit (0x10 */ 2449 uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */ 2450 uiMm = (M4OSA_UInt8)(uiTmp % 60); 2451 pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2)); 2452 2453 if( uiTmp < 60 ) 2454 { 2455 /** 2456 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2457 pAuDataBuffer[0] = ((uiMm >> 3)); 2458 } 2459 else 2460 { 2461 
/** 2462 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2463 uiHh = (M4OSA_UInt8)(uiTmp / 60); 2464 pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3)); 2465 } 2466 } 2467 return; 2468} 2469 2470/** 2471 ****************************************************************************** 2472 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov() 2473 * @brief Get the time info from Group Of VOP video AU 2474 * @note 2475 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify 2476 * @param pCtsSec (OUT) Current GOV time info in second unit 2477 * @return nothing 2478 ****************************************************************************** 2479 */ 2480static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2481 M4OSA_UInt32 *pCtsSec ) 2482{ 2483 /* 2484 * The MPEG-4 time code length is 18 bits: 2485 * 2486 * hh mm marker ss 2487 * xxxxx|xxx xxx 1 xxxx xx ?????? 2488 * |----- ---|--- - ----|-- ------| 2489 */ 2490 M4OSA_UInt8 uiHh; 2491 M4OSA_UInt8 uiMm; 2492 M4OSA_UInt8 uiSs; 2493 M4OSA_UInt8 uiTmp; 2494 M4OSA_UInt32 uiCtsSec; 2495 2496 /** 2497 * Read ss */ 2498 uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6); 2499 uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2); 2500 uiCtsSec = uiSs + uiTmp; 2501 2502 /** 2503 * Read mm */ 2504 uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5); 2505 uiTmp = (( pAuDataBuffer[0] & 0x07) << 3); 2506 uiMm = uiMm + uiTmp; 2507 uiCtsSec = ( uiMm * 60) + uiCtsSec; 2508 2509 /** 2510 * Read hh */ 2511 uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3); 2512 2513 if( uiHh ) 2514 { 2515 uiCtsSec = ( uiHh * 3600) + uiCtsSec; 2516 } 2517 2518 /* 2519 * in sec */ 2520 *pCtsSec = uiCtsSec; 2521 2522 return; 2523} 2524 2525/** 2526 ****************************************************************************** 2527 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420() 2528 * @brief Allocate the three YUV 4:2:0 planes 2529 * @note 2530 * @param pPlanes (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures 2531 * @param uiWidth (IN) Image width 2532 * @param uiHeight(IN) Image height 
2533 ****************************************************************************** 2534 */ 2535static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 2536 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ) 2537{ 2538 2539 pPlanes[0].u_width = uiWidth; 2540 pPlanes[0].u_height = uiHeight; 2541 pPlanes[0].u_stride = uiWidth; 2542 pPlanes[0].u_topleft = 0; 2543 pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_malloc(pPlanes[0].u_stride 2544 * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data"); 2545 2546 if( M4OSA_NULL == pPlanes[0].pac_data ) 2547 { 2548 M4OSA_TRACE1_0( 2549 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\ 2550 returning M4ERR_ALLOC"); 2551 return M4ERR_ALLOC; 2552 } 2553 2554 pPlanes[1].u_width = pPlanes[0].u_width >> 1; 2555 pPlanes[1].u_height = pPlanes[0].u_height >> 1; 2556 pPlanes[1].u_stride = pPlanes[1].u_width; 2557 pPlanes[1].u_topleft = 0; 2558 pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_malloc(pPlanes[1].u_stride 2559 * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data"); 2560 2561 if( M4OSA_NULL == pPlanes[1].pac_data ) 2562 { 2563 M4OSA_TRACE1_0( 2564 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\ 2565 returning M4ERR_ALLOC"); 2566 return M4ERR_ALLOC; 2567 } 2568 2569 pPlanes[2].u_width = pPlanes[1].u_width; 2570 pPlanes[2].u_height = pPlanes[1].u_height; 2571 pPlanes[2].u_stride = pPlanes[2].u_width; 2572 pPlanes[2].u_topleft = 0; 2573 pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_malloc(pPlanes[2].u_stride 2574 * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data"); 2575 2576 if( M4OSA_NULL == pPlanes[2].pac_data ) 2577 { 2578 M4OSA_TRACE1_0( 2579 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\ 2580 returning M4ERR_ALLOC"); 2581 return M4ERR_ALLOC; 2582 } 2583 2584 /** 2585 * Return */ 2586 M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR"); 2587 return M4NO_ERROR; 2588} 2589