M4VSS3GPP_EditVideo.c revision 276adbc8cab51c5cd60906fdbff9c7d5345ad0a6
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16/** 17 ****************************************************************************** 18 * @file M4VSS3GPP_EditVideo.c 19 * @brief Video Studio Service 3GPP edit API implementation. 20 * @note 21 ****************************************************************************** 22 */ 23 24/****************/ 25/*** Includes ***/ 26/****************/ 27 28#include "NXPSW_CompilerSwitches.h" 29/** 30 * Our header */ 31#include "M4VSS3GPP_API.h" 32#include "M4VSS3GPP_InternalTypes.h" 33#include "M4VSS3GPP_InternalFunctions.h" 34#include "M4VSS3GPP_InternalConfig.h" 35#include "M4VSS3GPP_ErrorCodes.h" 36 37// StageFright encoders require %16 resolution 38#include "M4ENCODER_common.h" 39/** 40 * OSAL headers */ 41#include "M4OSA_Memory.h" /**< OSAL memory management */ 42#include "M4OSA_Debug.h" /**< OSAL debug management */ 43 44/** 45 * component includes */ 46#include "M4VFL_transition.h" /**< video effects */ 47 48/*for transition behaviour*/ 49#include <math.h> 50#include "M4AIR_API.h" 51#include "M4VSS3GPP_Extended_API.h" 52/** Determine absolute value of a. */ 53#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? 
(-(a)) : (a) ) 54#define Y_PLANE_BORDER_VALUE 0x00 55#define U_PLANE_BORDER_VALUE 0x80 56#define V_PLANE_BORDER_VALUE 0x80 57 58/************************************************************************/ 59/* Static local functions */ 60/************************************************************************/ 61 62static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 63 M4VSS3GPP_InternalEditContext *pC ); 64static M4OSA_Void 65M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 66 M4OSA_UInt8 uiClipNumber ); 67static M4OSA_ERR 68M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,/*M4OSA_UInt8 uiClip1orClip2,*/ 69 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut ); 70static M4OSA_ERR 71M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC, 72 M4VIFI_ImagePlane *pPlaneOut ); 73 74static M4OSA_Void 75M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 76 M4SYS_AccessUnit *pAU ); 77static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 78 M4OSA_UInt8 uiCts ); 79static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 80 M4OSA_UInt32 uiCtsSec ); 81static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 82 M4OSA_UInt32 *pCtsSec ); 83static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 84 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ); 85static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420( 86 M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr, 87 M4VIFI_ImagePlane* pImagePlanes, 88 M4OSA_UInt32 width,M4OSA_UInt32 height); 89static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode( 90 M4VSS3GPP_InternalEditContext *pC, 91 M4xVSS_MediaRendering renderingMode, 92 M4VIFI_ImagePlane* pInplane, 93 M4VIFI_ImagePlane* pOutplane); 94 95static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( 96 M4VSS3GPP_InternalEditContext *pC, 97 M4VSS3GPP_ClipContext* pClipCtxt); 98static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect( 99 
M4VSS3GPP_InternalEditContext *pC,
                                     M4VSS3GPP_ClipContext* pClipCtxt,
                                     M4_MediaTime ts,
                                     M4OSA_Bool bIsClip1,
                                     M4VIFI_ImagePlane *pResizePlane,
                                     M4VIFI_ImagePlane *pPlaneNoResize,
                                     M4VIFI_ImagePlane *pPlaneOut);

static M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
                                     M4OSA_UInt32 rotationDegree);

static M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
                                     M4OSA_UInt32 width, M4OSA_UInt32 height);

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
 * @brief    One step of video processing
 * @note     Dispatches on pC->Vstate: READ_WRITE/AFTER_CUT copies compressed
 *           AUs straight from reader to writer; DECODE_ENCODE/BEGIN_CUT
 *           decodes up to the target CTS then re-encodes; TRANSITION decodes
 *           both clips and blends them in the encoder callbacks.
 * @param    pC    (IN/OUT) Internal edit context
 * @return   M4NO_ERROR on success; M4VSS3GPP_WAR_EDITING_DONE when the target
 *           file size is reached; otherwise the reader/writer/encoder error.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
{
    M4OSA_ERR err;
    M4OSA_Int32 iCts, iNextCts;
    M4ENCODER_FrameMode FrameMode;
    M4OSA_Bool bSkipFrame;
    M4OSA_UInt16 offset;

    /**
    * Check if we reached end cut. Decorrelate input and output encoding
    * timestamp to handle encoder prefetch
    */
    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
    {
        /* Re-adjust video to precise cut time */
        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
        if ( pC->iInOutTimeOffset < 0 ) {
            pC->iInOutTimeOffset = 0;
        }

        /**
        * Video is done for this clip */
        err = M4VSS3GPP_intReachedEndOfVideo(pC);

        /* RC: to know when a file has been processed */
        /* M4VSS3GPP_WAR_SWITCH_CLIP is an expected outcome here, not a failure */
        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
                err);
        }

        return err;
    }

    /* Don't change the states if we are in decodeUpTo() */
    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
        && (( pC->pC2 == M4OSA_NULL)
        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
    {
        /**
        * Check Video Mode, depending on the current output CTS */
        err = M4VSS3GPP_intCheckVideoMode(
            pC); /**< This function change the pC->Vstate variable! */

        if (M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
                err);
            return err;
        }
    }


    switch( pC->Vstate )
    {
        /* _________________ */
        /*|                 |*/
        /*| READ_WRITE MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_READ_WRITE:
        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
            {
                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");

                bSkipFrame = M4OSA_FALSE;

                /**
                * If we were decoding the clip, we must jump to be sure
                * to get to the good position. */
                if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
                {
                    /**
                    * Jump to target video time (tc = to-T) */
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
                    err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo:\
                            READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
                            err);
                        return err;
                    }

                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream,
                        &pC->pC1->VideoAU);

                    /* M4WAR_NO_MORE_AU is tolerated: the empty AU is handled below */
                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo:\
                            READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
                            err);
                        return err;
                    }

                    M4OSA_TRACE2_3("A .... read : cts = %.0f + %ld [ 0x%x ]",
                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                        pC->pC1->VideoAU.m_size);

                    /* This frame has been already written in BEGIN CUT step -> skip it */
                    if( pC->pC1->VideoAU.m_CTS == iCts
                        && pC->pC1->iVideoRenderCts >= iCts )
                    {
                        bSkipFrame = M4OSA_TRUE;
                    }
                }

                /* This frame has been already written in BEGIN CUT step -> skip it */
                if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
                    && (pC->pC1->VideoAU.m_CTS
                    + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
                {
                    bSkipFrame = M4OSA_TRUE;
                }

                /**
                * Remember the clip reading state */
                pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                // Rounding is to compensate reader imprecision (m_CTS is actually an integer)
                iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
                iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;

                /* Avoid to write a last frame of duration 0 */
                if( iNextCts > pC->pC1->iEndTime )
                    iNextCts = pC->pC1->iEndTime;

                /**
                * If the AU is good to be written, write it, else just skip it */
                if( ( M4OSA_FALSE == bSkipFrame)
                    && (( pC->pC1->VideoAU.m_CTS >= iCts)
                    && (pC->pC1->VideoAU.m_CTS < iNextCts)
                    && (pC->pC1->VideoAU.m_size > 0)) )
                {
                    /**
                    * Get the output AU to write into */
                    err = pC->ShellAPI.pWriterDataFcts->pStartAU(
                        pC->ewc.p3gpWriterContext,
                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                        &pC->ewc.WriterVideoAU);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            pWriterDataFcts->pStartAU(Video) returns 0x%x!",
                            err);
                        return err;
                    }

                    /**
                    * Copy the input AU to the output AU */
                    pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
                        (M4OSA_Time)pC->pC1->iVoffset;
                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
                    offset = 0;
                    /* for h.264 stream do not read the 1st 4 bytes as they are header
                     indicators */
                    if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
                        == M4DA_StreamTypeVideoMpeg4Avc )
                        offset = 4;

                    pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
                    if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
                    {
                        M4OSA_TRACE1_2(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
                            MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
                            pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
                        return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
                    }

                    memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
                        (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
                        (pC->ewc.WriterVideoAU.size));

                    /**
                    * Update time info for the Counter Time System to be equal to the bit
                    -stream time*/
                    M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
                    M4OSA_TRACE2_2("B ---- write : cts = %lu [ 0x%x ]",
                        pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);

                    /**
                    * Write the AU */
                    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
                        pC->ewc.p3gpWriterContext,
                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                        &pC->ewc.WriterVideoAU);

                    if( M4NO_ERROR != err )
                    {
                        /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                        file size is reached
                        The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                        is returned*/
                        if( M4WAR_WRITER_STOP_REQ == err )
                        {
                            M4OSA_TRACE1_0(
                                "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                            return M4VSS3GPP_WAR_EDITING_DONE;
                        }
                        else
                        {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                                pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
                                err);
                            return err;
                        }
                    }

                    /**
                    * Read next AU for next step */
                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream,
                        &pC->pC1->VideoAU);

                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                            err);
                        return err;
                    }

                    M4OSA_TRACE2_3("C .... read : cts = %.0f + %ld [ 0x%x ]",
                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                        pC->pC1->VideoAU.m_size);
                }
                else
                {
                    /**
                    * Decide whether to read or to increment time increment */
                    if( ( pC->pC1->VideoAU.m_size == 0)
                        || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
                    {
                        /*Increment time by the encoding period (NO_MORE_AU or reader in advance */
                        // Decorrelate input and output encoding timestamp to handle encoder prefetch
                        pC->ewc.dInputVidCts += pC->dOutputFrameDuration;

                        /* Switch (from AFTER_CUT) to normal mode because time is
                        no more frozen */
                        pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
                    }
                    else
                    {
                        /* In other cases (reader late), just let the reader catch up
                        pC->ewc.dVTo */
                        err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                            pC->pC1->pReaderContext,
                            (M4_StreamHandler *)pC->pC1->pVideoStream,
                            &pC->pC1->VideoAU);

                        if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                        {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                                m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                                err);
                            return err;
                        }

                        M4OSA_TRACE2_3("D .... read : cts = %.0f + %ld [ 0x%x ]",
                            pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                            pC->pC1->VideoAU.m_size);
                    }
                }
            }
            break;

        /* ____________________ */
        /*|                    |*/
        /*| DECODE_ENCODE MODE |*/
        /*|   BEGIN_CUT MODE   |*/
        /*|____________________|*/

        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
            {
                M4OSA_TRACE3_0(
                    "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");

                /* Lazily convert a still-image (ARGB8888) clip to YUV420 once */
                if ((pC->pC1->pSettings->FileType ==
                     M4VIDEOEDITING_kFileType_ARGB8888) &&
                    (M4OSA_FALSE ==
                     pC->pC1->pSettings->ClipProperties.bSetImageData)) {

                    err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
                    if( M4NO_ERROR != err ) {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                            M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                        return err;
                    }
                }
                /**
                * Decode the video up to the target time
                (will jump to the previous RAP if needed ) */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
                        err);
                    return err;
                }

                /* If the decoding is not completed, do one more step with time frozen */
                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
                {
                    return M4NO_ERROR;
                }

                /**
                * Reset the video pre-processing error before calling the encoder */
                pC->ewc.VppError = M4NO_ERROR;

                M4OSA_TRACE2_0("E ++++ encode AU");

                /**
                * Encode the frame(rendering,filtering and writing will be done
                in encoder callbacks)*/
                /* A begin cut must start on an I-frame so the cut is decodable */
                if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
                    FrameMode = M4ENCODER_kIFrame;
                else
                    FrameMode = M4ENCODER_kNormalFrame;

                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                    pC->ewc.dInputVidCts, FrameMode);
                /**
                * Check if we had a VPP error... */
                if( M4NO_ERROR != pC->ewc.VppError )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                        pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    /* "no new frame" from the renderer is not fatal with OMX codecs */
                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                    {
#endif //M4VSS_SUPPORT_OMX_CODECS

                        return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    }

#endif //M4VSS_SUPPORT_OMX_CODECS

                }
                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                {
                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                    }
                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                    file size is reached
                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                    is returned*/
                    else if( M4WAR_WRITER_STOP_REQ == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                        return M4VSS3GPP_WAR_EDITING_DONE;
                    }
                    else
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                            err);
                        return err;
                    }
                }

                /**
                * Increment time by the encoding period (for begin cut, do not increment to not
                lose P-frames) */
                if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
                {
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
                }
            }
            break;

        /* _________________ */
        /*|                 |*/
        /*| TRANSITION MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_TRANSITION:
            {
                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");

                /* Don't decode more than needed */
                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
                {
                    /**
                    * Decode the clip1 video up to the target time
                    (will jump to the previous RAP if needed */
                    if ((pC->pC1->pSettings->FileType ==
                         M4VIDEOEDITING_kFileType_ARGB8888) &&
                        (M4OSA_FALSE ==
                         pC->pC1->pSettings->ClipProperties.bSetImageData)) {

                        err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
                        if( M4NO_ERROR != err ) {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                                M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                            return err;
                        }
                    }
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
                        (M4OSA_Int32)pC->ewc.dInputVidCts);
                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
                            err);
                        return err;
                    }

                    /* If the decoding is not completed, do one more step with time frozen */
                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
                    {
                        return M4NO_ERROR;
                    }
                }

                /* Don't decode more than needed */
                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
                {
                    /**
                    * Decode the clip2 video up to the target time
                    (will jump to the previous RAP if needed) */
                    if ((pC->pC2->pSettings->FileType ==
                         M4VIDEOEDITING_kFileType_ARGB8888) &&
                        (M4OSA_FALSE ==
                         pC->pC2->pSettings->ClipProperties.bSetImageData)) {

                        err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2);
                        if( M4NO_ERROR != err ) {
                            M4OSA_TRACE1_1(
                                "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                                M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                            return err;
                        }
                    }

                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
                        (M4OSA_Int32)pC->ewc.dInputVidCts);
                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
                            err);
                        return err;
                    }

                    /* If the decoding is not completed, do one more step with time frozen */
                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
                    {
                        return M4NO_ERROR;
                    }
                }

                /**
                * Reset the video pre-processing error before calling the encoder */
                pC->ewc.VppError = M4NO_ERROR;

                M4OSA_TRACE2_0("F **** blend AUs");

                /**
                * Encode the frame (rendering, filtering and writing will be done
                in encoder callbacks */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                    pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);

                /**
                * If encode returns a process frame error, it is likely to be a VPP error */
                if( M4NO_ERROR != pC->ewc.VppError )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                        pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                    {

#endif //M4VSS_SUPPORT_OMX_CODECS

                        return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                    }

#endif //M4VSS_SUPPORT_OMX_CODECS

                }
                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
                {
                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                    }

                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                    file size is reached
                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
                    returned*/
                    else if( M4WAR_WRITER_STOP_REQ == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                        return M4VSS3GPP_WAR_EDITING_DONE;
                    }
                    else
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                            err);
                        return err;
                    }
                }

                /**
                * Increment time by the encoding period */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
            }
            break;

        /* ____________ */
        /*|            |*/
        /*| ERROR CASE |*/
        /*|____________|*/

        default:
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
                returning M4VSS3GPP_ERR_INTERNAL_STATE",
                pC->Vstate);
            return M4VSS3GPP_ERR_INTERNAL_STATE;
    }

    /**
    * Return with no error */
    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
 * @brief    Check which video process mode we must use, depending on the output CTS.
714 * @param pC (IN/OUT) Internal edit context 715 ****************************************************************************** 716 */ 717static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 718 M4VSS3GPP_InternalEditContext *pC ) 719{ 720 M4OSA_ERR err; 721 // Decorrelate input and output encoding timestamp to handle encoder prefetch 722 const M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 723 /**< Transition duration */ 724 const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 725 726 M4OSA_Int32 iTmp; 727 728 const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate; 729 730 /** 731 * Check if Clip1 is on its begin cut, or in an effect zone */ 732 M4VSS3GPP_intCheckVideoEffects(pC, 1); 733 734 /** 735 * Check if we are in the transition with next clip */ 736 if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) ) 737 { 738 /** 739 * We are in a transition */ 740 pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION; 741 pC->bTransitionEffect = M4OSA_TRUE; 742 743 /** 744 * Open second clip for transition, if not yet opened */ 745 if( M4OSA_NULL == pC->pC2 ) 746 { 747 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; 748 749 err = M4VSS3GPP_intOpenClip(pC, &pC->pC2, 750 &pC->pClipList[pC->uiCurrentClip + 1]); 751 752 if( M4NO_ERROR != err ) 753 { 754 M4OSA_TRACE1_1( 755 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!", 756 err); 757 return err; 758 } 759 760 /** 761 * Add current video output CTS to the clip offset 762 * (audio output CTS is not yet at the transition, so audio 763 * offset can't be updated yet). */ 764 // Decorrelate input and output encoding timestamp to handle encoder prefetch 765 pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts; 766 767 /** 768 * 2005-03-24: BugFix for audio-video synchro: 769 * Update transition duration due to the actual video transition beginning time. 770 * It will avoid desynchronization when doing the audio transition. 
*/ 771 // Decorrelate input and output encoding timestamp to handle encoder prefetch 772 iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\ 773 - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset); 774 if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration) 775 /**< Test in case of a very short transition */ 776 { 777 pC->pTransitionList[pC-> 778 uiCurrentClip].uiTransitionDuration -= iTmp; 779 780 /** 781 * Don't forget to also correct the total duration used for the progress bar 782 * (it was computed with the original transition duration). */ 783 pC->ewc.iOutputDuration += iTmp; 784 } 785 /**< No "else" here because it's hard predict the effect of 0 duration transition...*/ 786 } 787 788 /** 789 * Check effects for clip2 */ 790 M4VSS3GPP_intCheckVideoEffects(pC, 2); 791 } 792 else 793 { 794 /** 795 * We are not in a transition */ 796 pC->bTransitionEffect = M4OSA_FALSE; 797 798 /* If there is an effect we go to decode/encode mode */ 799 if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) || 800 (pC->pC1->pSettings->FileType == 801 M4VIDEOEDITING_kFileType_ARGB8888) || 802 (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) { 803 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 804 } 805 /* We do a begin cut, except if already done (time is not progressing because we want 806 to catch all P-frames after the cut) */ 807 else if( M4OSA_TRUE == pC->bClip1AtBeginCut ) 808 { 809 if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) { 810 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 811 pC->bEncodeTillEoF = M4OSA_TRUE; 812 } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 813 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) { 814 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 815 } else { 816 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 817 } 818 } 819 /* Else we are in default copy/paste mode */ 820 else 821 { 822 if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == 
previousVstate) 823 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) 824 { 825 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 826 } 827 else if( pC->bIsMMS == M4OSA_TRUE ) 828 { 829 M4OSA_UInt32 currentBitrate; 830 M4OSA_ERR err = M4NO_ERROR; 831 832 /* Do we need to reencode the video to downgrade the bitrate or not ? */ 833 /* Let's compute the cirrent bitrate of the current edited clip */ 834 err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption( 835 pC->pC1->pReaderContext, 836 M4READER_kOptionID_Bitrate, ¤tBitrate); 837 838 if( err != M4NO_ERROR ) 839 { 840 M4OSA_TRACE1_1( 841 "M4VSS3GPP_intCheckVideoMode:\ 842 Error when getting next bitrate of edited clip: 0x%x", 843 err); 844 return err; 845 } 846 847 /* Remove audio bitrate */ 848 currentBitrate -= 12200; 849 850 /* Test if we go into copy/paste mode or into decode/encode mode */ 851 if( currentBitrate > pC->uiMMSVideoBitrate ) 852 { 853 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 854 } 855 else 856 { 857 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 858 } 859 } 860 else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) && 861 (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) && 862 pC->bEncodeTillEoF == M4OSA_FALSE) 863 { 864 /** 865 * Test if we go into copy/paste mode or into decode/encode mode 866 * If an external effect has been applied on the current clip 867 * then continue to be in decode/encode mode till end of 868 * clip to avoid H.264 distortion. 
869 */ 870 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 871 } 872 } 873 } 874 875 /** 876 * Check if we create an encoder */ 877 if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate) 878 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 879 == previousVstate)) /**< read mode */ 880 && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate) 881 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate) 882 || (M4VSS3GPP_kEditVideoState_TRANSITION 883 == pC->Vstate)) /**< encode mode */ 884 && pC->bIsMMS == M4OSA_FALSE ) 885 { 886 /** 887 * Create the encoder, if not created already*/ 888 if (pC->ewc.encoderState == M4VSS3GPP_kNoEncoder) { 889 err = M4VSS3GPP_intCreateVideoEncoder(pC); 890 891 if( M4NO_ERROR != err ) 892 { 893 M4OSA_TRACE1_1( 894 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder \ 895 returns 0x%x!", err); 896 return err; 897 } 898 } 899 } 900 else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL ) 901 { 902 /** 903 * Create the encoder */ 904 err = M4VSS3GPP_intCreateVideoEncoder(pC); 905 906 if( M4NO_ERROR != err ) 907 { 908 M4OSA_TRACE1_1( 909 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 910 err); 911 return err; 912 } 913 } 914 915 /** 916 * When we go from filtering to read/write, we must act like a begin cut, 917 * because the last filtered image may be different than the original image. 
*/ 918 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 919 || (M4VSS3GPP_kEditVideoState_TRANSITION 920 == previousVstate)) /**< encode mode */ 921 && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */ 922 && (pC->bEncodeTillEoF == M4OSA_FALSE) ) 923 { 924 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 925 } 926 927 /** 928 * Check if we destroy an encoder */ 929 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 930 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 931 || (M4VSS3GPP_kEditVideoState_TRANSITION 932 == previousVstate)) /**< encode mode */ 933 && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) 934 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 935 == pC->Vstate)) /**< read mode */ 936 && pC->bIsMMS == M4OSA_FALSE ) 937 { 938 /** 939 * Destroy the previously created encoder */ 940 err = M4VSS3GPP_intDestroyVideoEncoder(pC); 941 942 if( M4NO_ERROR != err ) 943 { 944 M4OSA_TRACE1_1( 945 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!", 946 err); 947 return err; 948 } 949 } 950 951 /** 952 * Return with no error */ 953 M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR"); 954 return M4NO_ERROR; 955} 956 957/****************************************************************************** 958 * M4OSA_ERR M4VSS3GPP_intStartAU() 959 * @brief StartAU writer-like interface used for the VSS 3GPP only 960 * @note 961 * @param pContext: (IN) It is the VSS 3GPP context in our case 962 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 963 * @param pAU: (IN/OUT) Access Unit to be prepared. 
964 * @return M4NO_ERROR: there is no error 965 ****************************************************************************** 966 */ 967M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext, 968 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 969{ 970 M4OSA_ERR err; 971 M4OSA_UInt32 uiMaxAuSize; 972 973 /** 974 * Given context is actually the VSS3GPP context */ 975 M4VSS3GPP_InternalEditContext *pC = 976 (M4VSS3GPP_InternalEditContext *)pContext; 977 978 /** 979 * Get the output AU to write into */ 980 err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext, 981 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 982 983 if( M4NO_ERROR != err ) 984 { 985 M4OSA_TRACE1_1( 986 "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!", 987 err); 988 return err; 989 } 990 991 /** 992 * Return */ 993 M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR"); 994 return M4NO_ERROR; 995} 996 997/****************************************************************************** 998 * M4OSA_ERR M4VSS3GPP_intProcessAU() 999 * @brief ProcessAU writer-like interface used for the VSS 3GPP only 1000 * @note 1001 * @param pContext: (IN) It is the VSS 3GPP context in our case 1002 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 
 * @param    pAU:        (IN/OUT) Access Unit to be written
 * @return    M4NO_ERROR: there is no error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
                                 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
{
    M4OSA_ERR err;

    /**
    * Given context is actually the VSS3GPP context */
    M4VSS3GPP_InternalEditContext *pC =
        (M4VSS3GPP_InternalEditContext *)pContext;

    /**
    * Fix the encoded AU time */
    // Decorrelate input and output encoding timestamp to handle encoder prefetch
    pC->ewc.dOutputVidCts = pAU->CTS;
    /**
    * Update time info for the Counter Time System to be equal to the bit-stream time */
    M4VSS3GPP_intUpdateTimeInfo(pC, pAU);

    /**
    * Write the AU.
    * Note: streamID is not used; the AU is always written to the hardcoded
    * M4VSS3GPP_WRITER_VIDEO_STREAM_ID video stream. */
    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);

    if( M4NO_ERROR != err )
    {
        M4OSA_TRACE1_1(
            "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
            err);
        return err;
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intVPP()
 * @brief    We implement our own VideoPreProcessing function
 * @note    It is called by the video encoder
 * @param    pContext    (IN) VPP context, which actually is the VSS 3GPP context in our case
 * @param    pPlaneIn    (IN)
 * @param    pPlaneOut    (IN/OUT) Pointer to an array of 3 planes that will contain the output
 *                        YUV420 image
 * @return    M4NO_ERROR:    No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
                           M4VIFI_ImagePlane *pPlaneOut
 )
{
    M4OSA_ERR err = M4NO_ERROR;
    M4_MediaTime ts;
    M4VIFI_ImagePlane *pTmp = M4OSA_NULL;
    M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ;
    M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
    M4VIFI_ImagePlane pTemp1[3],pTemp2[3];
    M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3];
    M4OSA_UInt32  i = 0, yuvFrameWidth = 0, yuvFrameHeight = 0;

    /**
    * VPP context is actually the VSS3GPP context */
    M4VSS3GPP_InternalEditContext *pC =
        (M4VSS3GPP_InternalEditContext *)pContext;

    /* Zero the local plane descriptors so pac_data starts out NULL */
    memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane));
    memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane));
    memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane));
    memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane));

    /**
    * Reset VPP error remembered in context */
    pC->ewc.VppError = M4NO_ERROR;

    /**
    * At the end of the editing, we may be called when no more clip is loaded.
    * (because to close the encoder properly it must be stepped one or twice...) */
    if( M4OSA_NULL == pC->pC1 )
    {
        /**
        * We must fill the input of the encoder with a dummy image, because
        * encoding noise leads to a huge video AU, and thus a writer buffer overflow.
 */
        memset((void *)pPlaneOut[0].pac_data,0,
            pPlaneOut[0].u_stride * pPlaneOut[0].u_height);
        memset((void *)pPlaneOut[1].pac_data,0,
            pPlaneOut[1].u_stride * pPlaneOut[1].u_height);
        memset((void *)pPlaneOut[2].pac_data,0,
            pPlaneOut[2].u_stride * pPlaneOut[2].u_height);

        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)");
        return M4NO_ERROR;
    }

    /**
    **************** Transition case ****************/
    if( M4OSA_TRUE == pC->bTransitionEffect )
    {
        /* NOTE(review): this condition is always true here — pTemp1 was just
         * memset to zero above, so pac_data is necessarily NULL. */
        if (M4OSA_NULL == pTemp1[0].pac_data)
        {
            err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth,
                                              pC->ewc.uiVideoHeight);
            if (M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \
                               returning M4NO_ERROR", err);
                pC->ewc.VppError = err;
                return M4NO_ERROR; /**< Return no error to the encoder core
                                   (else it may leak in some situations...) */
            }
        }
        /* NOTE(review): same — always true, pTemp2 was just zeroed. */
        if (M4OSA_NULL == pTemp2[0].pac_data)
        {
            err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth,
                                              pC->ewc.uiVideoHeight);
            if (M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \
                               returning M4NO_ERROR", err);
                pC->ewc.VppError = err;
                return M4NO_ERROR; /**< Return no error to the encoder core
                                   (else it may leak in some situations...) */
            }
        }
        /**
        * We need two intermediate planes (these live in the context, so they
        * really may already be allocated from a previous call) */
        if( M4OSA_NULL == pC->yuv1[0].pac_data )
        {
            err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
                pC->ewc.uiVideoHeight);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
                    returning M4NO_ERROR",
                    err);
                pC->ewc.VppError = err;
                return
                    M4NO_ERROR; /**< Return no error to the encoder core
                                (else it may leak in some situations...)
 */
            }
        }

        if( M4OSA_NULL == pC->yuv2[0].pac_data )
        {
            err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth,
                pC->ewc.uiVideoHeight);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
                    returning M4NO_ERROR",
                    err);
                pC->ewc.VppError = err;
                return
                    M4NO_ERROR; /**< Return no error to the encoder core
                                (else it may leak in some situations...) */
            }
        }

        /**
        * Allocate new temporary plane if needed */
        if( M4OSA_NULL == pC->yuv3[0].pac_data )
        {
            err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth,
                pC->ewc.uiVideoHeight);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
                    returning M4NO_ERROR",
                    err);
                pC->ewc.VppError = err;
                return
                    M4NO_ERROR; /**< Return no error to the encoder core
                                (else it may leak in some situations...)
 */
            }
        }

        /**
        * Compute the time in the clip1 base: ts = to - Offset */
        // Decorrelate input and output encoding timestamp to handle encoder prefetch
        ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;

        /**
        * Render Clip1 */
        if( pC->pC1->isRenderDup == M4OSA_FALSE )
        {
            pC->bIssecondClip = M4OSA_FALSE;

            err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE,
                                                    pTempPlaneClip1, pTemp1,
                                                    pPlaneOut);
            if ((M4NO_ERROR != err) &&
                (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                    M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
                pC->ewc.VppError = err;
                /** Return no error to the encoder core
                  * else it may leak in some situations.*/
                return M4NO_ERROR;
            }
        }
        if ((pC->pC1->isRenderDup == M4OSA_TRUE) ||
            (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
            pTmp = pC->yuv1;
            /* NOTE(review): compares a pointer against M4NO_ERROR (an error
             * code, value 0); M4OSA_NULL is almost certainly intended here. */
            if (pC->pC1->lastDecodedPlane != M4NO_ERROR) {
                /* Copy last decoded plane to output plane */
                memcpy((void *)pTmp[0].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[0].pac_data,
                    (pTmp[0].u_height * pTmp[0].u_width));
                memcpy((void *)pTmp[1].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[1].pac_data,
                    (pTmp[1].u_height * pTmp[1].u_width));
                memcpy((void *)pTmp[2].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[2].pac_data,
                    (pTmp[2].u_height * pTmp[2].u_width));
            }
            pC->pC1->lastDecodedPlane = pTmp;
        }

        /**
        * Compute the time in the clip2 base: ts = to - Offset */
        // Decorrelate input and output encoding timestamp to handle encoder prefetch
        ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset;
        /**
        * Render Clip2 */
        if( pC->pC2->isRenderDup == M4OSA_FALSE )
        {

            err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, ts, M4OSA_FALSE,
                                                    pTempPlaneClip2, pTemp2,
                                                    pPlaneOut);
            if ((M4NO_ERROR != err) &&
                (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                    M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
                pC->ewc.VppError = err;
                /** Return no error to the encoder core
                  * else it may leak in some situations.*/
                return M4NO_ERROR;
            }
        }
        if ((pC->pC2->isRenderDup == M4OSA_TRUE) ||
            (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
            pTmp = pC->yuv2;
            /* NOTE(review): same pointer-vs-M4NO_ERROR comparison as above. */
            if (pC->pC2->lastDecodedPlane != M4NO_ERROR) {
                /* Copy last decoded plane to output plane */
                memcpy((void *)pTmp[0].pac_data,
                    (void *)pC->pC2->lastDecodedPlane[0].pac_data,
                    (pTmp[0].u_height * pTmp[0].u_width));
                memcpy((void *)pTmp[1].pac_data,
                    (void *)pC->pC2->lastDecodedPlane[1].pac_data,
                    (pTmp[1].u_height * pTmp[1].u_width));
                memcpy((void *)pTmp[2].pac_data,
                    (void *)pC->pC2->lastDecodedPlane[2].pac_data,
                    (pTmp[2].u_height * pTmp[2].u_width));
            }
            pC->pC2->lastDecodedPlane = pTmp;
        }


        pTmp = pPlaneOut;
        err = M4VSS3GPP_intVideoTransition(pC, pTmp);

        if( M4NO_ERROR != err )
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\
                returning M4NO_ERROR",
                err);
            pC->ewc.VppError = err;
            return M4NO_ERROR; /**< Return no error to the encoder core
                               (else it may leak in some situations...)
 */
        }
        /* Release all per-call temporary planes used for the transition */
        for (i=0; i < 3; i++)
        {
            if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) {
                free(pTempPlaneClip2[i].pac_data);
                pTempPlaneClip2[i].pac_data = M4OSA_NULL;
            }

            if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) {
                free(pTempPlaneClip1[i].pac_data);
                pTempPlaneClip1[i].pac_data = M4OSA_NULL;
            }

            if (pTemp2[i].pac_data != M4OSA_NULL) {
                free(pTemp2[i].pac_data);
                pTemp2[i].pac_data = M4OSA_NULL;
            }

            if (pTemp1[i].pac_data != M4OSA_NULL) {
                free(pTemp1[i].pac_data);
                pTemp1[i].pac_data = M4OSA_NULL;
            }
        }
    }
    /**
    **************** No Transition case ****************/
    else
    {
        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case");
        /**
        * Compute the time in the clip base: ts = to - Offset */
        ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
        /**
        * Render */
        if (pC->pC1->isRenderDup == M4OSA_FALSE) {
            M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false");
            /**
            * Check if resizing is needed */
            if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
                if ((pC->pC1->pSettings->FileType ==
                     M4VIDEOEDITING_kFileType_ARGB8888) &&
                    (pC->nbActiveEffects == 0) &&
                    (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) {
                    /* ARGB still image without effects: let the decoder render
                     * straight into the encoder's output planes */
                    err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                          pC->pC1->pViDecCtxt,
                          M4DECODER_kOptionID_EnableYuvWithEffect,
                          (M4OSA_DataOption)M4OSA_TRUE);
                    if (M4NO_ERROR == err ) {
                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
                              pC->pC1->pViDecCtxt, &ts,
                              pPlaneOut, M4OSA_TRUE);
                    }
                } else {
                    if (pC->pC1->pSettings->FileType ==
                        M4VIDEOEDITING_kFileType_ARGB8888) {
                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                              pC->pC1->pViDecCtxt,
                              M4DECODER_kOptionID_EnableYuvWithEffect,
                              (M4OSA_DataOption)M4OSA_FALSE);
                    }
                    if (M4NO_ERROR == err) {
                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
                              pC->pC1->pViDecCtxt, &ts,
                              pC->pC1->m_pPreResizeFrame, M4OSA_TRUE);
                    }
                }
                if (M4NO_ERROR != err) {
                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                        m_pFctRender() returns error 0x%x", err);
                    pC->ewc.VppError = err;
                    return M4NO_ERROR;
                }
                if (pC->pC1->pSettings->FileType !=
                    M4VIDEOEDITING_kFileType_ARGB8888) {
                    if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
                        // Save width and height of un-rotated frame
                        yuvFrameWidth = pC->pC1->m_pPreResizeFrame[0].u_width;
                        yuvFrameHeight = pC->pC1->m_pPreResizeFrame[0].u_height;
                        err = M4VSS3GPP_intRotateVideo(pC->pC1->m_pPreResizeFrame,
                            pC->pC1->pSettings->ClipProperties.videoRotationDegrees);
                        if (M4NO_ERROR != err) {
                            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                                rotateVideo() returns error 0x%x", err);
                            pC->ewc.VppError = err;
                            return M4NO_ERROR;
                        }
                    }
                }

                if (pC->nbActiveEffects > 0) {
                    pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE;
                    /**
                    * If we do modify the image, we need an intermediate
                    * image plane */
                    if (M4OSA_NULL == pTemp1[0].pac_data) {
                        err = M4VSS3GPP_intAllocateYUV420(pTemp1,
                                  pC->pC1->m_pPreResizeFrame[0].u_width,
                                  pC->pC1->m_pPreResizeFrame[0].u_height);
                        if (M4NO_ERROR != err) {
                            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                                M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
                            pC->ewc.VppError = err;
                            return M4NO_ERROR;
                        }
                    }
                    err = M4VSS3GPP_intApplyVideoEffect(pC,
                              pC->pC1->m_pPreResizeFrame,pTemp1);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                            M4VSS3GPP_intApplyVideoEffect() error 0x%x", err);
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                    pDecoderRenderFrame= pTemp1;

                } else {
                    pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
                }

                pTmp = pPlaneOut;
                if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
                    (pC->pC1->pSettings->FileType !=
                     M4VIDEOEDITING_kFileType_ARGB8888)) {

                    err = M4VSS3GPP_intApplyRenderingMode(pC,
                              pC->pC1->pSettings->xVSS.MediaRendering,
                              pDecoderRenderFrame, pTmp);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                            M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err);
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                }

                if ((pC->pC1->pSettings->FileType ==
                     M4VIDEOEDITING_kFileType_ARGB8888) &&
                    (pC->nbActiveEffects == 0) &&
                    (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) {

                    err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                              pC->pC1->pViDecCtxt,
                              M4DECODER_kOptionID_YuvWithEffectNonContiguous,
                              (M4OSA_DataOption)pTmp);
                    if (M4NO_ERROR != err) {
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                    pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE;
                }

                // Reset original width and height for resize frame plane
                if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees &&
                    180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {

                    M4VSS3GPP_intSetYUV420Plane(pC->pC1->m_pPreResizeFrame,
                                                yuvFrameWidth, yuvFrameHeight);
                }
            }
            else
            {
                M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required");
                if ((pC->nbActiveEffects > 0) ||
                    ((0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees)
                    && (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees))) {
                    /** If we do modify the image, we need an
                     * intermediate image plane */
                    if (M4OSA_NULL == pTemp1[0].pac_data) {
                        err = M4VSS3GPP_intAllocateYUV420(pTemp1,
                                  pC->ewc.uiVideoWidth,
                                  pC->ewc.uiVideoHeight);
                        if (M4NO_ERROR != err) {
                            pC->ewc.VppError = err;
                            return M4NO_ERROR;
                        }
                    }
                    pDecoderRenderFrame = pTemp1;
                }
                else {
                    pDecoderRenderFrame = pPlaneOut;
                }

                pTmp = pPlaneOut;
                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
                          pC->pC1->pViDecCtxt, &ts,
                          pDecoderRenderFrame, M4OSA_TRUE);
                if (M4NO_ERROR != err) {
                    pC->ewc.VppError = err;
                    return M4NO_ERROR;
                }

                if (pC->pC1->pSettings->FileType !=
                    M4VIDEOEDITING_kFileType_ARGB8888) {
                    if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
                        // Save width and height of un-rotated frame
                        yuvFrameWidth = pDecoderRenderFrame[0].u_width;
                        yuvFrameHeight = pDecoderRenderFrame[0].u_height;
                        err = M4VSS3GPP_intRotateVideo(pDecoderRenderFrame,
                            pC->pC1->pSettings->ClipProperties.videoRotationDegrees);
                        if (M4NO_ERROR != err) {
                            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                                rotateVideo() returns error 0x%x", err);
                            pC->ewc.VppError = err;
                            return M4NO_ERROR;
                        }

                        if (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
                            // Apply black border on rotated frame
                            if (pC->nbActiveEffects > 0) {
                                /** we need an intermediate image plane */
                                if (M4OSA_NULL == pTemp2[0].pac_data) {
                                    err = M4VSS3GPP_intAllocateYUV420(pTemp2,
                                              pC->ewc.uiVideoWidth,
                                              pC->ewc.uiVideoHeight);
                                    if (M4NO_ERROR != err) {
                                        pC->ewc.VppError = err;
                                        return M4NO_ERROR;
                                    }
                                }
                                err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders,
                                          pDecoderRenderFrame, pTemp2);
                            } else {
                                err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders,
                                          pDecoderRenderFrame, pTmp);
                            }
                            if (M4NO_ERROR != err) {
                                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
                                    M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err);
                                pC->ewc.VppError = err;
                                return M4NO_ERROR;
                            }
                        }
                    }
                }

                if (pC->nbActiveEffects > 0) {
                    if ((0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) &&
                        (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees)) {
                        err = M4VSS3GPP_intApplyVideoEffect(pC,
                                  pTemp2,pPlaneOut);
                    } else {
                        err = M4VSS3GPP_intApplyVideoEffect(pC,
                                  pDecoderRenderFrame,pPlaneOut);
                    }
                    if (M4NO_ERROR != err) {
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                }

                // Reset original width and height for resize frame plane
                if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees &&
                    180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {

                    M4VSS3GPP_intSetYUV420Plane(pDecoderRenderFrame,
                                                yuvFrameWidth, yuvFrameHeight);

                    /* NOTE(review): frees pTemp2's planes without resetting
                     * pac_data to NULL — harmless here only because pTemp2 is
                     * not touched again before the function returns. */
                    if (pC->nbActiveEffects > 0) {
                        free((void *)pTemp2[0].pac_data);
                        free((void *)pTemp2[1].pac_data);
                        free((void *)pTemp2[2].pac_data);
                    }
                }
            }
            pC->pC1->lastDecodedPlane = pTmp;
            pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts;

        } else {
            M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true");

            if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
                /**
                * Copy last decoded plane to output plane */
                memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[0].pac_data,
                    (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width));

                memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[1].pac_data,
                    (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width));

                memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[2].pac_data,
                    (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width));

                if(pC->nbActiveEffects > 0) {
                    /**
                    * If we do modify the image, we need an
                    * intermediate image plane */
                    if (M4OSA_NULL == pTemp1[0].pac_data) {
                        err = M4VSS3GPP_intAllocateYUV420(pTemp1,
                                  pC->pC1->m_pPreResizeFrame[0].u_width,
                                  pC->pC1->m_pPreResizeFrame[0].u_height);
                        if (M4NO_ERROR != err) {
                            pC->ewc.VppError = err;
                            return M4NO_ERROR;
                        }
                    }

                    err = M4VSS3GPP_intApplyVideoEffect(pC,
                              pC->pC1->m_pPreResizeFrame,pTemp1);
                    if (M4NO_ERROR != err) {
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                    pDecoderRenderFrame= pTemp1;

                } else {
                    pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
                }

                pTmp = pPlaneOut;
                err = M4VSS3GPP_intApplyRenderingMode(pC,
                          pC->pC1->pSettings->xVSS.MediaRendering,
                          pDecoderRenderFrame, pTmp);
                if (M4NO_ERROR != err) {
                    pC->ewc.VppError = err;
                    return M4NO_ERROR;
                }
            } else {

                if (M4OSA_NULL == pTemp1[0].pac_data) {
                    err = M4VSS3GPP_intAllocateYUV420(pTemp1,
                              pC->ewc.uiVideoWidth,
                              pC->ewc.uiVideoHeight);
                    if (M4NO_ERROR != err) {
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                }
                /**
                * Copy last decoded plane to output plane */
                /* NOTE(review): pLastDecodedFrame is still M4OSA_NULL here (it
                 * is never assigned in this function), so these memcpy calls
                 * dereference a null pointer. The freshly allocated pTemp1 was
                 * probably intended as the destination — TODO confirm and fix. */
                memcpy((void *)pLastDecodedFrame[0].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[0].pac_data,
                    (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width));

                memcpy((void *)pLastDecodedFrame[1].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[1].pac_data,
                    (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width));

                memcpy((void *)pLastDecodedFrame[2].pac_data,
                    (void *)pC->pC1->lastDecodedPlane[2].pac_data,
                    (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width));

                pTmp = pPlaneOut;
                /**
                * Check if there is a filter */
                if(pC->nbActiveEffects > 0) {
                    err = M4VSS3GPP_intApplyVideoEffect(pC,
                              pLastDecodedFrame, pTmp);
                    if (M4NO_ERROR != err) {
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                }
            }
            pC->pC1->lastDecodedPlane = pTmp;
        }

        M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts);

        for(i=0;i<3;i++) {
            if(pTemp1[i].pac_data != M4OSA_NULL) {
                free(pTemp1[i].pac_data);
                pTemp1[i].pac_data = M4OSA_NULL;
            }
        }
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
 * @brief    Apply video effect from pPlaneIn to pPlaneOut
 * @param   pC          (IN/OUT) Internal edit context
 * @param   uiClip1orClip2    (IN/OUT) 1 for first clip, 2 for second clip
 *                            (note: this parameter is currently commented out
 *                            of the signature; the clip is selected via
 *                            pC->bIssecondClip instead)
 * @param    pInputPlanes    (IN) Input raw YUV420 image
 * @param    pOutputPlanes    (IN/OUT) Output raw YUV420 image
 * @return    M4NO_ERROR:    No error
 ******************************************************************************
 */
static M4OSA_ERR
M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,
                              M4VIFI_ImagePlane *pPlaneIn,
                              M4VIFI_ImagePlane *pPlaneOut )
{
    M4OSA_ERR err;

    M4VSS3GPP_ClipContext *pClip;
    M4VSS3GPP_EffectSettings *pFx;
    M4VSS3GPP_ExternalProgress extProgress;

    M4OSA_Double VideoEffectTime;
    M4OSA_Double PercentageDone;
    M4OSA_Int32 tmp;

    M4VIFI_ImagePlane *pPlaneTempIn;
    M4VIFI_ImagePlane *pPlaneTempOut;
    M4VIFI_ImagePlane pTempYuvPlane[3];
    M4OSA_UInt8 i;
    M4OSA_UInt8 NumActiveEffects =0;


    pClip = pC->pC1;
    if (pC->bIssecondClip == M4OSA_TRUE)
    {
        NumActiveEffects = pC->nbActiveEffects1;
    }
    else
    {
        NumActiveEffects = pC->nbActiveEffects;
    }

    memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane));

    /**
    * Allocate temporary plane if needed RC
    * (only needed to ping-pong between planes when more than one effect
    * is chained) */
    if (M4OSA_NULL == pTempYuvPlane[0].pac_data && NumActiveEffects > 1)
    {
        err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width,
                                          pPlaneOut->u_height);

        if( M4NO_ERROR != err )
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
                returning M4NO_ERROR",
                err);
            pC->ewc.VppError = err;
            return
                M4NO_ERROR; /**< Return no error to the encoder
core
                            (else it may leak in some situations...) */
        }
    }

    /* Choose the first input/output pair so that, after ping-ponging once per
     * effect, the LAST effect always writes into pPlaneOut. */
    if (NumActiveEffects % 2 == 0)
    {
        pPlaneTempIn = pPlaneIn;
        pPlaneTempOut = pTempYuvPlane;
    }
    else
    {
        pPlaneTempIn = pPlaneIn;
        pPlaneTempOut = pPlaneOut;
    }

    for (i=0; i<NumActiveEffects; i++)
    {
        if (pC->bIssecondClip == M4OSA_TRUE)
        {


            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
            /* Compute how far from the beginning of the effect we are, in clip-base time. */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
                pC->pTransitionList[pC->uiCurrentClip].
                uiTransitionDuration- pFx->uiStartTime;
        }
        else
        {
            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
            /* Compute how far from the beginning of the effect we are, in clip-base time. */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
        }



        /* To calculate %, substract timeIncrement because effect should finish on the last frame*/
        /* which is presented from CTS = eof-timeIncrement till CTS = eof */
        PercentageDone = VideoEffectTime
            / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);

        /* Clamp progress to [0.0, 1.0] */
        if( PercentageDone < 0.0 )
            PercentageDone = 0.0;

        if( PercentageDone > 1.0 )
            PercentageDone = 1.0;

        switch( pFx->VideoEffectType )
        {
            case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
                /**
                * Compute where we are in the effect (scale is 0->1024).
 */
                tmp = (M4OSA_Int32)(PercentageDone * 1024);

                /**
                * Apply the darkening effect */
                err =
                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect:\
                        M4VFL_modifyLumaWithScale returns error 0x%x,\
                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
                        err);
                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
                }
                break;

            case M4VSS3GPP_kVideoEffectType_FadeToBlack:
                /**
                * Compute where we are in the effect (scale is 0->1024) */
                tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);

                /**
                * Apply the darkening effect */
                err =
                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect:\
                        M4VFL_modifyLumaWithScale returns error 0x%x,\
                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
                        err);
                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
                }
                break;

            default:
                if( pFx->VideoEffectType
                    >= M4VSS3GPP_kVideoEffectType_External )
                {
                    /* NOTE(review): Cts is declared but never used. */
                    M4OSA_UInt32 Cts = 0;
                    M4OSA_Int32 nextEffectTime;

                    /**
                    * Compute where we are in the effect (scale is 0->1000) */
                    tmp = (M4OSA_Int32)(PercentageDone * 1000);

                    /**
                    * Set the progress info provided to the external function */
                    extProgress.uiProgress = (M4OSA_UInt32)tmp;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
                    extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
                    extProgress.bIsLast = M4OSA_FALSE;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
                        + pC->dOutputFrameDuration);
                    if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
                    {
                        extProgress.bIsLast = M4OSA_TRUE;
                    }

                    err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
                        pPlaneTempIn, pPlaneTempOut, &extProgress,
                        pFx->VideoEffectType
                        - M4VSS3GPP_kVideoEffectType_External);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intApplyVideoEffect: \
                            External video effect function returns 0x%x!",
                            err);
                        return err;
                    }
                    break;
                }
                else
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
                        returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
                        pFx->VideoEffectType);
                    return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
                }
        }
        /**
        * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */
        if (((i % 2 == 0) && (NumActiveEffects % 2 == 0))
            || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
        {
            pPlaneTempIn = pTempYuvPlane;
            pPlaneTempOut = pPlaneOut;
        }
        else
        {
            pPlaneTempIn = pPlaneOut;
            pPlaneTempOut = pTempYuvPlane;
        }
    }

    /* Release the ping-pong plane (allocated only when >1 effect) */
    for(i=0; i<3; i++) {
        if(pTempYuvPlane[i].pac_data != M4OSA_NULL) {
            free(pTempYuvPlane[i].pac_data);
            pTempYuvPlane[i].pac_data = M4OSA_NULL;
        }
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
 * @brief    Apply video transition effect pC1+pC2->pPlaneOut
 * @param   pC          (IN/OUT) Internal edit context
 * @param    pOutputPlanes    (IN/OUT) Output raw YUV420 image
 * @return    M4NO_ERROR:    No error
 ******************************************************************************
 */
static M4OSA_ERR
M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
                             M4VIFI_ImagePlane *pPlaneOut )
{
    M4OSA_ERR err;
    M4OSA_Int32 iProgress;
    M4VSS3GPP_ExternalProgress extProgress;
    M4VIFI_ImagePlane *pPlane;
    M4OSA_Int32 i;
    const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
        pTransitionList[pC->uiCurrentClip].uiTransitionDuration;

    /**
    * Compute how far from the end cut we are, in clip-base time.
    * It is done with integers because the offset and begin cut have been rounded already. */
    // Decorrelate input and output encoding timestamp to handle encoder prefetch
    iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
        ((M4OSA_Double)pC->pC1->iVoffset);
    /**
    * We must remove the duration of one frame, else we would almost never reach the end
    * (It's kind of a "pile and intervals" issue). */
    iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;

    if( iProgress < 0 ) /**< Sanity checks */
    {
        iProgress = 0;
    }

    /**
    * Compute where we are in the transition, on a base 1000 */
    /* NOTE(review): divides by iDur — assumes the transition duration is
     * non-zero; there is no guard here. TODO confirm callers guarantee it. */
    iProgress = ( ( iDur - iProgress) * 1000) / iDur;

    /**
    * Sanity checks */
    if( iProgress < 0 )
    {
        iProgress = 0;
    }
    else if( iProgress > 1000 )
    {
        iProgress = 1000;
    }

    /* Remap the linear 0..1000 progress according to the configured
     * transition pacing curve */
    switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
    {
        case M4VSS3GPP_TransitionBehaviour_SpeedUp:
            iProgress = ( iProgress * iProgress) / 1000;
            break;

        case M4VSS3GPP_TransitionBehaviour_Linear:
            /*do nothing*/
            break;

        case M4VSS3GPP_TransitionBehaviour_SpeedDown:
            iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
            break;

        case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
            if( iProgress < 500 )
            {
                iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
            }
            else
            {
                iProgress =
                    (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
                    / 500) + 500);
            }
            break;

        case M4VSS3GPP_TransitionBehaviour_FastMiddle:
            if( iProgress < 500 )
            {
                iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
            }
            else
            {
                iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
            }
            break;

        default:
            /*do nothing*/
            break;
    }

    switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
    {
        case M4VSS3GPP_kVideoTransitionType_CrossFade:
            /**
            * Apply the transition effect */
            err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
                (M4ViComImagePlane *)pC->yuv1,
                (M4ViComImagePlane *)pC->yuv2,
                (M4ViComImagePlane *)pPlaneOut, iProgress);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVideoTransition:\
                    M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
                    returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
                    err);
                return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
            }
            break;

        case M4VSS3GPP_kVideoTransitionType_None:
            /**
            * This is a stupid-non optimized version of the None transition...
            * We copy the YUV frame */
            if( iProgress < 500 ) /**< first half of transition */
            {
                pPlane = pC->yuv1;
            }
            else /**< second half of transition */
            {
                pPlane = pC->yuv2;
            }
            /**
            * Copy the input YUV frames */
            i = 3;

            while( i-- > 0 )
            {
                memcpy((void *)pPlaneOut[i].pac_data,
                    (void *)pPlane[i].pac_data,
                    pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
            }
            break;

        default:
            if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
                >= M4VSS3GPP_kVideoTransitionType_External )
            {
                /**
                * Set the progress info provided to the external function */
                extProgress.uiProgress = (M4OSA_UInt32)iProgress;
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
                extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;

                err = pC->pTransitionList[pC->
                    uiCurrentClip].ExtVideoTransitionFct(
                    pC->pTransitionList[pC->
                    uiCurrentClip].pExtVideoTransitionFctCtxt,
                    pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
                    pC->pTransitionList[pC->
                    uiCurrentClip].VideoTransitionType
                    - M4VSS3GPP_kVideoTransitionType_External);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intVideoTransition:\
                        External video transition function returns 0x%x!",
                        err);
                    return err;
                }
                break;
            }
            else
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
                    returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
                    pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
                return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
            }
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
****************************************************************************** 2081 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo() 2082 * @brief Update bit stream time info by Counter Time System to be compliant with 2083 * players using bit stream time info 2084 * @note H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops 2085 * (GOV, see the standard) 2086 * @param pC (IN/OUT) returns time updated video AU, 2087 * the offset between system and video time (MPEG4 only) 2088 * and the state of the current clip (MPEG4 only) 2089 * @return nothing 2090 ****************************************************************************** 2091 */ 2092static M4OSA_Void 2093M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 2094 M4SYS_AccessUnit *pAU ) 2095{ 2096 M4OSA_UInt8 uiTmp; 2097 M4OSA_UInt32 uiCts = 0; 2098 M4OSA_MemAddr8 pTmp; 2099 M4OSA_UInt32 uiAdd; 2100 M4OSA_UInt32 uiCurrGov; 2101 M4OSA_Int8 iDiff; 2102 2103 M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1; 2104 M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset); 2105 2106 /** 2107 * Set H263 time counter from system time */ 2108 if( M4SYS_kH263 == pAU->stream->streamType ) 2109 { 2110 uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5) 2111 % M4VSS3GPP_EDIT_H263_MODULO_TIME); 2112 M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress), 2113 uiTmp); 2114 } 2115 /* 2116 * Set MPEG4 GOV time counter regarding video and system time */ 2117 else if( M4SYS_kMPEG_4 == pAU->stream->streamType ) 2118 { 2119 /* 2120 * If GOV. 2121 * beware of little/big endian! 
*/ 2122 /* correction: read 8 bits block instead of one 32 bits block */ 2123 M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress); 2124 M4OSA_UInt32 temp32 = 0; 2125 2126 temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8)) 2127 + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8) 2128 + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16) 2129 + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24); 2130 2131 M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32, 2132 *(pAU->dataAddress)); 2133 2134 if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 ) 2135 { 2136 pTmp = 2137 (M4OSA_MemAddr8)(pAU->dataAddress 2138 + 1); /**< Jump to the time code (just after the 32 bits header) */ 2139 uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset); 2140 2141 switch( pClipCtxt->bMpeg4GovState ) 2142 { 2143 case M4OSA_FALSE: /*< INIT */ 2144 { 2145 /* video time = ceil (system time + offset) */ 2146 uiCts = ( uiAdd + 999) / 1000; 2147 2148 /* offset update */ 2149 ( *pOffset) += (( uiCts * 1000) - uiAdd); 2150 2151 /* Save values */ 2152 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 2153 2154 /* State to 'first' */ 2155 pClipCtxt->bMpeg4GovState = M4OSA_TRUE; 2156 } 2157 break; 2158 2159 case M4OSA_TRUE: /*< UPDATE */ 2160 { 2161 /* Get current Gov value */ 2162 M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov); 2163 2164 /* video time = floor or ceil (system time + offset) */ 2165 uiCts = (uiAdd / 1000); 2166 iDiff = (M4OSA_Int8)(uiCurrGov 2167 - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts 2168 + pClipCtxt->uiMpeg4PrevGovValueSet); 2169 2170 /* ceiling */ 2171 if( iDiff > 0 ) 2172 { 2173 uiCts += (M4OSA_UInt32)(iDiff); 2174 2175 /* offset update */ 2176 ( *pOffset) += (( uiCts * 1000) - uiAdd); 2177 } 2178 2179 /* Save values */ 2180 pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov; 2181 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 2182 } 2183 break; 2184 } 2185 2186 M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts); 2187 } 2188 } 2189 return; 2190} 2191 2192/** 2193 
****************************************************************************** 2194 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects() 2195 * @brief Check which video effect must be applied at the current time 2196 ****************************************************************************** 2197 */ 2198static M4OSA_Void 2199M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 2200 M4OSA_UInt8 uiClipNumber ) 2201{ 2202 M4OSA_UInt8 uiClipIndex; 2203 M4OSA_UInt8 uiFxIndex, i; 2204 M4VSS3GPP_ClipContext *pClip; 2205 M4VSS3GPP_EffectSettings *pFx; 2206 M4OSA_Int32 Off, BC, EC; 2207 // Decorrelate input and output encoding timestamp to handle encoder prefetch 2208 M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 2209 2210 uiClipIndex = pC->uiCurrentClip; 2211 if (uiClipNumber == 1) { 2212 pClip = pC->pC1; 2213 } else { 2214 pClip = pC->pC2; 2215 } 2216 /** 2217 * Shortcuts for code readability */ 2218 Off = pClip->iVoffset; 2219 BC = pClip->iActualVideoBeginCut; 2220 EC = pClip->iEndTime; 2221 2222 i = 0; 2223 2224 for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ ) 2225 { 2226 /** Shortcut, reverse order because of priority between effects(EndEffect always clean )*/ 2227 pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]); 2228 2229 if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType ) 2230 { 2231 /** 2232 * Check if there is actually a video effect */ 2233 2234 if(uiClipNumber ==1) 2235 { 2236 /**< Are we after the start time of the effect? 2237 * or Are we into the effect duration? 2238 */ 2239 if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) && 2240 (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 2241 /** 2242 * Set the active effect(s) */ 2243 pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex; 2244 2245 /** 2246 * Update counter of active effects */ 2247 i++; 2248 2249 /** 2250 * For all external effects set this flag to true. 
*/ 2251 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2252 { 2253 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2254 } 2255 } 2256 2257 } 2258 else 2259 { 2260 /**< Are we into the effect duration? */ 2261 if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2262 >= (M4OSA_Int32)(pFx->uiStartTime)) 2263 && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2264 <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 2265 /** 2266 * Set the active effect(s) */ 2267 pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex; 2268 2269 /** 2270 * Update counter of active effects */ 2271 i++; 2272 2273 /** 2274 * For all external effects set this flag to true. */ 2275 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2276 { 2277 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2278 } 2279 2280 /** 2281 * The third effect has the highest priority, then the second one, then the first one. 2282 * Hence, as soon as we found an active effect, we can get out of this loop */ 2283 } 2284 } 2285 if (M4VIDEOEDITING_kH264 != 2286 pC->pC1->pSettings->ClipProperties.VideoStreamType) { 2287 2288 // For Mpeg4 and H263 clips, full decode encode not required 2289 pC->m_bClipExternalHasStarted = M4OSA_FALSE; 2290 } 2291 } 2292 } 2293 if(1==uiClipNumber) 2294 { 2295 /** 2296 * Save number of active effects */ 2297 pC->nbActiveEffects = i; 2298 } 2299 else 2300 { 2301 pC->nbActiveEffects1 = i; 2302 } 2303 2304 /** 2305 * Change the absolut time to clip related time */ 2306 t -= Off; 2307 2308 /** 2309 * Check if we are on the begin cut (for clip1 only) */ 2310 if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) ) 2311 { 2312 pC->bClip1AtBeginCut = M4OSA_TRUE; 2313 } 2314 else 2315 { 2316 pC->bClip1AtBeginCut = M4OSA_FALSE; 2317 } 2318 2319 return; 2320} 2321 2322/** 2323 ****************************************************************************** 2324 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder() 2325 * @brief Creates 
the video encoder 2326 * @note 2327 ****************************************************************************** 2328 */ 2329M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2330{ 2331 M4OSA_ERR err; 2332 M4ENCODER_AdvancedParams EncParams; 2333 2334 /** 2335 * Simulate a writer interface with our specific function */ 2336 pC->ewc.OurWriterDataInterface.pProcessAU = 2337 M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific, 2338 but it follow the writer interface */ 2339 pC->ewc.OurWriterDataInterface.pStartAU = 2340 M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific, 2341 but it follow the writer interface */ 2342 pC->ewc.OurWriterDataInterface.pWriterContext = 2343 (M4WRITER_Context) 2344 pC; /**< We give the internal context as writer context */ 2345 2346 /** 2347 * Get the encoder interface, if not already done */ 2348 if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts ) 2349 { 2350 err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI, 2351 pC->ewc.VideoStreamType); 2352 M4OSA_TRACE1_1( 2353 "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x", 2354 err); 2355 M4ERR_CHECK_RETURN(err); 2356 } 2357 2358 /** 2359 * Set encoder shell parameters according to VSS settings */ 2360 2361 /* Common parameters */ 2362 EncParams.InputFormat = M4ENCODER_kIYUV420; 2363 EncParams.FrameWidth = pC->ewc.uiVideoWidth; 2364 EncParams.FrameHeight = pC->ewc.uiVideoHeight; 2365 EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale; 2366 2367 if( pC->bIsMMS == M4OSA_FALSE ) 2368 { 2369 /* No strict regulation in video editor */ 2370 /* Because of the effects and transitions we should allow more flexibility */ 2371 /* Also it prevents to drop important frames (with a bad result on sheduling and 2372 block effetcs) */ 2373 EncParams.bInternalRegulation = M4OSA_FALSE; 2374 // Variable framerate is not supported by StageFright encoders 2375 EncParams.FrameRate = M4ENCODER_k30_FPS; 2376 } 2377 else 2378 { 2379 /* In case of 
MMS mode, we need to enable bitrate regulation to be sure */ 2380 /* to reach the targeted output file size */ 2381 EncParams.bInternalRegulation = M4OSA_TRUE; 2382 EncParams.FrameRate = pC->MMSvideoFramerate; 2383 } 2384 2385 /** 2386 * Other encoder settings (defaults) */ 2387 EncParams.uiHorizontalSearchRange = 0; /* use default */ 2388 EncParams.uiVerticalSearchRange = 0; /* use default */ 2389 EncParams.bErrorResilience = M4OSA_FALSE; /* no error resilience */ 2390 EncParams.uiIVopPeriod = 0; /* use default */ 2391 EncParams.uiMotionEstimationTools = 0; /* M4V_MOTION_EST_TOOLS_ALL */ 2392 EncParams.bAcPrediction = M4OSA_TRUE; /* use AC prediction */ 2393 EncParams.uiStartingQuantizerValue = 10; /* initial QP = 10 */ 2394 EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */ 2395 2396 /** 2397 * Set the video profile and level */ 2398 EncParams.videoProfile = pC->ewc.outputVideoProfile; 2399 EncParams.videoLevel= pC->ewc.outputVideoLevel; 2400 2401 switch ( pC->ewc.VideoStreamType ) 2402 { 2403 case M4SYS_kH263: 2404 2405 EncParams.Format = M4ENCODER_kH263; 2406 2407 EncParams.uiStartingQuantizerValue = 10; 2408 EncParams.uiRateFactor = 1; /* default */ 2409 2410 EncParams.bErrorResilience = M4OSA_FALSE; 2411 EncParams.bDataPartitioning = M4OSA_FALSE; 2412 break; 2413 2414 case M4SYS_kMPEG_4: 2415 2416 EncParams.Format = M4ENCODER_kMPEG4; 2417 2418 EncParams.uiStartingQuantizerValue = 8; 2419 EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration 2420 * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5); 2421 2422 if( EncParams.uiRateFactor == 0 ) 2423 EncParams.uiRateFactor = 1; /* default */ 2424 2425 if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning ) 2426 { 2427 EncParams.bErrorResilience = M4OSA_FALSE; 2428 EncParams.bDataPartitioning = M4OSA_FALSE; 2429 } 2430 else 2431 { 2432 EncParams.bErrorResilience = M4OSA_TRUE; 2433 EncParams.bDataPartitioning = M4OSA_TRUE; 2434 } 2435 break; 2436 2437 case M4SYS_kH264: 2438 
M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264"); 2439 2440 EncParams.Format = M4ENCODER_kH264; 2441 2442 EncParams.uiStartingQuantizerValue = 10; 2443 EncParams.uiRateFactor = 1; /* default */ 2444 2445 EncParams.bErrorResilience = M4OSA_FALSE; 2446 EncParams.bDataPartitioning = M4OSA_FALSE; 2447 //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS; 2448 break; 2449 2450 default: 2451 M4OSA_TRACE1_1( 2452 "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x", 2453 pC->ewc.VideoStreamType); 2454 return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT; 2455 } 2456 2457 if( pC->bIsMMS == M4OSA_FALSE ) 2458 { 2459 EncParams.Bitrate = pC->xVSS.outputVideoBitrate; 2460 2461 } 2462 else 2463 { 2464 EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */ 2465 EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */ 2466 } 2467 2468 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit"); 2469 /** 2470 * Init the video encoder (advanced settings version of the encoder Open function) */ 2471 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext, 2472 &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC, 2473 pC->ShellAPI.pCurrentVideoEncoderExternalAPI, 2474 pC->ShellAPI.pCurrentVideoEncoderUserData); 2475 2476 if( M4NO_ERROR != err ) 2477 { 2478 M4OSA_TRACE1_1( 2479 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x", 2480 err); 2481 return err; 2482 } 2483 2484 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2485 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen"); 2486 2487 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext, 2488 &pC->ewc.WriterVideoAU, &EncParams); 2489 2490 if( M4NO_ERROR != err ) 2491 { 2492 M4OSA_TRACE1_1( 2493 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x", 2494 err); 2495 return err; 2496 } 2497 2498 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2499 M4OSA_TRACE1_0( 
2500 "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart"); 2501 2502 if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart ) 2503 { 2504 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart( 2505 pC->ewc.pEncContext); 2506 2507 if( M4NO_ERROR != err ) 2508 { 2509 M4OSA_TRACE1_1( 2510 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x", 2511 err); 2512 return err; 2513 } 2514 } 2515 2516 pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning; 2517 2518 /** 2519 * Return */ 2520 M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR"); 2521 return M4NO_ERROR; 2522} 2523 2524/** 2525 ****************************************************************************** 2526 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder() 2527 * @brief Destroy the video encoder 2528 * @note 2529 ****************************************************************************** 2530 */ 2531M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2532{ 2533 M4OSA_ERR err = M4NO_ERROR; 2534 2535 if( M4OSA_NULL != pC->ewc.pEncContext ) 2536 { 2537 if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState ) 2538 { 2539 if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL ) 2540 { 2541 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop( 2542 pC->ewc.pEncContext); 2543 2544 if( M4NO_ERROR != err ) 2545 { 2546 M4OSA_TRACE1_1( 2547 "M4VSS3GPP_intDestroyVideoEncoder:\ 2548 pVideoEncoderGlobalFcts->pFctStop returns 0x%x", 2549 err); 2550 /* Well... how the heck do you handle a failed cleanup? */ 2551 } 2552 } 2553 2554 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2555 } 2556 2557 /* Has the encoder actually been opened? Don't close it if that's not the case. 
*/ 2558 if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState ) 2559 { 2560 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose( 2561 pC->ewc.pEncContext); 2562 2563 if( M4NO_ERROR != err ) 2564 { 2565 M4OSA_TRACE1_1( 2566 "M4VSS3GPP_intDestroyVideoEncoder:\ 2567 pVideoEncoderGlobalFcts->pFctClose returns 0x%x", 2568 err); 2569 /* Well... how the heck do you handle a failed cleanup? */ 2570 } 2571 2572 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2573 } 2574 2575 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup( 2576 pC->ewc.pEncContext); 2577 2578 if( M4NO_ERROR != err ) 2579 { 2580 M4OSA_TRACE1_1( 2581 "M4VSS3GPP_intDestroyVideoEncoder:\ 2582 pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!", 2583 err); 2584 /**< We do not return the error here because we still have stuff to free */ 2585 } 2586 2587 pC->ewc.encoderState = M4VSS3GPP_kNoEncoder; 2588 /** 2589 * Reset variable */ 2590 pC->ewc.pEncContext = M4OSA_NULL; 2591 } 2592 2593 M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err); 2594 return err; 2595} 2596 2597/** 2598 ****************************************************************************** 2599 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter() 2600 * @brief Modify the time counter of the given H263 video AU 2601 * @note 2602 * @param pAuDataBuffer (IN/OUT) H263 Video AU to modify 2603 * @param uiCts (IN) New time counter value 2604 * @return nothing 2605 ****************************************************************************** 2606 */ 2607static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 2608 M4OSA_UInt8 uiCts ) 2609{ 2610 /* 2611 * The H263 time counter is 8 bits located on the "x" below: 2612 * 2613 * |--------|--------|--------|--------| 2614 * ???????? ???????? ??????xx xxxxxx?? 
2615 */ 2616 2617 /** 2618 * Write the 2 bits on the third byte */ 2619 pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3); 2620 2621 /** 2622 * Write the 6 bits on the fourth byte */ 2623 pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3); 2624 2625 return; 2626} 2627 2628/** 2629 ****************************************************************************** 2630 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov() 2631 * @brief Modify the time info from Group Of VOP video AU 2632 * @note 2633 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify 2634 * @param uiCtsSec (IN) New GOV time info in second unit 2635 * @return nothing 2636 ****************************************************************************** 2637 */ 2638static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2639 M4OSA_UInt32 uiCtsSec ) 2640{ 2641 /* 2642 * The MPEG-4 time code length is 18 bits: 2643 * 2644 * hh mm marker ss 2645 * xxxxx|xxx xxx 1 xxxx xx ?????? 2646 * |----- ---|--- - ----|-- ------| 2647 */ 2648 M4OSA_UInt8 uiHh; 2649 M4OSA_UInt8 uiMm; 2650 M4OSA_UInt8 uiSs; 2651 M4OSA_UInt8 uiTmp; 2652 2653 /** 2654 * Write the 2 last bits ss */ 2655 uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */ 2656 pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F)); 2657 2658 if( uiCtsSec < 60 ) 2659 { 2660 /** 2661 * Write the 3 last bits of mm, the marker bit (0x10 */ 2662 pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2)); 2663 2664 /** 2665 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2666 pAuDataBuffer[0] = 0; 2667 } 2668 else 2669 { 2670 /** 2671 * Write the 3 last bits of mm, the marker bit (0x10 */ 2672 uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */ 2673 uiMm = (M4OSA_UInt8)(uiTmp % 60); 2674 pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2)); 2675 2676 if( uiTmp < 60 ) 2677 { 2678 /** 2679 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2680 pAuDataBuffer[0] = ((uiMm >> 3)); 2681 } 2682 else 2683 { 2684 
/** 2685 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2686 uiHh = (M4OSA_UInt8)(uiTmp / 60); 2687 pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3)); 2688 } 2689 } 2690 return; 2691} 2692 2693/** 2694 ****************************************************************************** 2695 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov() 2696 * @brief Get the time info from Group Of VOP video AU 2697 * @note 2698 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify 2699 * @param pCtsSec (OUT) Current GOV time info in second unit 2700 * @return nothing 2701 ****************************************************************************** 2702 */ 2703static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2704 M4OSA_UInt32 *pCtsSec ) 2705{ 2706 /* 2707 * The MPEG-4 time code length is 18 bits: 2708 * 2709 * hh mm marker ss 2710 * xxxxx|xxx xxx 1 xxxx xx ?????? 2711 * |----- ---|--- - ----|-- ------| 2712 */ 2713 M4OSA_UInt8 uiHh; 2714 M4OSA_UInt8 uiMm; 2715 M4OSA_UInt8 uiSs; 2716 M4OSA_UInt8 uiTmp; 2717 M4OSA_UInt32 uiCtsSec; 2718 2719 /** 2720 * Read ss */ 2721 uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6); 2722 uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2); 2723 uiCtsSec = uiSs + uiTmp; 2724 2725 /** 2726 * Read mm */ 2727 uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5); 2728 uiTmp = (( pAuDataBuffer[0] & 0x07) << 3); 2729 uiMm = uiMm + uiTmp; 2730 uiCtsSec = ( uiMm * 60) + uiCtsSec; 2731 2732 /** 2733 * Read hh */ 2734 uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3); 2735 2736 if( uiHh ) 2737 { 2738 uiCtsSec = ( uiHh * 3600) + uiCtsSec; 2739 } 2740 2741 /* 2742 * in sec */ 2743 *pCtsSec = uiCtsSec; 2744 2745 return; 2746} 2747 2748/** 2749 ****************************************************************************** 2750 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420() 2751 * @brief Allocate the three YUV 4:2:0 planes 2752 * @note 2753 * @param pPlanes (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures 2754 * @param uiWidth (IN) Image width 2755 * @param uiHeight(IN) Image height 
2756 ****************************************************************************** 2757 */ 2758static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 2759 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ) 2760{ 2761 2762 pPlanes[0].u_width = uiWidth; 2763 pPlanes[0].u_height = uiHeight; 2764 pPlanes[0].u_stride = uiWidth; 2765 pPlanes[0].u_topleft = 0; 2766 pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride 2767 * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data"); 2768 2769 if( M4OSA_NULL == pPlanes[0].pac_data ) 2770 { 2771 M4OSA_TRACE1_0( 2772 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\ 2773 returning M4ERR_ALLOC"); 2774 return M4ERR_ALLOC; 2775 } 2776 2777 pPlanes[1].u_width = pPlanes[0].u_width >> 1; 2778 pPlanes[1].u_height = pPlanes[0].u_height >> 1; 2779 pPlanes[1].u_stride = pPlanes[1].u_width; 2780 pPlanes[1].u_topleft = 0; 2781 pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride 2782 * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data"); 2783 2784 if( M4OSA_NULL == pPlanes[1].pac_data ) 2785 { 2786 M4OSA_TRACE1_0( 2787 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\ 2788 returning M4ERR_ALLOC"); 2789 free((void *)pPlanes[0].pac_data); 2790 pPlanes[0].pac_data = M4OSA_NULL; 2791 return M4ERR_ALLOC; 2792 } 2793 2794 pPlanes[2].u_width = pPlanes[1].u_width; 2795 pPlanes[2].u_height = pPlanes[1].u_height; 2796 pPlanes[2].u_stride = pPlanes[2].u_width; 2797 pPlanes[2].u_topleft = 0; 2798 pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride 2799 * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data"); 2800 2801 if( M4OSA_NULL == pPlanes[2].pac_data ) 2802 { 2803 M4OSA_TRACE1_0( 2804 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\ 2805 returning M4ERR_ALLOC"); 2806 free((void *)pPlanes[0].pac_data); 2807 free((void 
*)pPlanes[1].pac_data); 2808 pPlanes[0].pac_data = M4OSA_NULL; 2809 pPlanes[1].pac_data = M4OSA_NULL; 2810 return M4ERR_ALLOC; 2811 } 2812 2813 memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height); 2814 memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height); 2815 memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height); 2816 /** 2817 * Return */ 2818 M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR"); 2819 return M4NO_ERROR; 2820} 2821 2822/** 2823****************************************************************************** 2824* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 2825* M4OSA_FileReadPointer* pFileReadPtr, 2826* M4VIFI_ImagePlane* pImagePlanes, 2827* M4OSA_UInt32 width, 2828* M4OSA_UInt32 height); 2829* @brief It Coverts and resizes a ARGB8888 image to YUV420 2830* @note 2831* @param pFileIn (IN) The ARGB888 input file 2832* @param pFileReadPtr (IN) Pointer on filesystem functions 2833* @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user. 
2834* ARGB8888 image will be converted and resized to output 2835* YUV420 plane size 2836* @param width (IN) width of the ARGB8888 2837* @param height (IN) height of the ARGB8888 2838* @return M4NO_ERROR: No error 2839* @return M4ERR_ALLOC: memory error 2840* @return M4ERR_PARAMETER: At least one of the function parameters is null 2841****************************************************************************** 2842*/ 2843 2844M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 2845 M4OSA_FileReadPointer* pFileReadPtr, 2846 M4VIFI_ImagePlane* pImagePlanes, 2847 M4OSA_UInt32 width,M4OSA_UInt32 height) { 2848 M4OSA_Context pARGBIn; 2849 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; 2850 M4OSA_UInt32 frameSize_argb = width * height * 4; 2851 M4OSA_UInt32 frameSize_rgb888 = width * height * 3; 2852 M4OSA_UInt32 i = 0,j= 0; 2853 M4OSA_ERR err = M4NO_ERROR; 2854 2855 M4OSA_UInt8 *pArgbPlane = 2856 (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, 2857 M4VS, (M4OSA_Char*)"argb data"); 2858 if (pArgbPlane == M4OSA_NULL) { 2859 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \ 2860 Failed to allocate memory for ARGB plane"); 2861 return M4ERR_ALLOC; 2862 } 2863 2864 /* Get file size */ 2865 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); 2866 if (err != M4NO_ERROR) { 2867 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \ 2868 Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 2869 free(pArgbPlane); 2870 pArgbPlane = M4OSA_NULL; 2871 goto cleanup; 2872 } 2873 2874 err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane, 2875 &frameSize_argb); 2876 if (err != M4NO_ERROR) { 2877 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 2878 Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 2879 pFileReadPtr->closeRead(pARGBIn); 2880 free(pArgbPlane); 2881 pArgbPlane = M4OSA_NULL; 2882 goto cleanup; 2883 } 2884 2885 err = 
pFileReadPtr->closeRead(pARGBIn); 2886 if(err != M4NO_ERROR) { 2887 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 2888 Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 2889 free(pArgbPlane); 2890 pArgbPlane = M4OSA_NULL; 2891 goto cleanup; 2892 } 2893 2894 rgbPlane1.pac_data = 2895 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, 2896 M4VS, (M4OSA_Char*)"RGB888 plane1"); 2897 if(rgbPlane1.pac_data == M4OSA_NULL) { 2898 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 2899 Failed to allocate memory for rgb plane1"); 2900 free(pArgbPlane); 2901 return M4ERR_ALLOC; 2902 } 2903 2904 rgbPlane1.u_height = height; 2905 rgbPlane1.u_width = width; 2906 rgbPlane1.u_stride = width*3; 2907 rgbPlane1.u_topleft = 0; 2908 2909 2910 /** Remove the alpha channel */ 2911 for (i=0, j = 0; i < frameSize_argb; i++) { 2912 if ((i % 4) == 0) continue; 2913 rgbPlane1.pac_data[j] = pArgbPlane[i]; 2914 j++; 2915 } 2916 free(pArgbPlane); 2917 2918 /** 2919 * Check if resizing is required with color conversion */ 2920 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) { 2921 2922 frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3; 2923 rgbPlane2.pac_data = 2924 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS, 2925 (M4OSA_Char*)"rgb Plane2"); 2926 if(rgbPlane2.pac_data == M4OSA_NULL) { 2927 M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2"); 2928 free(rgbPlane1.pac_data); 2929 return M4ERR_ALLOC; 2930 } 2931 rgbPlane2.u_height = pImagePlanes->u_height; 2932 rgbPlane2.u_width = pImagePlanes->u_width; 2933 rgbPlane2.u_stride = pImagePlanes->u_width*3; 2934 rgbPlane2.u_topleft = 0; 2935 2936 /* Resizing */ 2937 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, 2938 &rgbPlane1, &rgbPlane2); 2939 free(rgbPlane1.pac_data); 2940 if(err != M4NO_ERROR) { 2941 M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err); 2942 free(rgbPlane2.pac_data); 2943 return err; 2944 
    }

    /*Converting Resized RGB888 to YUV420 */
    err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
    /* The intermediate RGB buffer is no longer needed once converted. */
    free(rgbPlane2.pac_data);
    if(err != M4NO_ERROR) {
        M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err);
        return err;
    }
    } else {
        /* No resize requested: convert the decoded RGB888 buffer directly
         * into the caller-supplied YUV420 planes. */
        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
        if(err != M4NO_ERROR) {
            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
        }
        free(rgbPlane1.pac_data);
    }
cleanup:
    M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit");
    return err;
}

/**
 ******************************************************************************
 * @brief   Render an input YUV420 plane into the output plane according to
 *          the requested media rendering mode.
 *          - M4xVSS_kResizing:     plain bilinear resize to the output size.
 *          - M4xVSS_kBlackBorders: aspect-preserving resize via AIR into a
 *            temporary plane, then centered copy onto a border-filled output.
 *          - M4xVSS_kCropping:     AIR crop of the input to match the output
 *            aspect ratio, written directly to the output plane.
 * @param   pC            (IN)  Internal edit context (owns the AIR context).
 * @param   renderingMode (IN)  One of the M4xVSS_MediaRendering modes.
 * @param   pInplane      (IN)  Source YUV420 planes (Y/U/V, index 0..2).
 * @param   pOutplane     (OUT) Destination YUV420 planes (Y/U/V).
 * @return  M4NO_ERROR on success, M4ERR_ALLOC on allocation failure, or the
 *          error propagated from the resize/AIR components.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC,
    M4xVSS_MediaRendering renderingMode,
    M4VIFI_ImagePlane* pInplane,
    M4VIFI_ImagePlane* pOutplane) {

    M4OSA_ERR err = M4NO_ERROR;
    M4AIR_Params airParams;
    /* Temporary planes; only allocated/used in the black-borders path.
     * The cleanUp label frees them, guarded by the same renderingMode test. */
    M4VIFI_ImagePlane pImagePlanesTemp[3];
    M4OSA_UInt32 i = 0;

    if (renderingMode == M4xVSS_kBlackBorders) {
        /* Pre-fill the whole output with border color (black in YUV:
         * Y=0x00, U=V=0x80) so only the centered window needs copying. */
        memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE,
            (pOutplane[0].u_height*pOutplane[0].u_stride));
        memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE,
            (pOutplane[1].u_height*pOutplane[1].u_stride));
        memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE,
            (pOutplane[2].u_height*pOutplane[2].u_stride));
    }

    if (renderingMode == M4xVSS_kResizing) {
        /**
        * Call the resize filter.
        * From the intermediate frame to the encoder image plane */
        err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL,
            pInplane, pOutplane);
        if (M4NO_ERROR != err) {
            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
                M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err);
            return err;
        }
    } else {
        M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL;
        M4OSA_UInt8* pOutPlaneY =
            pOutplane[0].pac_data + pOutplane[0].u_topleft;
        M4OSA_UInt8* pOutPlaneU =
            pOutplane[1].pac_data + pOutplane[1].u_topleft;
        M4OSA_UInt8* pOutPlaneV =
            pOutplane[2].pac_data + pOutplane[2].u_topleft;
        M4OSA_UInt8* pInPlaneY = M4OSA_NULL;
        M4OSA_UInt8* pInPlaneU = M4OSA_NULL;
        M4OSA_UInt8* pInPlaneV = M4OSA_NULL;

        /* To keep media aspect ratio*/
        /* Initialize AIR Params*/
        airParams.m_inputCoord.m_x = 0;
        airParams.m_inputCoord.m_y = 0;
        airParams.m_inputSize.m_height = pInplane->u_height;
        airParams.m_inputSize.m_width = pInplane->u_width;
        airParams.m_outputSize.m_width = pOutplane->u_width;
        airParams.m_outputSize.m_height = pOutplane->u_height;
        airParams.m_bOutputStripe = M4OSA_FALSE;
        airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft;

        /**
        Media rendering: Black borders*/
        if (renderingMode == M4xVSS_kBlackBorders) {
            /* Temp planes start out the same size as the output; topleft is
             * adjusted below to center the scaled picture. */
            pImagePlanesTemp[0].u_width = pOutplane[0].u_width;
            pImagePlanesTemp[0].u_height = pOutplane[0].u_height;
            pImagePlanesTemp[0].u_stride = pOutplane[0].u_width;
            pImagePlanesTemp[0].u_topleft = 0;

            pImagePlanesTemp[1].u_width = pOutplane[1].u_width;
            pImagePlanesTemp[1].u_height = pOutplane[1].u_height;
            pImagePlanesTemp[1].u_stride = pOutplane[1].u_width;
            pImagePlanesTemp[1].u_topleft = 0;

            pImagePlanesTemp[2].u_width = pOutplane[2].u_width;
            pImagePlanesTemp[2].u_height = pOutplane[2].u_height;
            pImagePlanesTemp[2].u_stride = pOutplane[2].u_width;
            pImagePlanesTemp[2].u_topleft = 0;

            /**
             * Allocates plan in local image plane structure */
            pImagePlanesTemp[0].pac_data =
                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
                    pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height,
                    M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ;
            if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) {
                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
                return M4ERR_ALLOC;
            }
            pImagePlanesTemp[1].pac_data =
                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
                    pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height,
                    M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ;
            if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) {
                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
                free(pImagePlanesTemp[0].pac_data);
                return M4ERR_ALLOC;
            }
            pImagePlanesTemp[2].pac_data =
                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
                    pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height,
                    M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ;
            if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) {
                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
                free(pImagePlanesTemp[0].pac_data);
                free(pImagePlanesTemp[1].pac_data);
                return M4ERR_ALLOC;
            }

            pInPlaneY = pImagePlanesTemp[0].pac_data ;
            pInPlaneU = pImagePlanesTemp[1].pac_data ;
            pInPlaneV = pImagePlanesTemp[2].pac_data ;

            /* Fill the temp planes with border color too, so the bands not
             * written by AIR stay black after the centered copy. */
            memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE,
                (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));
            memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE,
                (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));
            memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE,
                (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));

            /* Height the input would have at full output width, keeping
             * the input aspect ratio. */
            M4OSA_UInt32 height =
                (pInplane->u_height * pOutplane->u_width) /pInplane->u_width;

            if (height <= pOutplane->u_height) {
                /**
                 * Black borders will be on the top and the bottom side */
                airParams.m_outputSize.m_width = pOutplane->u_width;
                airParams.m_outputSize.m_height = height;
                /**
                 * Number of lines at the top */
                pImagePlanesTemp[0].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height -
                        airParams.m_outputSize.m_height)>>1)) *
                        pImagePlanesTemp[0].u_stride;
                pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height;
                /* Chroma planes: half vertical resolution, hence the extra
                 * >>1 on both the scaled height and the centering offset. */
                pImagePlanesTemp[1].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height -
                        (airParams.m_outputSize.m_height>>1)))>>1) *
                        pImagePlanesTemp[1].u_stride;
                pImagePlanesTemp[1].u_height =
                    airParams.m_outputSize.m_height>>1;
                pImagePlanesTemp[2].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height -
                        (airParams.m_outputSize.m_height>>1)))>>1) *
                        pImagePlanesTemp[2].u_stride;
                pImagePlanesTemp[2].u_height =
                    airParams.m_outputSize.m_height>>1;
            } else {
                /**
                 * Black borders will be on the left and right side */
                airParams.m_outputSize.m_height = pOutplane->u_height;
                airParams.m_outputSize.m_width =
                    (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height);

                /* Horizontal centering: topleft here is a pixel offset
                 * (no stride multiply), unlike the vertical case above. */
                pImagePlanesTemp[0].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width -
                        airParams.m_outputSize.m_width)>>1));
                pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width;
                pImagePlanesTemp[1].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width -
                        (airParams.m_outputSize.m_width>>1)))>>1);
                pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1;
                pImagePlanesTemp[2].u_topleft =
                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width -
                        (airParams.m_outputSize.m_width>>1)))>>1);
                pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1;
            }

            /**
             * Width and height have to be even */
            airParams.m_outputSize.m_width =
                (airParams.m_outputSize.m_width>>1)<<1;
            airParams.m_outputSize.m_height =
                (airParams.m_outputSize.m_height>>1)<<1;
            airParams.m_inputSize.m_width =
                (airParams.m_inputSize.m_width>>1)<<1;
            airParams.m_inputSize.m_height =
                (airParams.m_inputSize.m_height>>1)<<1;
            pImagePlanesTemp[0].u_width =
                (pImagePlanesTemp[0].u_width>>1)<<1;
            pImagePlanesTemp[1].u_width =
                (pImagePlanesTemp[1].u_width>>1)<<1;
            pImagePlanesTemp[2].u_width =
                (pImagePlanesTemp[2].u_width>>1)<<1;
            pImagePlanesTemp[0].u_height =
                (pImagePlanesTemp[0].u_height>>1)<<1;
            pImagePlanesTemp[1].u_height =
                (pImagePlanesTemp[1].u_height>>1)<<1;
            pImagePlanesTemp[2].u_height =
                (pImagePlanesTemp[2].u_height>>1)<<1;

            /**
             * Check that values are coherent */
            if (airParams.m_inputSize.m_height ==
                airParams.m_outputSize.m_height) {
                airParams.m_inputSize.m_width =
                    airParams.m_outputSize.m_width;
            } else if (airParams.m_inputSize.m_width ==
                airParams.m_outputSize.m_width) {
                airParams.m_inputSize.m_height =
                    airParams.m_outputSize.m_height;
            }
            pPlaneTemp = pImagePlanesTemp;
        }

        /**
         * Media rendering: Cropping*/
        if (renderingMode == M4xVSS_kCropping) {
            airParams.m_outputSize.m_height = pOutplane->u_height;
            airParams.m_outputSize.m_width = pOutplane->u_width;
            if ((airParams.m_outputSize.m_height *
                airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width <
                airParams.m_inputSize.m_height) {
                /* Height will be cropped */
                airParams.m_inputSize.m_height =
                    (M4OSA_UInt32)((airParams.m_outputSize.m_height *
                        airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width);
                airParams.m_inputSize.m_height =
                    (airParams.m_inputSize.m_height>>1)<<1;
                airParams.m_inputCoord.m_y =
                    (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height -
                        airParams.m_inputSize.m_height))>>1);
            } else {
                /* Width will be cropped */
                airParams.m_inputSize.m_width =
                    (M4OSA_UInt32)((airParams.m_outputSize.m_width *
                        airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height);
                airParams.m_inputSize.m_width =
                    (airParams.m_inputSize.m_width>>1)<<1;
                airParams.m_inputCoord.m_x =
                    (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width -
                        airParams.m_inputSize.m_width))>>1);
            }
            pPlaneTemp = pOutplane;
        }
        /**
         * Call AIR functions */
        /* The AIR context is created lazily and cached on the edit context. */
        if (M4OSA_NULL == pC->m_air_context) {
            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
            if(err != M4NO_ERROR) {
                M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
                    M4AIR_create returned error 0x%x", err);
                goto cleanUp;
            }
        }

        err = M4AIR_configure(pC->m_air_context, &airParams);
        if (err != M4NO_ERROR) {
            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
                Error when configuring AIR: 0x%x", err);
            M4AIR_cleanUp(pC->m_air_context);
            goto cleanUp;
        }

        err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp);
        if (err != M4NO_ERROR) {
            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
                Error when getting AIR plane: 0x%x", err);
            M4AIR_cleanUp(pC->m_air_context);
            goto cleanUp;
        }

        if (renderingMode == M4xVSS_kBlackBorders) {
            /* Copy the temp planes (tight stride == width) row by row onto
             * the strided output planes; the pre-filled border remains. */
            for (i=0; i<pOutplane[0].u_height; i++) {
                memcpy((void *)pOutPlaneY, (void *)pInPlaneY,
                    pOutplane[0].u_width);
                pInPlaneY += pOutplane[0].u_width;
                pOutPlaneY += pOutplane[0].u_stride;
            }
            for (i=0; i<pOutplane[1].u_height; i++) {
                memcpy((void *)pOutPlaneU, (void *)pInPlaneU,
                    pOutplane[1].u_width);
                pInPlaneU += pOutplane[1].u_width;
                pOutPlaneU += pOutplane[1].u_stride;
            }
            for (i=0; i<pOutplane[2].u_height; i++) {
                memcpy((void *)pOutPlaneV, (void *)pInPlaneV,
                    pOutplane[2].u_width);
                pInPlaneV += pOutplane[2].u_width;
                pOutPlaneV += pOutplane[2].u_stride;
            }
        }
    }
cleanUp:
    /* Temp planes are only ever allocated in the black-borders path. */
    if (renderingMode == M4xVSS_kBlackBorders) {
        for (i=0; i<3; i++) {
            if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) {
                free(pImagePlanesTemp[i].pac_data);
                pImagePlanesTemp[i].pac_data = M4OSA_NULL;
            }
        }
    }
    return err;
}

/**
 ******************************************************************************
 * @brief   Build the YUV420 planes for a still-picture (ARGB8888) clip and
 *          hand them to the video decoder shell via set-option.
 * @note    Allocates pClipCtxt->pPlaneYuv (3 plane descriptors + one
 *          contiguous 1.5*w*h buffer shared by Y/U/V).
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 (
    M4VSS3GPP_InternalEditContext *pC,
    M4VSS3GPP_ClipContext* pClipCtxt) {

    M4OSA_ERR err= M4NO_ERROR;

    // Allocate memory for YUV plane
    pClipCtxt->pPlaneYuv =
        (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
            3*sizeof(M4VIFI_ImagePlane), M4VS,
            (M4OSA_Char*)"pPlaneYuv");

    if (pClipCtxt->pPlaneYuv == M4OSA_NULL) {
        return M4ERR_ALLOC;
    }

    pClipCtxt->pPlaneYuv[0].u_height =
        pClipCtxt->pSettings->ClipProperties.uiStillPicHeight;
    pClipCtxt->pPlaneYuv[0].u_width =
        pClipCtxt->pSettings->ClipProperties.uiStillPicWidth;
    pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width;
    pClipCtxt->pPlaneYuv[0].u_topleft = 0;

    /* Single contiguous buffer: Y (w*h) followed by U and V (w*h/4 each). */
    pClipCtxt->pPlaneYuv[0].pac_data =
        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
            pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 1.5,
            M4VS, (M4OSA_Char*)"imageClip YUV data");
    if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) {
        free(pClipCtxt->pPlaneYuv);
        return M4ERR_ALLOC;
    }

    /* Chroma planes point into the shared buffer, right after Y. */
    pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
    pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
    pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width;
    pClipCtxt->pPlaneYuv[1].u_topleft = 0;
    pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuv[0].pac_data +
        pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width);

    pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
    pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
    pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width;
    pClipCtxt->pPlaneYuv[2].u_topleft = 0;
    pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuv[1].pac_data +
        pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width);

    /* Decode + convert the ARGB8888 still picture into the YUV planes. */
    err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 (
        pClipCtxt->pSettings->pFile,
        pC->pOsaFileReadPtr,
        pClipCtxt->pPlaneYuv,
        pClipCtxt->pSettings->ClipProperties.uiStillPicWidth,
        pClipCtxt->pSettings->ClipProperties.uiStillPicHeight);
    if (M4NO_ERROR != err) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        return err;
    }

    // Set the YUV data to the decoder using setoption
    err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption (
        pClipCtxt->pViDecCtxt,
        M4DECODER_kOptionID_DecYuvData,
        (M4OSA_DataOption)pClipCtxt->pPlaneYuv);
    if (M4NO_ERROR != err) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        return err;
    }

    pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE;

    // Allocate Yuv plane with effect
    pClipCtxt->pPlaneYuvWithEffect =
        (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
            3*sizeof(M4VIFI_ImagePlane), M4VS,
            (M4OSA_Char*)"pPlaneYuvWithEffect");
    if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        return M4ERR_ALLOC;
    }

    /* The "with effect" planes use the output (encoder) video size, not the
     * still-picture size. Same contiguous Y+U+V layout as pPlaneYuv. */
    pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight;
    pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth;
    pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth;
    pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0;

    pClipCtxt->pPlaneYuvWithEffect[0].pac_data =
        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
            pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5,
            M4VS, (M4OSA_Char*)"imageClip YUV data");
    if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        free(pClipCtxt->pPlaneYuvWithEffect);
        return M4ERR_ALLOC;
    }

    pClipCtxt->pPlaneYuvWithEffect[1].u_height =
        pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
    pClipCtxt->pPlaneYuvWithEffect[1].u_width =
        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
    pClipCtxt->pPlaneYuvWithEffect[1].u_stride =
        pClipCtxt->pPlaneYuvWithEffect[1].u_width;
    pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0;
    pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuvWithEffect[0].pac_data +
        pClipCtxt->pPlaneYuvWithEffect[0].u_height * pClipCtxt->pPlaneYuvWithEffect[0].u_width);

    pClipCtxt->pPlaneYuvWithEffect[2].u_height =
        pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
    pClipCtxt->pPlaneYuvWithEffect[2].u_width =
        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
    pClipCtxt->pPlaneYuvWithEffect[2].u_stride =
        pClipCtxt->pPlaneYuvWithEffect[2].u_width;
    pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0;
    pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuvWithEffect[1].pac_data +
        pClipCtxt->pPlaneYuvWithEffect[1].u_height * pClipCtxt->pPlaneYuvWithEffect[1].u_width);

    err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
        pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous,
        (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect);
    if (M4NO_ERROR != err) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        free(pClipCtxt->pPlaneYuvWithEffect);
        return err;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * @brief   Render one decoded frame for the current clip, applying rotation,
 *          video effects and the clip's media-rendering mode, into pC->yuv1
 *          (clip 1) or pC->yuv2 (clip 2).
 * @note    Two main paths: with a pre-resize frame (decoder output size !=
 *          encoder size) and without. ARGB8888 still clips can short-circuit
 *          through the decoder's "YUV with effect" planes when no effect is
 *          active.
 * @param   pC              (IN/OUT) Internal edit context.
 * @param   pClipCtxt       (IN/OUT) Clip context being rendered.
 * @param   ts              (IN)  Target media time for the render.
 * @param   bIsClip1        (IN)  M4OSA_TRUE for clip 1, else clip 2.
 * @param   pResizePlane    (IN/OUT) Scratch plane for the pre-resize path
 *                          (lazily allocated here on first use).
 * @param   pPlaneNoResize  (IN/OUT) Decode target when no resize is needed.
 * @param   pPlaneOut       (IN)  Unused in the visible code paths.
 * @return  M4NO_ERROR or the first error from decode/rotate/effect/render.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC,
    M4VSS3GPP_ClipContext* pClipCtxt,
    M4_MediaTime ts,
    M4OSA_Bool bIsClip1,
    M4VIFI_ImagePlane *pResizePlane,
    M4VIFI_ImagePlane *pPlaneNoResize,
    M4VIFI_ImagePlane *pPlaneOut) {

    M4OSA_ERR err = M4NO_ERROR;
    M4OSA_UInt8 numEffects = 0;
    M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
    M4OSA_UInt32 yuvFrameWidth = 0, yuvFrameHeight = 0;
    M4VIFI_ImagePlane* pTmp = M4OSA_NULL;
    M4VIFI_ImagePlane pTemp[3];

    /**
    Check if resizing is needed */
    if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) {
        /**
        * If we do modify the image, we need an intermediate image plane */
        if (M4OSA_NULL == pResizePlane[0].pac_data) {
            err = M4VSS3GPP_intAllocateYUV420(pResizePlane,
                pClipCtxt->m_pPreResizeFrame[0].u_width,
                pClipCtxt->m_pPreResizeFrame[0].u_height);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intAllocateYUV420 returns 0x%x", err);
                return err;
            }
        }

        /* Still-picture clip with no active effect: let the decoder render
         * straight into the pre-built "YUV with effect" planes. */
        if ((pClipCtxt->pSettings->FileType ==
            M4VIDEOEDITING_kFileType_ARGB8888) &&
            (pC->nbActiveEffects == 0) &&
            (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) {

            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                pClipCtxt->pViDecCtxt,
                M4DECODER_kOptionID_EnableYuvWithEffect,
                (M4OSA_DataOption)M4OSA_TRUE);
            if (M4NO_ERROR == err) {
                pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                    pClipCtxt->pViDecCtxt, &ts,
                    pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE);
            }

        } else {
            if (pClipCtxt->pSettings->FileType ==
                M4VIDEOEDITING_kFileType_ARGB8888) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_EnableYuvWithEffect,
                    (M4OSA_DataOption)M4OSA_FALSE);
            }
            if (M4NO_ERROR == err) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                    pClipCtxt->pViDecCtxt, &ts,
                    pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE);
            }

        }
        if (M4NO_ERROR != err) {
            M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                returns error 0x%x", err);
            return err;
        }

        if (pClipCtxt->pSettings->FileType !=
            M4VIDEOEDITING_kFileType_ARGB8888) {
            if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
                // Save width and height of un-rotated frame
                yuvFrameWidth = pClipCtxt->m_pPreResizeFrame[0].u_width;
                yuvFrameHeight = pClipCtxt->m_pPreResizeFrame[0].u_height;
                err = M4VSS3GPP_intRotateVideo(pClipCtxt->m_pPreResizeFrame,
                    pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
                if (M4NO_ERROR != err) {
                    M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                        rotateVideo() returns error 0x%x", err);
                    return err;
                }
            }
        }

        /* NOTE(review): clip 1 uses nbActiveEffects, clip 2 uses
         * nbActiveEffects1 — this mirrors the convention used elsewhere in
         * this file (see the no-resize branch below). */
        if (bIsClip1 == M4OSA_TRUE) {
            numEffects = pC->nbActiveEffects;
        } else {
            numEffects = pC->nbActiveEffects1;
        }

        if ( numEffects > 0) {
            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE;
            err = M4VSS3GPP_intApplyVideoEffect(pC,
                pClipCtxt->m_pPreResizeFrame, pResizePlane);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyVideoEffect() err 0x%x", err);
                return err;
            }

            pDecoderRenderFrame= pResizePlane;

        } else {
            pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame;
        }

        /* Apply the clip's rendering mode into yuv1/yuv2, unless the frame
         * already sits in the decoder's "with effect" planes. */
        if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
            (pClipCtxt->pSettings->FileType !=
            M4VIDEOEDITING_kFileType_ARGB8888)) {
            if (bIsClip1 == M4OSA_TRUE) {
                err = M4VSS3GPP_intApplyRenderingMode (pC,
                    pClipCtxt->pSettings->xVSS.MediaRendering,
                    pDecoderRenderFrame,pC->yuv1);
            } else {
                err = M4VSS3GPP_intApplyRenderingMode (pC,
                    pClipCtxt->pSettings->xVSS.MediaRendering,
                    pDecoderRenderFrame,pC->yuv2);
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyRenderingMode error 0x%x ", err);
                return err;
            }

            if (bIsClip1 == M4OSA_TRUE) {
                pClipCtxt->lastDecodedPlane = pC->yuv1;
            } else {
                pClipCtxt->lastDecodedPlane = pC->yuv2;
            }

        } else {
            pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect;
        }

        /* Hand the effect output back to the still-picture decoder so the
         * next no-effect render can reuse it. */
        if ((pClipCtxt->pSettings->FileType ==
            M4VIDEOEDITING_kFileType_ARGB8888) &&
            (pC->nbActiveEffects == 0) &&
            (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
            if (bIsClip1 == M4OSA_TRUE) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
                    (M4OSA_DataOption)pC->yuv1);
            } else {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
                    (M4OSA_DataOption)pC->yuv2);
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    null decoder setOption error 0x%x ", err);
                return err;
            }
            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE;
        }

        // Reset original width and height for resize frame plane
        if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees &&
            180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {

            M4VSS3GPP_intSetYUV420Plane(pClipCtxt->m_pPreResizeFrame,
                yuvFrameWidth, yuvFrameHeight);
        }

    } else {
        /* No pre-resize frame: decode at output size directly. */
        if (bIsClip1 == M4OSA_TRUE) {
            numEffects = pC->nbActiveEffects;
        } else {
            numEffects = pC->nbActiveEffects1;
        }

        if(numEffects > 0) {
            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE);

            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    Render returns error 0x%x", err);
                return err;
            }

            if (pClipCtxt->pSettings->FileType !=
                M4VIDEOEDITING_kFileType_ARGB8888) {
                if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
                    // Save width and height of un-rotated frame
                    yuvFrameWidth = pPlaneNoResize[0].u_width;
                    yuvFrameHeight = pPlaneNoResize[0].u_height;
                    err = M4VSS3GPP_intRotateVideo(pPlaneNoResize,
                        pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            rotateVideo() returns error 0x%x", err);
                        return err;
                    }
                }

                if (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
                    // Apply Black Borders to rotated plane
                    /** we need an intermediate image plane */

                    err = M4VSS3GPP_intAllocateYUV420(pTemp,
                        pC->ewc.uiVideoWidth,
                        pC->ewc.uiVideoHeight);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            memAlloc() returns error 0x%x", err);
                        return err;
                    }
                    err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders,
                        pPlaneNoResize, pTemp);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            M4VSS3GPP_intApplyRenderingMode() returns error 0x%x", err);
                        free((void *)pTemp[0].pac_data);
                        free((void *)pTemp[1].pac_data);
                        free((void *)pTemp[2].pac_data);
                        return err;
                    }
                }
            }

            if (bIsClip1 == M4OSA_TRUE) {
                pC->bIssecondClip = M4OSA_FALSE;
                /* Rotated (non-180) frames were letterboxed into pTemp above;
                 * apply effects from there, otherwise from the decode plane.
                 * NOTE(review): pTemp is only allocated in the non-ARGB8888
                 * path — confirm rotation is never set for ARGB8888 clips. */
                if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) &&
                    (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) {
                    err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp ,pC->yuv1);
                } else {
                    err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv1);
                }
                pClipCtxt->lastDecodedPlane = pC->yuv1;
            } else {
                pC->bIssecondClip = M4OSA_TRUE;
                if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) &&
                    (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) {
                    err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp ,pC->yuv2);
                } else {
                    err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv2);
                }
                pClipCtxt->lastDecodedPlane = pC->yuv2;
            }

            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyVideoEffect error 0x%x", err);
                return err;
            }

            // Reset original width and height for resize frame plane
            if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees &&
                180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {

                M4VSS3GPP_intSetYUV420Plane(pPlaneNoResize,
                    yuvFrameWidth, yuvFrameHeight);

                free((void *)pTemp[0].pac_data);
                free((void *)pTemp[1].pac_data);
                free((void *)pTemp[2].pac_data);
            }

        } else {

            /* No effect: choose the decode target so that at most one extra
             * copy is needed (rotation forces a bounce via pPlaneNoResize). */
            if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) &&
                (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) {
                pTmp = pPlaneNoResize;
            } else if (bIsClip1 == M4OSA_TRUE) {
                pTmp = pC->yuv1;
            } else {
                pTmp = pC->yuv2;
            }
            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                pClipCtxt->pViDecCtxt, &ts, pTmp, M4OSA_TRUE);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    Render returns error 0x%x,", err);
                return err;
            }

            if (0 == pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
                pClipCtxt->lastDecodedPlane = pTmp;
            } else {
                // Save width and height of un-rotated frame
                yuvFrameWidth = pTmp[0].u_width;
                yuvFrameHeight = pTmp[0].u_height;
                err = M4VSS3GPP_intRotateVideo(pTmp,
                    pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
                if (M4NO_ERROR != err) {
                    M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                        rotateVideo() returns error 0x%x", err);
                    return err;
                }

                if (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {

                    // Apply Black borders on rotated frame
                    if (bIsClip1) {
                        err = M4VSS3GPP_intApplyRenderingMode (pC,
                            M4xVSS_kBlackBorders,
                            pTmp,pC->yuv1);
                    } else {
                        err = M4VSS3GPP_intApplyRenderingMode (pC,
                            M4xVSS_kBlackBorders,
                            pTmp,pC->yuv2);
                    }
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            M4VSS3GPP_intApplyRenderingMode error 0x%x", err);
                        return err;
                    }

                    // Reset original width and height for noresize frame plane
                    M4VSS3GPP_intSetYUV420Plane(pPlaneNoResize,
                        yuvFrameWidth, yuvFrameHeight);
                }

                if (bIsClip1) {
                    pClipCtxt->lastDecodedPlane = pC->yuv1;
                } else {
                    pClipCtxt->lastDecodedPlane = pC->yuv2;
                }
            }
        }
        /* Record the actually-rendered CTS for the no-resize path. */
        pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts;
    }

    return err;
}

/**
 ******************************************************************************
 * @brief   Rotate a YUV420 frame in place by 90, 180 or 270 degrees.
 * @note    90/270 rotate via temporary planes (width/height swapped) and copy
 *          back; 180 rotates in place with no allocation.
 * @param   pPlaneIn       (IN/OUT) YUV420 planes to rotate; dimensions and
 *                         strides are updated for 90/270.
 * @param   rotationDegree (IN) 90, 180 or 270; anything else returns
 *                         M4ERR_PARAMETER.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
    M4OSA_UInt32 rotationDegree) {

    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane outPlane[3];

    if (rotationDegree != 180) {
        // Swap width and height of in plane
        outPlane[0].u_width = pPlaneIn[0].u_height;
        outPlane[0].u_height = pPlaneIn[0].u_width;
        outPlane[0].u_stride = outPlane[0].u_width;
        outPlane[0].u_topleft = 0;
        outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
            (outPlane[0].u_stride*outPlane[0].u_height), M4VS,
            (M4OSA_Char*)("out Y plane for rotation"));
        if (outPlane[0].pac_data == M4OSA_NULL) {
            return M4ERR_ALLOC;
        }

        outPlane[1].u_width = pPlaneIn[0].u_height/2;
        outPlane[1].u_height = pPlaneIn[0].u_width/2;
        outPlane[1].u_stride = outPlane[1].u_width;
        outPlane[1].u_topleft = 0;
        outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
            (outPlane[1].u_stride*outPlane[1].u_height), M4VS,
            (M4OSA_Char*)("out U plane for rotation"));
        if (outPlane[1].pac_data == M4OSA_NULL) {
            free((void *)outPlane[0].pac_data);
return M4ERR_ALLOC; 3743 } 3744 3745 outPlane[2].u_width = pPlaneIn[0].u_height/2; 3746 outPlane[2].u_height = pPlaneIn[0].u_width/2; 3747 outPlane[2].u_stride = outPlane[2].u_width; 3748 outPlane[2].u_topleft = 0; 3749 outPlane[2].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( 3750 (outPlane[2].u_stride*outPlane[2].u_height), M4VS, 3751 (M4OSA_Char*)("out V plane for rotation")); 3752 if (outPlane[2].pac_data == M4OSA_NULL) { 3753 free((void *)outPlane[0].pac_data); 3754 free((void *)outPlane[1].pac_data); 3755 return M4ERR_ALLOC; 3756 } 3757 } 3758 3759 switch(rotationDegree) { 3760 case 90: 3761 M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane); 3762 break; 3763 3764 case 180: 3765 // In plane rotation, so planeOut = planeIn 3766 M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneIn); 3767 break; 3768 3769 case 270: 3770 M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane); 3771 break; 3772 3773 default: 3774 M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree); 3775 err = M4ERR_PARAMETER; 3776 break; 3777 } 3778 3779 if (rotationDegree != 180) { 3780 memset((void *)pPlaneIn[0].pac_data, 0, 3781 (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); 3782 memset((void *)pPlaneIn[1].pac_data, 0, 3783 (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); 3784 memset((void *)pPlaneIn[2].pac_data, 0, 3785 (pPlaneIn[2].u_width*pPlaneIn[2].u_height)); 3786 // Copy Y, U and V planes 3787 memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data, 3788 (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); 3789 memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data, 3790 (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); 3791 memcpy((void *)pPlaneIn[2].pac_data, (void *)outPlane[2].pac_data, 3792 (pPlaneIn[2].u_width*pPlaneIn[2].u_height)); 3793 3794 free((void *)outPlane[0].pac_data); 3795 free((void *)outPlane[1].pac_data); 3796 free((void *)outPlane[2].pac_data); 3797 3798 // Swap the width and height of the in plane 3799 uint32_t 
temp = 0; 3800 temp = pPlaneIn[0].u_width; 3801 pPlaneIn[0].u_width = pPlaneIn[0].u_height; 3802 pPlaneIn[0].u_height = temp; 3803 pPlaneIn[0].u_stride = pPlaneIn[0].u_width; 3804 3805 temp = pPlaneIn[1].u_width; 3806 pPlaneIn[1].u_width = pPlaneIn[1].u_height; 3807 pPlaneIn[1].u_height = temp; 3808 pPlaneIn[1].u_stride = pPlaneIn[1].u_width; 3809 3810 temp = pPlaneIn[2].u_width; 3811 pPlaneIn[2].u_width = pPlaneIn[2].u_height; 3812 pPlaneIn[2].u_height = temp; 3813 pPlaneIn[2].u_stride = pPlaneIn[2].u_width; 3814 } 3815 3816 return err; 3817} 3818 3819M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn, 3820 M4OSA_UInt32 width, M4OSA_UInt32 height) { 3821 3822 M4OSA_ERR err = M4NO_ERROR; 3823 3824 if (planeIn == M4OSA_NULL) { 3825 M4OSA_TRACE1_0("NULL in plane, error"); 3826 return M4ERR_PARAMETER; 3827 } 3828 3829 planeIn[0].u_width = width; 3830 planeIn[0].u_height = height; 3831 planeIn[0].u_stride = planeIn[0].u_width; 3832 3833 planeIn[1].u_width = width/2; 3834 planeIn[1].u_height = height/2; 3835 planeIn[1].u_stride = planeIn[1].u_width; 3836 3837 planeIn[2].u_width = width/2; 3838 planeIn[2].u_height = height/2; 3839 planeIn[2].u_stride = planeIn[1].u_width; 3840 3841 return err; 3842} 3843