M4VSS3GPP_EditVideo.c revision e9eec0e0975c57c0dac91eb5b4cbb052b7dd011a
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16/** 17 ****************************************************************************** 18 * @file M4VSS3GPP_EditVideo.c 19 * @brief Video Studio Service 3GPP edit API implementation. 20 * @note 21 ****************************************************************************** 22 */ 23 24/****************/ 25/*** Includes ***/ 26/****************/ 27 28#include "NXPSW_CompilerSwitches.h" 29/** 30 * Our header */ 31#include "M4VSS3GPP_API.h" 32#include "M4VSS3GPP_InternalTypes.h" 33#include "M4VSS3GPP_InternalFunctions.h" 34#include "M4VSS3GPP_InternalConfig.h" 35#include "M4VSS3GPP_ErrorCodes.h" 36 37// StageFright encoders require %16 resolution 38#include "M4ENCODER_common.h" 39/** 40 * OSAL headers */ 41#include "M4OSA_Memory.h" /**< OSAL memory management */ 42#include "M4OSA_Debug.h" /**< OSAL debug management */ 43 44/** 45 * component includes */ 46#include "M4VFL_transition.h" /**< video effects */ 47 48/*for transition behaviour*/ 49#include <math.h> 50#include "M4AIR_API.h" 51#include "M4VSS3GPP_Extended_API.h" 52/** Determine absolute value of a. */ 53#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? 
(-(a)) : (a) ) 54#define Y_PLANE_BORDER_VALUE 0x00 55#define U_PLANE_BORDER_VALUE 0x80 56#define V_PLANE_BORDER_VALUE 0x80 57 58/************************************************************************/ 59/* Static local functions */ 60/************************************************************************/ 61 62static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 63 M4VSS3GPP_InternalEditContext *pC ); 64static M4OSA_Void 65M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 66 M4OSA_UInt8 uiClipNumber ); 67static M4OSA_ERR 68M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,/*M4OSA_UInt8 uiClip1orClip2,*/ 69 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut ); 70static M4OSA_ERR 71M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC, 72 M4VIFI_ImagePlane *pPlaneOut ); 73 74static M4OSA_Void 75M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 76 M4SYS_AccessUnit *pAU ); 77static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 78 M4OSA_UInt8 uiCts ); 79static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 80 M4OSA_UInt32 uiCtsSec ); 81static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 82 M4OSA_UInt32 *pCtsSec ); 83static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 84 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ); 85static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420( 86 M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr, 87 M4VIFI_ImagePlane* pImagePlanes, 88 M4OSA_UInt32 width,M4OSA_UInt32 height); 89static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode( 90 M4VSS3GPP_InternalEditContext *pC, 91 M4xVSS_MediaRendering renderingMode, 92 M4VIFI_ImagePlane* pInplane, 93 M4VIFI_ImagePlane* pOutplane); 94 95static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( 96 M4VSS3GPP_InternalEditContext *pC, 97 M4VSS3GPP_ClipContext* pClipCtxt); 98static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect( 99 
        M4VSS3GPP_InternalEditContext *pC,
        M4VSS3GPP_ClipContext* pClipCtxt,
        M4_MediaTime ts,
        M4OSA_Bool bIsClip1,
        M4VIFI_ImagePlane *pResizePlane,
        M4VIFI_ImagePlane *pPlaneNoResize,
        M4VIFI_ImagePlane *pPlaneOut);

static M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
        M4OSA_UInt32 rotationDegree);

static M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
        M4OSA_UInt32 width, M4OSA_UInt32 height);

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
 * @brief    One step of video processing.
 * @note     Dispatches on pC->Vstate (set by M4VSS3GPP_intCheckVideoMode):
 *           - READ_WRITE / AFTER_CUT: copy one compressed AU from reader to
 *             writer without transcoding;
 *           - DECODE_ENCODE / BEGIN_CUT: decode up to the target time and
 *             re-encode one frame;
 *           - TRANSITION: decode both clips and encode one blended frame.
 * @param    pC    (IN/OUT) Internal edit context
 * @return   M4NO_ERROR on success, M4VSS3GPP_WAR_EDITING_DONE when the target
 *           file size is reached, or an error/warning from the shells.
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
{
    M4OSA_ERR err;
    M4OSA_Int32 iCts, iNextCts;
    M4ENCODER_FrameMode FrameMode;
    M4OSA_Bool bSkipFrame;
    M4OSA_UInt16 offset;

    /**
    * Check if we reached end cut. Decorrelate input and output encoding
    * timestamp to handle encoder prefetch
    */
    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
    {
        /* Re-adjust video to precise cut time */
        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
        if ( pC->iInOutTimeOffset < 0 ) {
            pC->iInOutTimeOffset = 0;
        }

        /**
        * Video is done for this clip */
        err = M4VSS3GPP_intReachedEndOfVideo(pC);

        /* RC: to know when a file has been processed */
        /* M4VSS3GPP_WAR_SWITCH_CLIP is the normal "move to next clip" signal,
           not an error, so it is not traced as a failure here. */
        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
                err);
        }

        return err;
    }

    /* Don't change the states if we are in decodeUpTo() */
    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
        && (( pC->pC2 == M4OSA_NULL)
        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
    {
        /**
        * Check Video Mode, depending on the current output CTS */
        err = M4VSS3GPP_intCheckVideoMode(
            pC); /**< This function change the pC->Vstate variable! */

        if (M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
                err);
            return err;
        }
    }


    switch( pC->Vstate )
    {
        /* _________________ */
        /*|                 |*/
        /*| READ_WRITE MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_READ_WRITE:
        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
        {
            M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");

            bSkipFrame = M4OSA_FALSE;

            /**
            * If we were decoding the clip, we must jump to be sure
            * to get to the good position. */
            if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
            {
                /**
                * Jump to target video time (tc = to-T) */
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
                err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
                    pC->pC1->pReaderContext,
                    (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo:\
                        READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
                        err);
                    return err;
                }

                err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                    pC->pC1->pReaderContext,
                    (M4_StreamHandler *)pC->pC1->pVideoStream,
                    &pC->pC1->VideoAU);

                if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo:\
                        READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
                        err);
                    return err;
                }

                M4OSA_TRACE2_3("A .... read : cts = %.0f + %ld [ 0x%x ]",
                    pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                    pC->pC1->VideoAU.m_size);

                /* This frame has been already written in BEGIN CUT step -> skip it */
                if( pC->pC1->VideoAU.m_CTS == iCts
                    && pC->pC1->iVideoRenderCts >= iCts )
                {
                    bSkipFrame = M4OSA_TRUE;
                }
            }

            /* This frame has been already written in BEGIN CUT step -> skip it */
            if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
                && (pC->pC1->VideoAU.m_CTS
                + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
            {
                bSkipFrame = M4OSA_TRUE;
            }

            /**
            * Remember the clip reading state */
            pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            // Rounding is to compensate reader imprecision (m_CTS is actually an integer)
            iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
            iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;

            /* Avoid to write a last frame of duration 0 */
            if( iNextCts > pC->pC1->iEndTime )
                iNextCts = pC->pC1->iEndTime;

            /**
            * If the AU is good to be written, write it, else just skip it */
            if( ( M4OSA_FALSE == bSkipFrame)
                && (( pC->pC1->VideoAU.m_CTS >= iCts)
                && (pC->pC1->VideoAU.m_CTS < iNextCts)
                && (pC->pC1->VideoAU.m_size > 0)) )
            {
                /**
                * Get the output AU to write into */
                err = pC->ShellAPI.pWriterDataFcts->pStartAU(
                    pC->ewc.p3gpWriterContext,
                    M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                    &pC->ewc.WriterVideoAU);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                        pWriterDataFcts->pStartAU(Video) returns 0x%x!",
                        err);
                    return err;
                }

                /**
                * Copy the input AU to the output AU */
                pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
                    (M4OSA_Time)pC->pC1->iVoffset;
                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
                offset = 0;
                /* for h.264 stream do not read the 1st 4 bytes as they are header
                 indicators */
                if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
                    == M4DA_StreamTypeVideoMpeg4Avc )
                    offset = 4;

                pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
                if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
                {
                    M4OSA_TRACE1_2(
                        "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
                        MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
                        pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
                    return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
                }

                memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
                    (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
                    (pC->ewc.WriterVideoAU.size));

                /**
                * Update time info for the Counter Time System to be equal to the bit
                -stream time*/
                M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
                M4OSA_TRACE2_2("B ---- write : cts = %lu [ 0x%x ]",
                    pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);

                /**
                * Write the AU */
                err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
                    pC->ewc.p3gpWriterContext,
                    M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
                    &pC->ewc.WriterVideoAU);

                if( M4NO_ERROR != err )
                {
                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                    file size is reached
                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                    is returned*/
                    if( M4WAR_WRITER_STOP_REQ == err )
                    {
                        M4OSA_TRACE1_0(
                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                        return M4VSS3GPP_WAR_EDITING_DONE;
                    }
                    else
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
                            err);
                        return err;
                    }
                }

                /**
                * Read next AU for next step */
                err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                    pC->pC1->pReaderContext,
                    (M4_StreamHandler *)pC->pC1->pVideoStream,
                    &pC->pC1->VideoAU);

                if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                        m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                        err);
                    return err;
                }

                M4OSA_TRACE2_3("C .... read : cts = %.0f + %ld [ 0x%x ]",
                    pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                    pC->pC1->VideoAU.m_size);
            }
            else
            {
                /**
                * Decide wether to read or to increment time increment */
                if( ( pC->pC1->VideoAU.m_size == 0)
                    || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
                {
                    /*Increment time by the encoding period (NO_MORE_AU or reader in advance */
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;

                    /* Switch (from AFTER_CUT) to normal mode because time is
                    no more frozen */
                    pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
                }
                else
                {
                    /* In other cases (reader late), just let the reader catch up
                    pC->ewc.dVTo */
                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
                        pC->pC1->pReaderContext,
                        (M4_StreamHandler *)pC->pC1->pVideoStream,
                        &pC->pC1->VideoAU);

                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
                            err);
                        return err;
                    }

                    M4OSA_TRACE2_3("D .... read : cts = %.0f + %ld [ 0x%x ]",
                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
                        pC->pC1->VideoAU.m_size);
                }
            }
        }
        break;

        /* ____________________ */
        /*|                    |*/
        /*| DECODE_ENCODE MODE |*/
        /*|   BEGIN_CUT MODE   |*/
        /*|____________________|*/

        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
        {
            M4OSA_TRACE3_0(
                "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");

            /* Still-image clips must have their YUV planes prepared once
               before the first decode/encode pass. */
            if ((pC->pC1->pSettings->FileType ==
                M4VIDEOEDITING_kFileType_ARGB8888) &&
                (M4OSA_FALSE ==
                pC->pC1->pSettings->ClipProperties.bSetImageData)) {

                err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
                if( M4NO_ERROR != err ) {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                    return err;
                }
            }
            /**
            * Decode the video up to the target time
            (will jump to the previous RAP if needed ) */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                    M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
                    err);
                return err;
            }

            /* If the decoding is not completed, do one more step with time frozen */
            if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
            {
                return M4NO_ERROR;
            }

            /**
            * Reset the video pre-processing error before calling the encoder */
            pC->ewc.VppError = M4NO_ERROR;

            M4OSA_TRACE2_0("E ++++ encode AU");

            /**
            * Encode the frame(rendering,filtering and writing will be done
            in encoder callbacks)*/
            if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
                FrameMode = M4ENCODER_kIFrame;
            else
                FrameMode = M4ENCODER_kNormalFrame;

            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                pC->ewc.dInputVidCts, FrameMode);
            /**
            * Check if we had a VPP error... */
            if( M4NO_ERROR != pC->ewc.VppError )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                    pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                    pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                /* "No new frame" from the renderer is tolerated: skip, don't fail */
                if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                {
#endif //M4VSS_SUPPORT_OMX_CODECS

                    return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                }

#endif //M4VSS_SUPPORT_OMX_CODECS

            }
            else if( M4NO_ERROR != err ) /**< ...or an encoder error */
            {
                if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                {
                    M4OSA_TRACE1_0(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                    return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                }
                /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                file size is reached
                The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
                is returned*/
                else if( M4WAR_WRITER_STOP_REQ == err )
                {
                    M4OSA_TRACE1_0(
                        "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                    return M4VSS3GPP_WAR_EDITING_DONE;
                }
                else
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
                        pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                        err);
                    return err;
                }
            }

            /**
            * Increment time by the encoding period (for begin cut, do not increment to not
            loose P-frames) */
            if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
            {
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
            }
        }
        break;

        /* _________________ */
        /*|                 |*/
        /*| TRANSITION MODE |*/
        /*|_________________|*/

        case M4VSS3GPP_kEditVideoState_TRANSITION:
        {
            M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");

            /* Don't decode more than needed */
            if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
                && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
            {
                /**
                * Decode the clip1 video up to the target time
                (will jump to the previous RAP if needed */
                if ((pC->pC1->pSettings->FileType ==
                    M4VIDEOEDITING_kFileType_ARGB8888) &&
                    (M4OSA_FALSE ==
                    pC->pC1->pSettings->ClipProperties.bSetImageData)) {

                    err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
                    if( M4NO_ERROR != err ) {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                        return err;
                    }
                }
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
                    (M4OSA_Int32)pC->ewc.dInputVidCts);
                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
                        err);
                    return err;
                }

                /* If the decoding is not completed, do one more step with time frozen */
                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
                {
                    return M4NO_ERROR;
                }
            }

            /* Don't decode more than needed */
            if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
                && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
            {
                /**
                * Decode the clip2 video up to the target time
                (will jump to the previous RAP if needed) */
                if ((pC->pC2->pSettings->FileType ==
                    M4VIDEOEDITING_kFileType_ARGB8888) &&
                    (M4OSA_FALSE ==
                    pC->pC2->pSettings->ClipProperties.bSetImageData)) {

                    err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2);
                    if( M4NO_ERROR != err ) {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                            M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
                        return err;
                    }
                }

                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
                    (M4OSA_Int32)pC->ewc.dInputVidCts);
                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
                        err);
                    return err;
                }

                /* If the decoding is not completed, do one more step with time frozen */
                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
                {
                    return M4NO_ERROR;
                }
            }

            /**
            * Reset the video pre-processing error before calling the encoder */
            pC->ewc.VppError = M4NO_ERROR;

            M4OSA_TRACE2_0("F **** blend AUs");

            /**
            * Encode the frame (rendering, filtering and writing will be done
            in encoder callbacks */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
                pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);

            /**
            * If encode returns a process frame error, it is likely to be a VPP error */
            if( M4NO_ERROR != pC->ewc.VppError )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                    pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
                    pC->ewc.VppError);
#ifdef M4VSS_SUPPORT_OMX_CODECS

                /* "No new frame" from the renderer is tolerated: skip, don't fail */
                if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
                {

#endif //M4VSS_SUPPORT_OMX_CODECS

                    return pC->ewc.VppError;
#ifdef M4VSS_SUPPORT_OMX_CODECS

                }

#endif //M4VSS_SUPPORT_OMX_CODECS

            }
            else if( M4NO_ERROR != err ) /**< ...or an encoder error */
            {
                if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
                {
                    M4OSA_TRACE1_0(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
                    return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
                }

                /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
                file size is reached
                The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
                returned*/
                else if( M4WAR_WRITER_STOP_REQ == err )
                {
                    M4OSA_TRACE1_0(
                        "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
                    return M4VSS3GPP_WAR_EDITING_DONE;
                }
                else
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
                        pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
                        err);
                    return err;
                }
            }

            /**
            * Increment time by the encoding period */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
        }
        break;

        /* ____________ */
        /*|            |*/
        /*| ERROR CASE |*/
        /*|____________|*/

        default:
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
                returning M4VSS3GPP_ERR_INTERNAL_STATE",
                pC->Vstate);
            return M4VSS3GPP_ERR_INTERNAL_STATE;
    }

    /**
    * Return with no error */
    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
 * @brief    Check which video process mode we must use, depending on the output CTS.
714 * @param pC (IN/OUT) Internal edit context 715 ****************************************************************************** 716 */ 717static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 718 M4VSS3GPP_InternalEditContext *pC ) 719{ 720 M4OSA_ERR err; 721 // Decorrelate input and output encoding timestamp to handle encoder prefetch 722 const M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 723 /**< Transition duration */ 724 const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 725 726 M4OSA_Int32 iTmp; 727 728 const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate; 729 730 /** 731 * Check if Clip1 is on its begin cut, or in an effect zone */ 732 M4VSS3GPP_intCheckVideoEffects(pC, 1); 733 734 /** 735 * Check if we are in the transition with next clip */ 736 if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) ) 737 { 738 /** 739 * We are in a transition */ 740 pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION; 741 pC->bTransitionEffect = M4OSA_TRUE; 742 743 /** 744 * Open second clip for transition, if not yet opened */ 745 if( M4OSA_NULL == pC->pC2 ) 746 { 747 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; 748 749 err = M4VSS3GPP_intOpenClip(pC, &pC->pC2, 750 &pC->pClipList[pC->uiCurrentClip + 1]); 751 752 if( M4NO_ERROR != err ) 753 { 754 M4OSA_TRACE1_1( 755 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!", 756 err); 757 return err; 758 } 759 760 /** 761 * Add current video output CTS to the clip offset 762 * (audio output CTS is not yet at the transition, so audio 763 * offset can't be updated yet). */ 764 // Decorrelate input and output encoding timestamp to handle encoder prefetch 765 pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts; 766 767 /** 768 * 2005-03-24: BugFix for audio-video synchro: 769 * Update transition duration due to the actual video transition beginning time. 770 * It will avoid desynchronization when doing the audio transition. 
*/ 771 // Decorrelate input and output encoding timestamp to handle encoder prefetch 772 iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\ 773 - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset); 774 if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration) 775 /**< Test in case of a very short transition */ 776 { 777 pC->pTransitionList[pC-> 778 uiCurrentClip].uiTransitionDuration -= iTmp; 779 780 /** 781 * Don't forget to also correct the total duration used for the progress bar 782 * (it was computed with the original transition duration). */ 783 pC->ewc.iOutputDuration += iTmp; 784 } 785 /**< No "else" here because it's hard predict the effect of 0 duration transition...*/ 786 } 787 788 /** 789 * Check effects for clip2 */ 790 M4VSS3GPP_intCheckVideoEffects(pC, 2); 791 } 792 else 793 { 794 /** 795 * We are not in a transition */ 796 pC->bTransitionEffect = M4OSA_FALSE; 797 798 /* If there is an effect we go to decode/encode mode */ 799 if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) || 800 (pC->pC1->pSettings->FileType == 801 M4VIDEOEDITING_kFileType_ARGB8888) || 802 (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) { 803 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 804 } 805 /* We do a begin cut, except if already done (time is not progressing because we want 806 to catch all P-frames after the cut) */ 807 else if( M4OSA_TRUE == pC->bClip1AtBeginCut ) 808 { 809 if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) { 810 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 811 pC->bEncodeTillEoF = M4OSA_TRUE; 812 } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 813 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) { 814 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 815 } else { 816 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 817 } 818 } 819 /* Else we are in default copy/paste mode */ 820 else 821 { 822 if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == 
previousVstate) 823 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) 824 { 825 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT; 826 } 827 else if( pC->bIsMMS == M4OSA_TRUE ) 828 { 829 M4OSA_UInt32 currentBitrate; 830 M4OSA_ERR err = M4NO_ERROR; 831 832 /* Do we need to reencode the video to downgrade the bitrate or not ? */ 833 /* Let's compute the cirrent bitrate of the current edited clip */ 834 err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption( 835 pC->pC1->pReaderContext, 836 M4READER_kOptionID_Bitrate, ¤tBitrate); 837 838 if( err != M4NO_ERROR ) 839 { 840 M4OSA_TRACE1_1( 841 "M4VSS3GPP_intCheckVideoMode:\ 842 Error when getting next bitrate of edited clip: 0x%x", 843 err); 844 return err; 845 } 846 847 /* Remove audio bitrate */ 848 currentBitrate -= 12200; 849 850 /* Test if we go into copy/paste mode or into decode/encode mode */ 851 if( currentBitrate > pC->uiMMSVideoBitrate ) 852 { 853 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; 854 } 855 else 856 { 857 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 858 } 859 } 860 else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) && 861 (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) && 862 pC->bEncodeTillEoF == M4OSA_FALSE) 863 { 864 /** 865 * Test if we go into copy/paste mode or into decode/encode mode 866 * If an external effect has been applied on the current clip 867 * then continue to be in decode/encode mode till end of 868 * clip to avoid H.264 distortion. 
869 */ 870 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 871 } 872 } 873 } 874 875 /** 876 * Check if we create an encoder */ 877 if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate) 878 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 879 == previousVstate)) /**< read mode */ 880 && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate) 881 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate) 882 || (M4VSS3GPP_kEditVideoState_TRANSITION 883 == pC->Vstate)) /**< encode mode */ 884 && pC->bIsMMS == M4OSA_FALSE ) 885 { 886 /** 887 * Create the encoder */ 888 err = M4VSS3GPP_intCreateVideoEncoder(pC); 889 890 if( M4NO_ERROR != err ) 891 { 892 M4OSA_TRACE1_1( 893 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 894 err); 895 return err; 896 } 897 } 898 else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL ) 899 { 900 /** 901 * Create the encoder */ 902 err = M4VSS3GPP_intCreateVideoEncoder(pC); 903 904 if( M4NO_ERROR != err ) 905 { 906 M4OSA_TRACE1_1( 907 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 908 err); 909 return err; 910 } 911 } 912 913 /** 914 * When we go from filtering to read/write, we must act like a begin cut, 915 * because the last filtered image may be different than the original image. 
*/ 916 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 917 || (M4VSS3GPP_kEditVideoState_TRANSITION 918 == previousVstate)) /**< encode mode */ 919 && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */ 920 && (pC->bEncodeTillEoF == M4OSA_FALSE) ) 921 { 922 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 923 } 924 925 /** 926 * Check if we destroy an encoder */ 927 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 928 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 929 || (M4VSS3GPP_kEditVideoState_TRANSITION 930 == previousVstate)) /**< encode mode */ 931 && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) 932 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 933 == pC->Vstate)) /**< read mode */ 934 && pC->bIsMMS == M4OSA_FALSE ) 935 { 936 /** 937 * Destroy the previously created encoder */ 938 err = M4VSS3GPP_intDestroyVideoEncoder(pC); 939 940 if( M4NO_ERROR != err ) 941 { 942 M4OSA_TRACE1_1( 943 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!", 944 err); 945 return err; 946 } 947 } 948 949 /** 950 * Return with no error */ 951 M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR"); 952 return M4NO_ERROR; 953} 954 955/****************************************************************************** 956 * M4OSA_ERR M4VSS3GPP_intStartAU() 957 * @brief StartAU writer-like interface used for the VSS 3GPP only 958 * @note 959 * @param pContext: (IN) It is the VSS 3GPP context in our case 960 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 961 * @param pAU: (IN/OUT) Access Unit to be prepared. 
962 * @return M4NO_ERROR: there is no error 963 ****************************************************************************** 964 */ 965M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext, 966 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 967{ 968 M4OSA_ERR err; 969 M4OSA_UInt32 uiMaxAuSize; 970 971 /** 972 * Given context is actually the VSS3GPP context */ 973 M4VSS3GPP_InternalEditContext *pC = 974 (M4VSS3GPP_InternalEditContext *)pContext; 975 976 /** 977 * Get the output AU to write into */ 978 err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext, 979 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 980 981 if( M4NO_ERROR != err ) 982 { 983 M4OSA_TRACE1_1( 984 "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!", 985 err); 986 return err; 987 } 988 989 /** 990 * Return */ 991 M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR"); 992 return M4NO_ERROR; 993} 994 995/****************************************************************************** 996 * M4OSA_ERR M4VSS3GPP_intProcessAU() 997 * @brief ProcessAU writer-like interface used for the VSS 3GPP only 998 * @note 999 * @param pContext: (IN) It is the VSS 3GPP context in our case 1000 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 
1001 * @param pAU: (IN/OUT) Access Unit to be written 1002 * @return M4NO_ERROR: there is no error 1003 ****************************************************************************** 1004 */ 1005M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext, 1006 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 1007{ 1008 M4OSA_ERR err; 1009 1010 /** 1011 * Given context is actually the VSS3GPP context */ 1012 M4VSS3GPP_InternalEditContext *pC = 1013 (M4VSS3GPP_InternalEditContext *)pContext; 1014 1015 /** 1016 * Fix the encoded AU time */ 1017 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1018 pC->ewc.dOutputVidCts = pAU->CTS; 1019 /** 1020 * Update time info for the Counter Time System to be equal to the bit-stream time */ 1021 M4VSS3GPP_intUpdateTimeInfo(pC, pAU); 1022 1023 /** 1024 * Write the AU */ 1025 err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext, 1026 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 1027 1028 if( M4NO_ERROR != err ) 1029 { 1030 M4OSA_TRACE1_1( 1031 "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!", 1032 err); 1033 return err; 1034 } 1035 1036 /** 1037 * Return */ 1038 M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR"); 1039 return M4NO_ERROR; 1040} 1041 1042/** 1043 ****************************************************************************** 1044 * M4OSA_ERR M4VSS3GPP_intVPP() 1045 * @brief We implement our own VideoPreProcessing function 1046 * @note It is called by the video encoder 1047 * @param pContext (IN) VPP context, which actually is the VSS 3GPP context in our case 1048 * @param pPlaneIn (IN) 1049 * @param pPlaneOut (IN/OUT) Pointer to an array of 3 planes that will contain the output 1050 * YUV420 image 1051 * @return M4NO_ERROR: No error 1052 ****************************************************************************** 1053 */ 1054M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn, 1055 M4VIFI_ImagePlane *pPlaneOut 
) 1056{ 1057 M4OSA_ERR err = M4NO_ERROR; 1058 M4_MediaTime ts; 1059 M4VIFI_ImagePlane *pTmp = M4OSA_NULL; 1060 M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ; 1061 M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL; 1062 M4VIFI_ImagePlane pTemp1[3],pTemp2[3]; 1063 M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3]; 1064 M4OSA_UInt32 i = 0, yuvFrameWidth = 0, yuvFrameHeight = 0; 1065 1066 /** 1067 * VPP context is actually the VSS3GPP context */ 1068 M4VSS3GPP_InternalEditContext *pC = 1069 (M4VSS3GPP_InternalEditContext *)pContext; 1070 1071 memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane)); 1072 memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane)); 1073 memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane)); 1074 memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane)); 1075 1076 /** 1077 * Reset VPP error remembered in context */ 1078 pC->ewc.VppError = M4NO_ERROR; 1079 1080 /** 1081 * At the end of the editing, we may be called when no more clip is loaded. 1082 * (because to close the encoder properly it must be stepped one or twice...) */ 1083 if( M4OSA_NULL == pC->pC1 ) 1084 { 1085 /** 1086 * We must fill the input of the encoder with a dummy image, because 1087 * encoding noise leads to a huge video AU, and thus a writer buffer overflow. 
*/ 1088 memset((void *)pPlaneOut[0].pac_data,0, 1089 pPlaneOut[0].u_stride * pPlaneOut[0].u_height); 1090 memset((void *)pPlaneOut[1].pac_data,0, 1091 pPlaneOut[1].u_stride * pPlaneOut[1].u_height); 1092 memset((void *)pPlaneOut[2].pac_data,0, 1093 pPlaneOut[2].u_stride * pPlaneOut[2].u_height); 1094 1095 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)"); 1096 return M4NO_ERROR; 1097 } 1098 1099 /** 1100 **************** Transition case ****************/ 1101 if( M4OSA_TRUE == pC->bTransitionEffect ) 1102 { 1103 if (M4OSA_NULL == pTemp1[0].pac_data) 1104 { 1105 err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth, 1106 pC->ewc.uiVideoHeight); 1107 if (M4NO_ERROR != err) 1108 { 1109 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \ 1110 returning M4NO_ERROR", err); 1111 pC->ewc.VppError = err; 1112 return M4NO_ERROR; /**< Return no error to the encoder core 1113 (else it may leak in some situations...) */ 1114 } 1115 } 1116 if (M4OSA_NULL == pTemp2[0].pac_data) 1117 { 1118 err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth, 1119 pC->ewc.uiVideoHeight); 1120 if (M4NO_ERROR != err) 1121 { 1122 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \ 1123 returning M4NO_ERROR", err); 1124 pC->ewc.VppError = err; 1125 return M4NO_ERROR; /**< Return no error to the encoder core 1126 (else it may leak in some situations...) */ 1127 } 1128 } 1129 /** 1130 * We need two intermediate planes */ 1131 if( M4OSA_NULL == pC->yuv1[0].pac_data ) 1132 { 1133 err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth, 1134 pC->ewc.uiVideoHeight); 1135 1136 if( M4NO_ERROR != err ) 1137 { 1138 M4OSA_TRACE1_1( 1139 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1140 returning M4NO_ERROR", 1141 err); 1142 pC->ewc.VppError = err; 1143 return 1144 M4NO_ERROR; /**< Return no error to the encoder core 1145 (else it may leak in some situations...) 
*/ 1146 } 1147 } 1148 1149 if( M4OSA_NULL == pC->yuv2[0].pac_data ) 1150 { 1151 err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth, 1152 pC->ewc.uiVideoHeight); 1153 1154 if( M4NO_ERROR != err ) 1155 { 1156 M4OSA_TRACE1_1( 1157 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\ 1158 returning M4NO_ERROR", 1159 err); 1160 pC->ewc.VppError = err; 1161 return 1162 M4NO_ERROR; /**< Return no error to the encoder core 1163 (else it may leak in some situations...) */ 1164 } 1165 } 1166 1167 /** 1168 * Allocate new temporary plane if needed */ 1169 if( M4OSA_NULL == pC->yuv3[0].pac_data ) 1170 { 1171 err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth, 1172 pC->ewc.uiVideoHeight); 1173 1174 if( M4NO_ERROR != err ) 1175 { 1176 M4OSA_TRACE1_1( 1177 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1178 returning M4NO_ERROR", 1179 err); 1180 pC->ewc.VppError = err; 1181 return 1182 M4NO_ERROR; /**< Return no error to the encoder core 1183 (else it may leak in some situations...) 
*/ 1184 } 1185 } 1186 1187 /** 1188 * Compute the time in the clip1 base: ts = to - Offset */ 1189 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1190 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1191 1192 /** 1193 * Render Clip1 */ 1194 if( pC->pC1->isRenderDup == M4OSA_FALSE ) 1195 { 1196 pC->bIssecondClip = M4OSA_FALSE; 1197 1198 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE, 1199 pTempPlaneClip1, pTemp1, 1200 pPlaneOut); 1201 if ((M4NO_ERROR != err) && 1202 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { 1203 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1204 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); 1205 pC->ewc.VppError = err; 1206 /** Return no error to the encoder core 1207 * else it may leak in some situations.*/ 1208 return M4NO_ERROR; 1209 } 1210 } 1211 if ((pC->pC1->isRenderDup == M4OSA_TRUE) || 1212 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { 1213 pTmp = pC->yuv1; 1214 if (pC->pC1->lastDecodedPlane != M4NO_ERROR) { 1215 /* Copy last decoded plane to output plane */ 1216 memcpy((void *)pTmp[0].pac_data, 1217 (void *)pC->pC1->lastDecodedPlane[0].pac_data, 1218 (pTmp[0].u_height * pTmp[0].u_width)); 1219 memcpy((void *)pTmp[1].pac_data, 1220 (void *)pC->pC1->lastDecodedPlane[1].pac_data, 1221 (pTmp[1].u_height * pTmp[1].u_width)); 1222 memcpy((void *)pTmp[2].pac_data, 1223 (void *)pC->pC1->lastDecodedPlane[2].pac_data, 1224 (pTmp[2].u_height * pTmp[2].u_width)); 1225 } 1226 pC->pC1->lastDecodedPlane = pTmp; 1227 } 1228 1229 /** 1230 * Compute the time in the clip2 base: ts = to - Offset */ 1231 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1232 ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset; 1233 /** 1234 * Render Clip2 */ 1235 if( pC->pC2->isRenderDup == M4OSA_FALSE ) 1236 { 1237 1238 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, ts, M4OSA_FALSE, 1239 pTempPlaneClip2, pTemp2, 1240 pPlaneOut); 1241 if ((M4NO_ERROR != err) && 1242 
(M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { 1243 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1244 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); 1245 pC->ewc.VppError = err; 1246 /** Return no error to the encoder core 1247 * else it may leak in some situations.*/ 1248 return M4NO_ERROR; 1249 } 1250 } 1251 if ((pC->pC2->isRenderDup == M4OSA_TRUE) || 1252 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { 1253 pTmp = pC->yuv2; 1254 if (pC->pC2->lastDecodedPlane != M4NO_ERROR) { 1255 /* Copy last decoded plane to output plane */ 1256 memcpy((void *)pTmp[0].pac_data, 1257 (void *)pC->pC2->lastDecodedPlane[0].pac_data, 1258 (pTmp[0].u_height * pTmp[0].u_width)); 1259 memcpy((void *)pTmp[1].pac_data, 1260 (void *)pC->pC2->lastDecodedPlane[1].pac_data, 1261 (pTmp[1].u_height * pTmp[1].u_width)); 1262 memcpy((void *)pTmp[2].pac_data, 1263 (void *)pC->pC2->lastDecodedPlane[2].pac_data, 1264 (pTmp[2].u_height * pTmp[2].u_width)); 1265 } 1266 pC->pC2->lastDecodedPlane = pTmp; 1267 } 1268 1269 1270 pTmp = pPlaneOut; 1271 err = M4VSS3GPP_intVideoTransition(pC, pTmp); 1272 1273 if( M4NO_ERROR != err ) 1274 { 1275 M4OSA_TRACE1_1( 1276 "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\ 1277 returning M4NO_ERROR", 1278 err); 1279 pC->ewc.VppError = err; 1280 return M4NO_ERROR; /**< Return no error to the encoder core 1281 (else it may leak in some situations...) 
*/ 1282 } 1283 for (i=0; i < 3; i++) 1284 { 1285 if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) { 1286 free(pTempPlaneClip2[i].pac_data); 1287 pTempPlaneClip2[i].pac_data = M4OSA_NULL; 1288 } 1289 1290 if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) { 1291 free(pTempPlaneClip1[i].pac_data); 1292 pTempPlaneClip1[i].pac_data = M4OSA_NULL; 1293 } 1294 1295 if (pTemp2[i].pac_data != M4OSA_NULL) { 1296 free(pTemp2[i].pac_data); 1297 pTemp2[i].pac_data = M4OSA_NULL; 1298 } 1299 1300 if (pTemp1[i].pac_data != M4OSA_NULL) { 1301 free(pTemp1[i].pac_data); 1302 pTemp1[i].pac_data = M4OSA_NULL; 1303 } 1304 } 1305 } 1306 /** 1307 **************** No Transition case ****************/ 1308 else 1309 { 1310 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case"); 1311 /** 1312 * Compute the time in the clip base: ts = to - Offset */ 1313 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1314 /** 1315 * Render */ 1316 if (pC->pC1->isRenderDup == M4OSA_FALSE) { 1317 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false"); 1318 /** 1319 * Check if resizing is needed */ 1320 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { 1321 if ((pC->pC1->pSettings->FileType == 1322 M4VIDEOEDITING_kFileType_ARGB8888) && 1323 (pC->nbActiveEffects == 0) && 1324 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) { 1325 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1326 pC->pC1->pViDecCtxt, 1327 M4DECODER_kOptionID_EnableYuvWithEffect, 1328 (M4OSA_DataOption)M4OSA_TRUE); 1329 if (M4NO_ERROR == err ) { 1330 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1331 pC->pC1->pViDecCtxt, &ts, 1332 pPlaneOut, M4OSA_TRUE); 1333 } 1334 } else { 1335 if (pC->pC1->pSettings->FileType == 1336 M4VIDEOEDITING_kFileType_ARGB8888) { 1337 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1338 pC->pC1->pViDecCtxt, 1339 M4DECODER_kOptionID_EnableYuvWithEffect, 1340 (M4OSA_DataOption)M4OSA_FALSE); 1341 } 1342 if (M4NO_ERROR == err) { 1343 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1344 
pC->pC1->pViDecCtxt, &ts, 1345 pC->pC1->m_pPreResizeFrame, M4OSA_TRUE); 1346 } 1347 } 1348 if (M4NO_ERROR != err) { 1349 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1350 m_pFctRender() returns error 0x%x", err); 1351 pC->ewc.VppError = err; 1352 return M4NO_ERROR; 1353 } 1354 if (pC->pC1->pSettings->FileType != 1355 M4VIDEOEDITING_kFileType_ARGB8888) { 1356 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1357 // Save width and height of un-rotated frame 1358 yuvFrameWidth = pC->pC1->m_pPreResizeFrame[0].u_width; 1359 yuvFrameHeight = pC->pC1->m_pPreResizeFrame[0].u_height; 1360 err = M4VSS3GPP_intRotateVideo(pC->pC1->m_pPreResizeFrame, 1361 pC->pC1->pSettings->ClipProperties.videoRotationDegrees); 1362 if (M4NO_ERROR != err) { 1363 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1364 rotateVideo() returns error 0x%x", err); 1365 pC->ewc.VppError = err; 1366 return M4NO_ERROR; 1367 } 1368 } 1369 } 1370 1371 if (pC->nbActiveEffects > 0) { 1372 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; 1373 /** 1374 * If we do modify the image, we need an intermediate 1375 * image plane */ 1376 if (M4OSA_NULL == pTemp1[0].pac_data) { 1377 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1378 pC->pC1->m_pPreResizeFrame[0].u_width, 1379 pC->pC1->m_pPreResizeFrame[0].u_height); 1380 if (M4NO_ERROR != err) { 1381 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1382 M4VSS3GPP_intAllocateYUV420 error 0x%x", err); 1383 pC->ewc.VppError = err; 1384 return M4NO_ERROR; 1385 } 1386 } 1387 err = M4VSS3GPP_intApplyVideoEffect(pC, 1388 pC->pC1->m_pPreResizeFrame,pTemp1); 1389 if (M4NO_ERROR != err) { 1390 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1391 M4VSS3GPP_intApplyVideoEffect() error 0x%x", err); 1392 pC->ewc.VppError = err; 1393 return M4NO_ERROR; 1394 } 1395 pDecoderRenderFrame= pTemp1; 1396 1397 } else { 1398 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame; 1399 } 1400 1401 pTmp = pPlaneOut; 1402 if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) || 1403 (pC->pC1->pSettings->FileType != 1404 
M4VIDEOEDITING_kFileType_ARGB8888)) { 1405 1406 err = M4VSS3GPP_intApplyRenderingMode(pC, 1407 pC->pC1->pSettings->xVSS.MediaRendering, 1408 pDecoderRenderFrame, pTmp); 1409 if (M4NO_ERROR != err) { 1410 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1411 M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err); 1412 pC->ewc.VppError = err; 1413 return M4NO_ERROR; 1414 } 1415 } 1416 1417 if ((pC->pC1->pSettings->FileType == 1418 M4VIDEOEDITING_kFileType_ARGB8888) && 1419 (pC->nbActiveEffects == 0) && 1420 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) { 1421 1422 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1423 pC->pC1->pViDecCtxt, 1424 M4DECODER_kOptionID_YuvWithEffectNonContiguous, 1425 (M4OSA_DataOption)pTmp); 1426 if (M4NO_ERROR != err) { 1427 pC->ewc.VppError = err; 1428 return M4NO_ERROR; 1429 } 1430 pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE; 1431 } 1432 1433 // Reset original width and height for resize frame plane 1434 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees && 1435 180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1436 1437 M4VSS3GPP_intSetYUV420Plane(pC->pC1->m_pPreResizeFrame, 1438 yuvFrameWidth, yuvFrameHeight); 1439 } 1440 } 1441 else 1442 { 1443 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required"); 1444 if ((pC->nbActiveEffects > 0) || 1445 ((0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) 1446 && (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees))) { 1447 /** If we do modify the image, we need an 1448 * intermediate image plane */ 1449 if (M4OSA_NULL == pTemp1[0].pac_data) { 1450 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1451 pC->ewc.uiVideoWidth, 1452 pC->ewc.uiVideoHeight); 1453 if (M4NO_ERROR != err) { 1454 pC->ewc.VppError = err; 1455 return M4NO_ERROR; 1456 } 1457 } 1458 pDecoderRenderFrame = pTemp1; 1459 } 1460 else { 1461 pDecoderRenderFrame = pPlaneOut; 1462 } 1463 1464 pTmp = pPlaneOut; 1465 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1466 
pC->pC1->pViDecCtxt, &ts, 1467 pDecoderRenderFrame, M4OSA_TRUE); 1468 if (M4NO_ERROR != err) { 1469 pC->ewc.VppError = err; 1470 return M4NO_ERROR; 1471 } 1472 1473 if (pC->pC1->pSettings->FileType != 1474 M4VIDEOEDITING_kFileType_ARGB8888) { 1475 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1476 // Save width and height of un-rotated frame 1477 yuvFrameWidth = pDecoderRenderFrame[0].u_width; 1478 yuvFrameHeight = pDecoderRenderFrame[0].u_height; 1479 err = M4VSS3GPP_intRotateVideo(pDecoderRenderFrame, 1480 pC->pC1->pSettings->ClipProperties.videoRotationDegrees); 1481 if (M4NO_ERROR != err) { 1482 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1483 rotateVideo() returns error 0x%x", err); 1484 pC->ewc.VppError = err; 1485 return M4NO_ERROR; 1486 } 1487 1488 if (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1489 // Apply black border on rotated frame 1490 if (pC->nbActiveEffects > 0) { 1491 /** we need an intermediate image plane */ 1492 if (M4OSA_NULL == pTemp2[0].pac_data) { 1493 err = M4VSS3GPP_intAllocateYUV420(pTemp2, 1494 pC->ewc.uiVideoWidth, 1495 pC->ewc.uiVideoHeight); 1496 if (M4NO_ERROR != err) { 1497 pC->ewc.VppError = err; 1498 return M4NO_ERROR; 1499 } 1500 } 1501 err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders, 1502 pDecoderRenderFrame, pTemp2); 1503 } else { 1504 err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders, 1505 pDecoderRenderFrame, pTmp); 1506 } 1507 if (M4NO_ERROR != err) { 1508 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1509 M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err); 1510 pC->ewc.VppError = err; 1511 return M4NO_ERROR; 1512 } 1513 } 1514 } 1515 } 1516 1517 if (pC->nbActiveEffects > 0) { 1518 if ((0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) && 1519 (180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees)) { 1520 err = M4VSS3GPP_intApplyVideoEffect(pC, 1521 pTemp2,pPlaneOut); 1522 } else { 1523 err = M4VSS3GPP_intApplyVideoEffect(pC, 1524 
pDecoderRenderFrame,pPlaneOut); 1525 } 1526 if (M4NO_ERROR != err) { 1527 pC->ewc.VppError = err; 1528 return M4NO_ERROR; 1529 } 1530 } 1531 1532 // Reset original width and height for resize frame plane 1533 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees && 1534 180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1535 1536 M4VSS3GPP_intSetYUV420Plane(pDecoderRenderFrame, 1537 yuvFrameWidth, yuvFrameHeight); 1538 1539 if (pC->nbActiveEffects > 0) { 1540 free((void *)pTemp2[0].pac_data); 1541 free((void *)pTemp2[1].pac_data); 1542 free((void *)pTemp2[2].pac_data); 1543 } 1544 } 1545 } 1546 pC->pC1->lastDecodedPlane = pTmp; 1547 pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts; 1548 1549 } else { 1550 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true"); 1551 1552 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { 1553 /** 1554 * Copy last decoded plane to output plane */ 1555 memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data, 1556 (void *)pC->pC1->lastDecodedPlane[0].pac_data, 1557 (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width)); 1558 1559 memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data, 1560 (void *)pC->pC1->lastDecodedPlane[1].pac_data, 1561 (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width)); 1562 1563 memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data, 1564 (void *)pC->pC1->lastDecodedPlane[2].pac_data, 1565 (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width)); 1566 1567 if(pC->nbActiveEffects > 0) { 1568 /** 1569 * If we do modify the image, we need an 1570 * intermediate image plane */ 1571 if (M4OSA_NULL == pTemp1[0].pac_data) { 1572 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1573 pC->pC1->m_pPreResizeFrame[0].u_width, 1574 pC->pC1->m_pPreResizeFrame[0].u_height); 1575 if (M4NO_ERROR != err) { 1576 pC->ewc.VppError = err; 1577 return M4NO_ERROR; 1578 } 1579 } 1580 1581 err = M4VSS3GPP_intApplyVideoEffect(pC, 1582 
pC->pC1->m_pPreResizeFrame,pTemp1); 1583 if (M4NO_ERROR != err) { 1584 pC->ewc.VppError = err; 1585 return M4NO_ERROR; 1586 } 1587 pDecoderRenderFrame= pTemp1; 1588 1589 } else { 1590 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame; 1591 } 1592 1593 pTmp = pPlaneOut; 1594 err = M4VSS3GPP_intApplyRenderingMode(pC, 1595 pC->pC1->pSettings->xVSS.MediaRendering, 1596 pDecoderRenderFrame, pTmp); 1597 if (M4NO_ERROR != err) { 1598 pC->ewc.VppError = err; 1599 return M4NO_ERROR; 1600 } 1601 } else { 1602 1603 if (M4OSA_NULL == pTemp1[0].pac_data) { 1604 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1605 pC->ewc.uiVideoWidth, 1606 pC->ewc.uiVideoHeight); 1607 if (M4NO_ERROR != err) { 1608 pC->ewc.VppError = err; 1609 return M4NO_ERROR; 1610 } 1611 } 1612 /** 1613 * Copy last decoded plane to output plane */ 1614 memcpy((void *)pLastDecodedFrame[0].pac_data, 1615 (void *)pC->pC1->lastDecodedPlane[0].pac_data, 1616 (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width)); 1617 1618 memcpy((void *)pLastDecodedFrame[1].pac_data, 1619 (void *)pC->pC1->lastDecodedPlane[1].pac_data, 1620 (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width)); 1621 1622 memcpy((void *)pLastDecodedFrame[2].pac_data, 1623 (void *)pC->pC1->lastDecodedPlane[2].pac_data, 1624 (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width)); 1625 1626 pTmp = pPlaneOut; 1627 /** 1628 * Check if there is a filter */ 1629 if(pC->nbActiveEffects > 0) { 1630 err = M4VSS3GPP_intApplyVideoEffect(pC, 1631 pLastDecodedFrame, pTmp); 1632 if (M4NO_ERROR != err) { 1633 pC->ewc.VppError = err; 1634 return M4NO_ERROR; 1635 } 1636 } 1637 } 1638 pC->pC1->lastDecodedPlane = pTmp; 1639 } 1640 1641 M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts); 1642 1643 for(i=0;i<3;i++) { 1644 if(pTemp1[i].pac_data != M4OSA_NULL) { 1645 free(pTemp1[i].pac_data); 1646 pTemp1[i].pac_data = M4OSA_NULL; 1647 } 1648 } 1649 } 1650 1651 /** 1652 * Return */ 1653 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning 
M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
 * @brief    Apply every currently active video effect from pPlaneIn to pPlaneOut.
 *           Effects are chained: the output of one effect feeds the next via a
 *           ping-pong between pPlaneOut and a temporary YUV420 plane, arranged
 *           so the LAST effect always writes into pPlaneOut.
 * @param    pC            (IN/OUT) Internal edit context
 * @param    pPlaneIn      (IN) Input raw YUV420 image
 * @param    pPlaneOut     (IN/OUT) Output raw YUV420 image
 * @return   M4NO_ERROR: No error (effect-internal errors are remembered in
 *           pC->ewc.VppError or returned as specific VSS error codes)
 ******************************************************************************
 */
static M4OSA_ERR
M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,
                              M4VIFI_ImagePlane *pPlaneIn,
                              M4VIFI_ImagePlane *pPlaneOut )
{
    M4OSA_ERR err;

    M4VSS3GPP_ClipContext *pClip;
    M4VSS3GPP_EffectSettings *pFx;
    M4VSS3GPP_ExternalProgress extProgress;

    M4OSA_Double VideoEffectTime;
    M4OSA_Double PercentageDone;
    M4OSA_Int32 tmp;

    M4VIFI_ImagePlane *pPlaneTempIn;
    M4VIFI_ImagePlane *pPlaneTempOut;
    M4VIFI_ImagePlane pTempYuvPlane[3];
    M4OSA_UInt8 i;
    M4OSA_UInt8 NumActiveEffects =0;


    /* The active-effect list differs for the two clips of a transition:
     * the second clip uses pActiveEffectsList1/nbActiveEffects1 */
    pClip = pC->pC1;
    if (pC->bIssecondClip == M4OSA_TRUE)
    {
        NumActiveEffects = pC->nbActiveEffects1;
    }
    else
    {
        NumActiveEffects = pC->nbActiveEffects;
    }

    memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane));

    /**
    * Allocate temporary plane if needed RC
    * (a scratch plane is only required when at least two effects are chained) */
    if (M4OSA_NULL == pTempYuvPlane[0].pac_data && NumActiveEffects > 1)
    {
        err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width,
                  pPlaneOut->u_height);

        if( M4NO_ERROR != err )
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
                returning M4NO_ERROR",
                err);
            pC->ewc.VppError = err;
            return
                M4NO_ERROR; /**< Return no error to the encoder core
                            (else it may leak in some situations...) */
        }
    }

    /* Choose the starting ping-pong buffers by parity so that the final
     * iteration writes into pPlaneOut:
     * even count -> first effect writes to the scratch plane,
     * odd count  -> first effect writes directly to pPlaneOut */
    if (NumActiveEffects % 2 == 0)
    {
        pPlaneTempIn = pPlaneIn;
        pPlaneTempOut = pTempYuvPlane;
    }
    else
    {
        pPlaneTempIn = pPlaneIn;
        pPlaneTempOut = pPlaneOut;
    }

    for (i=0; i<NumActiveEffects; i++)
    {
        if (pC->bIssecondClip == M4OSA_TRUE)
        {


            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
            /* Compute how far from the beginning of the effect we are, in clip-base time. */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
                pC->pTransitionList[pC->uiCurrentClip].
                uiTransitionDuration- pFx->uiStartTime;
        }
        else
        {
            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
            /* Compute how far from the beginning of the effect we are, in clip-base time. */
            // Decorrelate input and output encoding timestamp to handle encoder prefetch
            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
        }



        /* To calculate %, substract timeIncrement because effect should finish on the last frame*/
        /* which is presented from CTS = eof-timeIncrement till CTS = eof */
        PercentageDone = VideoEffectTime
            / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);

        /* Clamp progress to [0.0, 1.0] */
        if( PercentageDone < 0.0 )
            PercentageDone = 0.0;

        if( PercentageDone > 1.0 )
            PercentageDone = 1.0;

        switch( pFx->VideoEffectType )
        {
            case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
                /**
                * Compute where we are in the effect (scale is 0->1024). */
                tmp = (M4OSA_Int32)(PercentageDone * 1024);

                /**
                * Apply the darkening effect */
                err =
                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect:\
                        M4VFL_modifyLumaWithScale returns error 0x%x,\
                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
                        err);
                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
                }
                break;

            case M4VSS3GPP_kVideoEffectType_FadeToBlack:
                /**
                * Compute where we are in the effect (scale is 0->1024)
                * (inverted: luma scale decreases as the effect progresses) */
                tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);

                /**
                * Apply the darkening effect */
                err =
                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect:\
                        M4VFL_modifyLumaWithScale returns error 0x%x,\
                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
                        err);
                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
                }
                break;

            default:
                if( pFx->VideoEffectType
                    >= M4VSS3GPP_kVideoEffectType_External )
                {
                    /* NOTE(review): Cts is never used in this branch */
                    M4OSA_UInt32 Cts = 0;
                    M4OSA_Int32 nextEffectTime;

                    /**
                    * Compute where we are in the effect (scale is 0->1000) */
                    tmp = (M4OSA_Int32)(PercentageDone * 1000);

                    /**
                    * Set the progress info provided to the external function */
                    extProgress.uiProgress = (M4OSA_UInt32)tmp;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
                    extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
                    extProgress.bIsLast = M4OSA_FALSE;
                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
                    nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
                        + pC->dOutputFrameDuration);
                    /* Flag the last frame of the effect so the plug-in can finalize */
                    if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
                    {
                        extProgress.bIsLast = M4OSA_TRUE;
                    }

                    err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
                        pPlaneTempIn, pPlaneTempOut, &extProgress,
                        pFx->VideoEffectType
                        - M4VSS3GPP_kVideoEffectType_External);

                    if( M4NO_ERROR != err )
                    {
                        M4OSA_TRACE1_1(
                            "M4VSS3GPP_intApplyVideoEffect: \
                            External video effect function returns 0x%x!",
                            err);
                        return err;
                    }
                    break;
                }
                else
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
                        returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
                        pFx->VideoEffectType);
                    return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
                }
        }
        /**
        * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect
        * (ping-pong swap: the plane just written becomes the next input; the
        * parity test keeps the last write landing in pPlaneOut) */
        if (((i % 2 == 0) && (NumActiveEffects % 2 == 0))
            || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
        {
            pPlaneTempIn = pTempYuvPlane;
            pPlaneTempOut = pPlaneOut;
        }
        else
        {
            pPlaneTempIn = pPlaneOut;
            pPlaneTempOut = pTempYuvPlane;
        }
    }

    /* Release the scratch plane, if one was allocated */
    for(i=0; i<3; i++) {
        if(pTempYuvPlane[i].pac_data != M4OSA_NULL) {
            free(pTempYuvPlane[i].pac_data);
            pTempYuvPlane[i].pac_data = M4OSA_NULL;
        }
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
 * @brief    Apply video transition effect pC1+pC2->pPlaneOut
 * @param    pC            (IN/OUT) Internal edit context
 * @param    pOutputPlanes (IN/OUT) Output raw YUV420 image
 * @return   M4NO_ERROR: No error
 ******************************************************************************
 */
static M4OSA_ERR
M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
                             M4VIFI_ImagePlane *pPlaneOut )
{
    M4OSA_ERR err;
    M4OSA_Int32 iProgress;
    M4VSS3GPP_ExternalProgress extProgress;
    M4VIFI_ImagePlane *pPlane;
    M4OSA_Int32 i;
    /* Transition duration (same time unit as the clip CTS values).
     * NOTE(review): iDur is used as a divisor below -- presumably callers only
     * invoke this while a non-zero-duration transition is active; confirm. */
    const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
        pTransitionList[pC->uiCurrentClip].uiTransitionDuration;

    /**
    * Compute how far from the end cut we are, in clip-base time.
    * It is done with integers because the offset and begin cut have been rounded already. */
    // Decorrelate input and output encoding timestamp to handle encoder prefetch
    iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
        ((M4OSA_Double)pC->pC1->iVoffset);
    /**
    * We must remove the duration of one frame, else we would almost never reach the end
    * (It's kind of a "pile and intervals" issue). */
    iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;

    if( iProgress < 0 ) /**< Sanity checks */
    {
        iProgress = 0;
    }

    /**
    * Compute where we are in the transition, on a base 1000 */
    iProgress = ( ( iDur - iProgress) * 1000) / iDur;

    /**
    * Sanity checks */
    if( iProgress < 0 )
    {
        iProgress = 0;
    }
    else if( iProgress > 1000 )
    {
        iProgress = 1000;
    }

    /* Remap the linear progress value [0..1000] onto the configured easing
     * curve before it is handed to the blending/transition filter. */
    switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
    {
        case M4VSS3GPP_TransitionBehaviour_SpeedUp:
            iProgress = ( iProgress * iProgress) / 1000;
            break;

        case M4VSS3GPP_TransitionBehaviour_Linear:
            /*do nothing*/
            break;

        case M4VSS3GPP_TransitionBehaviour_SpeedDown:
            iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
            break;

        case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
            /* Square-root shape in the first half, quadratic in the second. */
            if( iProgress < 500 )
            {
                iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
            }
            else
            {
                iProgress =
                    (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
                    / 500) + 500);
            }
            break;

        case M4VSS3GPP_TransitionBehaviour_FastMiddle:
            /* Mirror of SlowMiddle: quadratic first half, square-root second. */
            if( iProgress < 500 )
            {
                iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
            }
            else
            {
                iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
            }
            break;

        default:
            /*do nothing*/
            break;
    }

    switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
    {
        case M4VSS3GPP_kVideoTransitionType_CrossFade:
            /**
            * Apply the transition effect */
            err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
                (M4ViComImagePlane *)pC->yuv1,
                (M4ViComImagePlane *)pC->yuv2,
                (M4ViComImagePlane *)pPlaneOut, iProgress);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVideoTransition:\
                    M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
                    returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
                    err);
                return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
            }
            break;

        case M4VSS3GPP_kVideoTransitionType_None:
            /**
            * This is a stupid-non optimized version of the None transition...
            * We copy the YUV frame */
            if( iProgress < 500 ) /**< first half of transition */
            {
                pPlane = pC->yuv1;
            }
            else /**< second half of transition */
            {
                pPlane = pC->yuv2;
            }
            /**
            * Copy the input YUV frames (Y, U and V planes) */
            i = 3;

            while( i-- > 0 )
            {
                memcpy((void *)pPlaneOut[i].pac_data,
                    (void *)pPlane[i].pac_data,
                    pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
            }
            break;

        default:
            /* Values at or above the External base are dispatched to the
             * user-registered transition callback. */
            if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
                >= M4VSS3GPP_kVideoTransitionType_External )
            {
                /**
                * Set the progress info provided to the external function */
                extProgress.uiProgress = (M4OSA_UInt32)iProgress;
                // Decorrelate input and output encoding timestamp to handle encoder prefetch
                extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
                extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;

                err = pC->pTransitionList[pC->
                    uiCurrentClip].ExtVideoTransitionFct(
                    pC->pTransitionList[pC->
                    uiCurrentClip].pExtVideoTransitionFctCtxt,
                    pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
                    pC->pTransitionList[pC->
                    uiCurrentClip].VideoTransitionType
                    - M4VSS3GPP_kVideoTransitionType_External);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intVideoTransition:\
                        External video transition function returns 0x%x!",
                        err);
                    return err;
                }
                break;
            }
            else
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
                    returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
                    pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
                return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
            }
    }

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo()
 * @brief    Update bit stream time info by Counter Time System to be compliant with
 *           players using bit stream time info
 * @note     H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops
 *           (GOV, see the standard)
 * @param    pC    (IN/OUT) returns time updated video AU,
 *                 the offset between system and video time (MPEG4 only)
 *                 and the state of the current clip (MPEG4 only)
 * @return   nothing
 ******************************************************************************
 */
static M4OSA_Void
M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
                            M4SYS_AccessUnit *pAU )
{
    M4OSA_UInt8 uiTmp;
    M4OSA_UInt32 uiCts = 0;
    M4OSA_MemAddr8 pTmp;
    M4OSA_UInt32 uiAdd;
    M4OSA_UInt32 uiCurrGov;
    /* Signed GOV drift between bitstream time and system time.
     * NOTE(review): stored in 8 bits -- assumes the drift always fits; confirm. */
    M4OSA_Int8 iDiff;

    M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1;
    M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset);

    /**
    * Set H263 time counter from system time */
    if( M4SYS_kH263 == pAU->stream->streamType )
    {
        /* Convert the AU CTS to 30000/1001 fps ticks (rounded), modulo the
         * H263 temporal-reference wrap value. */
        uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5)
            % M4VSS3GPP_EDIT_H263_MODULO_TIME);
        M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress),
            uiTmp);
    }
    /*
    * Set MPEG4 GOV time counter regarding video and system time */
    else if( M4SYS_kMPEG_4 == pAU->stream->streamType )
    {
        /*
        * If GOV.
        * beware of little/big endian!
        */
        /* correction: read 8 bits block instead of one 32 bits block */
        M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress);
        M4OSA_UInt32 temp32 = 0;

        /* Rebuild the 32-bit start code byte-by-byte so the comparison is
         * endian-independent. */
        temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8))
            + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8)
            + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16)
            + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24);

        M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32,
            *(pAU->dataAddress));

        if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 )
        {
            pTmp =
                (M4OSA_MemAddr8)(pAU->dataAddress
                + 1); /**< Jump to the time code (just after the 32 bits header) */
            /* System time in ms, shifted by the accumulated GOV offset. */
            uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset);

            switch( pClipCtxt->bMpeg4GovState )
            {
                case M4OSA_FALSE: /*< INIT */
                    {
                        /* video time = ceil (system time + offset) */
                        uiCts = ( uiAdd + 999) / 1000;

                        /* offset update */
                        ( *pOffset) += (( uiCts * 1000) - uiAdd);

                        /* Save values */
                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;

                        /* State to 'first' */
                        pClipCtxt->bMpeg4GovState = M4OSA_TRUE;
                    }
                    break;

                case M4OSA_TRUE: /*< UPDATE */
                    {
                        /* Get current Gov value */
                        M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov);

                        /* video time = floor or ceil (system time + offset) */
                        uiCts = (uiAdd / 1000);
                        iDiff = (M4OSA_Int8)(uiCurrGov
                            - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts
                            + pClipCtxt->uiMpeg4PrevGovValueSet);

                        /* ceiling */
                        if( iDiff > 0 )
                        {
                            uiCts += (M4OSA_UInt32)(iDiff);

                            /* offset update */
                            ( *pOffset) += (( uiCts * 1000) - uiAdd);
                        }

                        /* Save values */
                        pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov;
                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
                    }
                    break;
            }

            /* Rewrite the GOV time code in the AU with the recomputed value. */
            M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts);
        }
    }
    return;
}

/**
 ******************************************************************************
 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects()
 * @brief    Check which video effect must be applied at the current time
 ******************************************************************************
 */
static M4OSA_Void
M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
                               M4OSA_UInt8 uiClipNumber )
{
    M4OSA_UInt8 uiClipIndex;
    M4OSA_UInt8 uiFxIndex, i;
    M4VSS3GPP_ClipContext *pClip;
    M4VSS3GPP_EffectSettings *pFx;
    M4OSA_Int32 Off, BC, EC;
    // Decorrelate input and output encoding timestamp to handle encoder prefetch
    M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;

    uiClipIndex = pC->uiCurrentClip;
    /* Clip 1 is the "current" clip; anything else refers to clip 2
     * (the incoming clip of a transition). */
    if (uiClipNumber == 1) {
        pClip = pC->pC1;
    } else {
        pClip = pC->pC2;
    }
    /**
    * Shortcuts for code readability */
    Off = pClip->iVoffset;
    BC = pClip->iActualVideoBeginCut;
    /* NOTE(review): EC is assigned but not referenced below in this function. */
    EC = pClip->iEndTime;

    i = 0;

    for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ )
    {
        /** Shortcut, reverse order because of priority between effects(EndEffect always clean )*/
        pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]);

        if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType )
        {
            /**
            * Check if there is actually a video effect */

            if(uiClipNumber ==1)
            {
                /**< Are we after the start time of the effect?
                 * or Are we into the effect duration?
                 */
                if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
                    (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
                    /**
                    * Set the active effect(s) */
                    pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;

                    /**
                    * Update counter of active effects */
                    i++;

                    /**
                    * For all external effects set this flag to true. */
                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
                    {
                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
                    }
                }

            }
            else
            {
                /**< Are we into the effect duration? (clip2 time is shifted by
                 * the transition duration) */
                if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
                    >= (M4OSA_Int32)(pFx->uiStartTime))
                    && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
                    <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
                    /**
                    * Set the active effect(s) */
                    pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;

                    /**
                    * Update counter of active effects */
                    i++;

                    /**
                    * For all external effects set this flag to true. */
                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
                    {
                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
                    }

                    /**
                    * The third effect has the highest priority, then the second one, then the first one.
                    * Hence, as soon as we found an active effect, we can get out of this loop */
                }
            }
            /* NOTE(review): this runs for EVERY non-None effect and
             * unconditionally clears the flag for non-H264 clips, overriding
             * the M4OSA_TRUE that may have just been set above. Presumably
             * intentional (full decode/encode only needed for H264), but the
             * placement inside the loop looks suspicious -- confirm. */
            if (M4VIDEOEDITING_kH264 !=
                pC->pC1->pSettings->ClipProperties.VideoStreamType) {

                // For Mpeg4 and H263 clips, full decode encode not required
                pC->m_bClipExternalHasStarted = M4OSA_FALSE;
            }
        }
    }
    if(1==uiClipNumber)
    {
        /**
        * Save number of active effects */
        pC->nbActiveEffects = i;
    }
    else
    {
        pC->nbActiveEffects1 = i;
    }

    /**
    * Change the absolut time to clip related time */
    t -= Off;

    /**
    * Check if we are on the begin cut (for clip1 only) */
    if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) )
    {
        pC->bClip1AtBeginCut = M4OSA_TRUE;
    }
    else
    {
        pC->bClip1AtBeginCut = M4OSA_FALSE;
    }

    return;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder()
 * @brief    Creates
 *           the video encoder
 * @note
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
{
    M4OSA_ERR err;
    M4ENCODER_AdvancedParams EncParams;

    /**
    * Simulate a writer interface with our specific function */
    pC->ewc.OurWriterDataInterface.pProcessAU =
        M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific,
                                but it follow the writer interface */
    pC->ewc.OurWriterDataInterface.pStartAU =
        M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific,
                              but it follow the writer interface */
    pC->ewc.OurWriterDataInterface.pWriterContext =
        (M4WRITER_Context)
        pC; /**< We give the internal context as writer context */

    /**
    * Get the encoder interface, if not already done */
    if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts )
    {
        err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI,
            pC->ewc.VideoStreamType);
        M4OSA_TRACE1_1(
            "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x",
            err);
        M4ERR_CHECK_RETURN(err);
    }

    /**
    * Set encoder shell parameters according to VSS settings */

    /* Common parameters */
    EncParams.InputFormat = M4ENCODER_kIYUV420;
    EncParams.FrameWidth = pC->ewc.uiVideoWidth;
    EncParams.FrameHeight = pC->ewc.uiVideoHeight;
    EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale;

    if( pC->bIsMMS == M4OSA_FALSE )
    {
        /* No strict regulation in video editor */
        /* Because of the effects and transitions we should allow more flexibility */
        /* Also it prevents to drop important frames (with a bad result on sheduling and
        block effetcs) */
        EncParams.bInternalRegulation = M4OSA_FALSE;
        // Variable framerate is not supported by StageFright encoders
        EncParams.FrameRate = M4ENCODER_k30_FPS;
    }
    else
    {
        /* In case of MMS mode, we need to enable bitrate regulation to be sure */
        /* to reach the targeted output file size */
        EncParams.bInternalRegulation = M4OSA_TRUE;
        EncParams.FrameRate = pC->MMSvideoFramerate;
    }

    /**
    * Other encoder settings (defaults) */
    EncParams.uiHorizontalSearchRange = 0;     /* use default */
    EncParams.uiVerticalSearchRange = 0;       /* use default */
    EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
    EncParams.uiIVopPeriod = 0;                /* use default */
    EncParams.uiMotionEstimationTools = 0;     /* M4V_MOTION_EST_TOOLS_ALL */
    EncParams.bAcPrediction = M4OSA_TRUE;      /* use AC prediction */
    EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
    EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */

    /**
    * Set the video profile and level */
    EncParams.videoProfile = pC->ewc.outputVideoProfile;
    EncParams.videoLevel= pC->ewc.outputVideoLevel;

    /* Per-codec overrides of the defaults set above. */
    switch ( pC->ewc.VideoStreamType )
    {
        case M4SYS_kH263:

            EncParams.Format = M4ENCODER_kH263;

            EncParams.uiStartingQuantizerValue = 10;
            EncParams.uiRateFactor = 1; /* default */

            EncParams.bErrorResilience = M4OSA_FALSE;
            EncParams.bDataPartitioning = M4OSA_FALSE;
            break;

        case M4SYS_kMPEG_4:

            EncParams.Format = M4ENCODER_kMPEG4;

            EncParams.uiStartingQuantizerValue = 8;
            /* Rate factor: timescale ticks per output frame, rounded to
             * nearest, clamped to at least 1 below. */
            EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration
                * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5);

            if( EncParams.uiRateFactor == 0 )
                EncParams.uiRateFactor = 1; /* default */

            if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning )
            {
                EncParams.bErrorResilience = M4OSA_FALSE;
                EncParams.bDataPartitioning = M4OSA_FALSE;
            }
            else
            {
                EncParams.bErrorResilience = M4OSA_TRUE;
                EncParams.bDataPartitioning = M4OSA_TRUE;
            }
            break;

        case M4SYS_kH264:
            M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264");

            EncParams.Format = M4ENCODER_kH264;

            EncParams.uiStartingQuantizerValue = 10;
            EncParams.uiRateFactor = 1; /* default */

            EncParams.bErrorResilience = M4OSA_FALSE;
            EncParams.bDataPartitioning = M4OSA_FALSE;
            //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS;
            break;

        default:
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x",
                pC->ewc.VideoStreamType);
            return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT;
    }

    if( pC->bIsMMS == M4OSA_FALSE )
    {
        EncParams.Bitrate = pC->xVSS.outputVideoBitrate;

    }
    else
    {
        EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */
        EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */
    }

    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit");
    /**
    * Init the video encoder (advanced settings version of the encoder Open function) */
    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext,
        &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC,
        pC->ShellAPI.pCurrentVideoEncoderExternalAPI,
        pC->ShellAPI.pCurrentVideoEncoderUserData);

    if( M4NO_ERROR != err )
    {
        M4OSA_TRACE1_1(
            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x",
            err);
        return err;
    }

    /* State machine: Closed -> Stopped -> Running; mirrored in reverse by
     * M4VSS3GPP_intDestroyVideoEncoder(). */
    pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen");

    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext,
        &pC->ewc.WriterVideoAU, &EncParams);

    if( M4NO_ERROR != err )
    {
        M4OSA_TRACE1_1(
            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x",
            err);
        return err;
    }

    pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
    M4OSA_TRACE1_0(
        "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart");

    /* pFctStart is optional in the encoder interface. */
    if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart )
    {
        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart(
            pC->ewc.pEncContext);

        if( M4NO_ERROR != err )
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x",
                err);
            return err;
        }
    }

    pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning;

    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder()
 * @brief    Destroy the video encoder
 * @note
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
{
    M4OSA_ERR err = M4NO_ERROR;

    if( M4OSA_NULL != pC->ewc.pEncContext )
    {
        /* Walk the state machine backwards: Running -> Stopped -> Closed ->
         * NoEncoder, skipping steps that do not apply to the current state. */
        if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState )
        {
            if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL )
            {
                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop(
                    pC->ewc.pEncContext);

                if( M4NO_ERROR != err )
                {
                    M4OSA_TRACE1_1(
                        "M4VSS3GPP_intDestroyVideoEncoder:\
                        pVideoEncoderGlobalFcts->pFctStop returns 0x%x",
                        err);
                    /* Well... how the heck do you handle a failed cleanup? */
                }
            }

            pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
        }

        /* Has the encoder actually been opened? Don't close it if that's not the case. */
        if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState )
        {
            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose(
                pC->ewc.pEncContext);

            if( M4NO_ERROR != err )
            {
                M4OSA_TRACE1_1(
                    "M4VSS3GPP_intDestroyVideoEncoder:\
                    pVideoEncoderGlobalFcts->pFctClose returns 0x%x",
                    err);
                /* Well... how the heck do you handle a failed cleanup? */
            }

            pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
        }

        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup(
            pC->ewc.pEncContext);

        if( M4NO_ERROR != err )
        {
            M4OSA_TRACE1_1(
                "M4VSS3GPP_intDestroyVideoEncoder:\
                pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!",
                err);
            /**< We do not return the error here because we still have stuff to free */
        }

        pC->ewc.encoderState = M4VSS3GPP_kNoEncoder;
        /**
        * Reset variable */
        pC->ewc.pEncContext = M4OSA_NULL;
    }

    /* Returns the last error encountered (M4NO_ERROR if all steps succeeded). */
    M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err);
    return err;
}

/**
 ******************************************************************************
 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter()
 * @brief    Modify the time counter of the given H263 video AU
 * @note
 * @param    pAuDataBuffer   (IN/OUT) H263 Video AU to modify
 * @param    uiCts           (IN) New time counter value
 * @return   nothing
 ******************************************************************************
 */
static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
                                                  M4OSA_UInt8 uiCts )
{
    /*
    * The H263 time counter is 8 bits located on the "x" below:
    *
    *   |--------|--------|--------|--------|
    *    ???????? ???????? ??????xx xxxxxx??
    */

    /**
    * Write the 2 bits on the third byte (high bits of the counter) */
    pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3);

    /**
    * Write the 6 bits on the fourth byte (low bits of the counter) */
    pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3);

    return;
}

/**
 ******************************************************************************
 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov()
 * @brief    Modify the time info from Group Of VOP video AU
 * @note
 * @param    pAuDataBuffer   (IN) MPEG4 Video AU to modify
 * @param    uiCtsSec        (IN) New GOV time info in second unit
 * @return   nothing
 ******************************************************************************
 */
static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
                                           M4OSA_UInt32 uiCtsSec )
{
    /*
    * The MPEG-4 time code length is 18 bits:
    *
    *     hh      mm     marker    ss
    *   xxxxx|xxx xxx      1     xxxx xx ??????
    *   |----- ---|--- -  ----|-- ------|
    */
    M4OSA_UInt8 uiHh;
    M4OSA_UInt8 uiMm;
    M4OSA_UInt8 uiSs;
    M4OSA_UInt8 uiTmp;

    /**
    * Write the 2 last bits ss */
    uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */
    pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F));

    if( uiCtsSec < 60 )
    {
        /**
        * Write the 3 last bits of mm, the marker bit (0x10 */
        pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2));

        /**
        * Write the 5 bits of hh and 3 of mm (out of 6) */
        pAuDataBuffer[0] = 0;
    }
    else
    {
        /**
        * Write the 3 last bits of mm, the marker bit (0x10 */
        uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */
        uiMm = (M4OSA_UInt8)(uiTmp % 60);
        pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2));

        if( uiTmp < 60 )
        {
            /**
            * Write the 5 bits of hh and 3 of mm (out of 6) */
            pAuDataBuffer[0] = ((uiMm >> 3));
        }
        else
        {
            /**
            * Write the 5 bits of hh and 3 of mm (out of 6) */
            uiHh = (M4OSA_UInt8)(uiTmp / 60);
            pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3));
        }
    }
    return;
}

/**
 ******************************************************************************
 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov()
 * @brief    Get the time info from Group Of VOP video AU
 * @note     Inverse of M4VSS3GPP_intSetMPEG4Gov(): unpacks hh/mm/ss from the
 *           18-bit GOV time code into a plain second count.
 * @param    pAuDataBuffer   (IN) MPEG4 Video AU to modify
 * @param    pCtsSec         (OUT) Current GOV time info in second unit
 * @return   nothing
 ******************************************************************************
 */
static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
                                           M4OSA_UInt32 *pCtsSec )
{
    /*
    * The MPEG-4 time code length is 18 bits:
    *
    *     hh      mm     marker    ss
    *   xxxxx|xxx xxx      1     xxxx xx ??????
    *   |----- ---|--- -  ----|-- ------|
    */
    M4OSA_UInt8 uiHh;
    M4OSA_UInt8 uiMm;
    M4OSA_UInt8 uiSs;
    M4OSA_UInt8 uiTmp;
    M4OSA_UInt32 uiCtsSec;

    /**
    * Read ss (6 bits split across bytes 1 and 2) */
    uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6);
    uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2);
    uiCtsSec = uiSs + uiTmp;

    /**
    * Read mm (6 bits split across bytes 0 and 1) */
    uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5);
    uiTmp = (( pAuDataBuffer[0] & 0x07) << 3);
    uiMm = uiMm + uiTmp;
    uiCtsSec = ( uiMm * 60) + uiCtsSec;

    /**
    * Read hh (top 5 bits of byte 0) */
    uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3);

    if( uiHh )
    {
        uiCtsSec = ( uiHh * 3600) + uiCtsSec;
    }

    /*
    * in sec */
    *pCtsSec = uiCtsSec;

    return;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420()
 * @brief    Allocate the three YUV 4:2:0 planes
 * @note     On any allocation failure, planes already allocated by this call
 *           are freed and reset before returning M4ERR_ALLOC.
 * @param    pPlanes  (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures
 * @param    uiWidth  (IN) Image width
 * @param    uiHeight (IN) Image height
 ******************************************************************************
 */
static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight )
{
    /* Luma plane: full resolution. */
    pPlanes[0].u_width = uiWidth;
    pPlanes[0].u_height = uiHeight;
    pPlanes[0].u_stride = uiWidth;
    pPlanes[0].u_topleft = 0;
    pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride
        * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data");

    if( M4OSA_NULL == pPlanes[0].pac_data )
    {
        M4OSA_TRACE1_0(
            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\
            returning M4ERR_ALLOC");
        return M4ERR_ALLOC;
    }

    /* Chroma planes: half resolution in both dimensions (4:2:0). */
    pPlanes[1].u_width = pPlanes[0].u_width >> 1;
    pPlanes[1].u_height = pPlanes[0].u_height >> 1;
    pPlanes[1].u_stride = pPlanes[1].u_width;
    pPlanes[1].u_topleft = 0;
    pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride
        * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data");

    if( M4OSA_NULL == pPlanes[1].pac_data )
    {
        M4OSA_TRACE1_0(
            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\
            returning M4ERR_ALLOC");
        free((void *)pPlanes[0].pac_data);
        pPlanes[0].pac_data = M4OSA_NULL;
        return M4ERR_ALLOC;
    }

    pPlanes[2].u_width = pPlanes[1].u_width;
    pPlanes[2].u_height = pPlanes[1].u_height;
    pPlanes[2].u_stride = pPlanes[2].u_width;
    pPlanes[2].u_topleft = 0;
    pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride
        * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data");

    if( M4OSA_NULL == pPlanes[2].pac_data )
    {
        M4OSA_TRACE1_0(
            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\
            returning M4ERR_ALLOC");
        free((void *)pPlanes[0].pac_data);
        free((void *)pPlanes[1].pac_data);
        pPlanes[0].pac_data = M4OSA_NULL;
        pPlanes[1].pac_data = M4OSA_NULL;
        return M4ERR_ALLOC;
    }

    /* Zero-fill all three planes (note: 0 is not "black" for U/V, which use
     * 0x80 as the neutral value -- callers overwrite the content anyway). */
    memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height);
    memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height);
    memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height);
    /**
    * Return */
    M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR");
    return M4NO_ERROR;
}

/**
******************************************************************************
* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
*                                               M4OSA_FileReadPointer* pFileReadPtr,
*                                               M4VIFI_ImagePlane* pImagePlanes,
*                                               M4OSA_UInt32 width,
*                                               M4OSA_UInt32 height);
* @brief    It Coverts and resizes a ARGB8888 image to YUV420
* @note
* @param    pFileIn         (IN) The ARGB888 input file
* @param    pFileReadPtr    (IN) Pointer on filesystem functions
* @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user.
*                           ARGB8888 image will be converted and resized to output
*                           YUV420 plane size
* @param    width           (IN) width of the ARGB8888
* @param    height          (IN) height of the ARGB8888
* @return   M4NO_ERROR: No error
* @return   M4ERR_ALLOC: memory error
* @return   M4ERR_PARAMETER: At least one of the function parameters is null
******************************************************************************
*/

M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
        M4OSA_FileReadPointer* pFileReadPtr,
        M4VIFI_ImagePlane* pImagePlanes,
        M4OSA_UInt32 width,M4OSA_UInt32 height) {
    M4OSA_Context pARGBIn;
    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
    M4OSA_UInt32 frameSize_argb = width * height * 4;
    M4OSA_UInt32 frameSize_rgb888 = width * height * 3;
    M4OSA_UInt32 i = 0,j= 0;
    M4OSA_ERR err = M4NO_ERROR;

    /* Pipeline: read raw ARGB8888 file -> strip alpha to RGB888 ->
     * (optionally resize) -> convert to YUV420 into pImagePlanes. */
    M4OSA_UInt8 *pArgbPlane =
        (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
                                                M4VS, (M4OSA_Char*)"argb data");
    if (pArgbPlane == M4OSA_NULL) {
        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \
            Failed to allocate memory for ARGB plane");
        return M4ERR_ALLOC;
    }

    /* Get file size */
    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
    if (err != M4NO_ERROR) {
        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \
            Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
        free(pArgbPlane);
        pArgbPlane = M4OSA_NULL;
        goto cleanup;
    }

    /* NOTE(review): frameSize_argb is updated by readData with the byte count
     * actually read; a short read is not detected here -- confirm callers
     * always supply a file of exactly width*height*4 bytes. */
    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane,
        &frameSize_argb);
    if (err != M4NO_ERROR) {
        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
            Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
        pFileReadPtr->closeRead(pARGBIn);
        free(pArgbPlane);
        pArgbPlane = M4OSA_NULL;
        goto cleanup;
    }

    err = pFileReadPtr->closeRead(pARGBIn);
    if(err != M4NO_ERROR) {
        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
            Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
        free(pArgbPlane);
        pArgbPlane = M4OSA_NULL;
        goto cleanup;
    }

    rgbPlane1.pac_data =
        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888,
            M4VS, (M4OSA_Char*)"RGB888 plane1");
    if(rgbPlane1.pac_data == M4OSA_NULL) {
        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
            Failed to allocate memory for rgb plane1");
        free(pArgbPlane);
        return M4ERR_ALLOC;
    }

    rgbPlane1.u_height = height;
    rgbPlane1.u_width = width;
    rgbPlane1.u_stride = width*3;
    rgbPlane1.u_topleft = 0;


    /** Remove the alpha channel (every 4th byte, i.e. byte 0 of each pixel,
     * is skipped; the remaining 3 bytes per pixel are copied in order) */
    for (i=0, j = 0; i < frameSize_argb; i++) {
        if ((i % 4) == 0) continue;
        rgbPlane1.pac_data[j] = pArgbPlane[i];
        j++;
    }
    free(pArgbPlane);

    /**
    * Check if resizing is required with color conversion */
    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) {

        frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3;
        rgbPlane2.pac_data =
            (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS,
                (M4OSA_Char*)"rgb Plane2");
        if(rgbPlane2.pac_data == M4OSA_NULL) {
            M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2");
            free(rgbPlane1.pac_data);
            return M4ERR_ALLOC;
        }
        rgbPlane2.u_height =  pImagePlanes->u_height;
        rgbPlane2.u_width = pImagePlanes->u_width;
        rgbPlane2.u_stride = pImagePlanes->u_width*3;
        rgbPlane2.u_topleft = 0;

        /* Resizing */
        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL,
                                                  &rgbPlane1, &rgbPlane2);
        free(rgbPlane1.pac_data);
        if(err != M4NO_ERROR) {
            M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err);
            free(rgbPlane2.pac_data);
            return err;
        }

        /*Converting Resized RGB888 to YUV420 */
        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
        free(rgbPlane2.pac_data);
        if(err != M4NO_ERROR) {
            M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err);
            return err;
        }
    } else {
        /* Same size: convert directly without the intermediate resize. */
        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
        if(err != M4NO_ERROR) {
            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
        }
        free(rgbPlane1.pac_data);
    }
cleanup:
    M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit");
    return err;
}

M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC,
        M4xVSS_MediaRendering renderingMode,
        M4VIFI_ImagePlane* pInplane,
        M4VIFI_ImagePlane* pOutplane) {

    M4OSA_ERR err = M4NO_ERROR;
    M4AIR_Params airParams;
    M4VIFI_ImagePlane pImagePlanesTemp[3];
    M4OSA_UInt32 i = 0;

    /* Black-border mode: pre-fill the whole output with "black" (Y=0x00,
     * U=V=0x80) so the letterbox/pillarbox margins are already set. */
    if (renderingMode == M4xVSS_kBlackBorders) {
        memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE,
            (pOutplane[0].u_height*pOutplane[0].u_stride));
        memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE,
            (pOutplane[1].u_height*pOutplane[1].u_stride));
        memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE,
            (pOutplane[2].u_height*pOutplane[2].u_stride));
    }

    if (renderingMode == M4xVSS_kResizing) {
        /**
        * Call the resize filter.
2985 * From the intermediate frame to the encoder image plane */ 2986 err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, 2987 pInplane, pOutplane); 2988 if (M4NO_ERROR != err) { 2989 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 2990 M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err); 2991 return err; 2992 } 2993 } else { 2994 M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL; 2995 M4OSA_UInt8* pOutPlaneY = 2996 pOutplane[0].pac_data + pOutplane[0].u_topleft; 2997 M4OSA_UInt8* pOutPlaneU = 2998 pOutplane[1].pac_data + pOutplane[1].u_topleft; 2999 M4OSA_UInt8* pOutPlaneV = 3000 pOutplane[2].pac_data + pOutplane[2].u_topleft; 3001 M4OSA_UInt8* pInPlaneY = M4OSA_NULL; 3002 M4OSA_UInt8* pInPlaneU = M4OSA_NULL; 3003 M4OSA_UInt8* pInPlaneV = M4OSA_NULL; 3004 3005 /* To keep media aspect ratio*/ 3006 /* Initialize AIR Params*/ 3007 airParams.m_inputCoord.m_x = 0; 3008 airParams.m_inputCoord.m_y = 0; 3009 airParams.m_inputSize.m_height = pInplane->u_height; 3010 airParams.m_inputSize.m_width = pInplane->u_width; 3011 airParams.m_outputSize.m_width = pOutplane->u_width; 3012 airParams.m_outputSize.m_height = pOutplane->u_height; 3013 airParams.m_bOutputStripe = M4OSA_FALSE; 3014 airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft; 3015 3016 /** 3017 Media rendering: Black borders*/ 3018 if (renderingMode == M4xVSS_kBlackBorders) { 3019 pImagePlanesTemp[0].u_width = pOutplane[0].u_width; 3020 pImagePlanesTemp[0].u_height = pOutplane[0].u_height; 3021 pImagePlanesTemp[0].u_stride = pOutplane[0].u_width; 3022 pImagePlanesTemp[0].u_topleft = 0; 3023 3024 pImagePlanesTemp[1].u_width = pOutplane[1].u_width; 3025 pImagePlanesTemp[1].u_height = pOutplane[1].u_height; 3026 pImagePlanesTemp[1].u_stride = pOutplane[1].u_width; 3027 pImagePlanesTemp[1].u_topleft = 0; 3028 3029 pImagePlanesTemp[2].u_width = pOutplane[2].u_width; 3030 pImagePlanesTemp[2].u_height = pOutplane[2].u_height; 3031 pImagePlanesTemp[2].u_stride = pOutplane[2].u_width; 3032 
pImagePlanesTemp[2].u_topleft = 0; 3033 3034 /** 3035 * Allocates plan in local image plane structure */ 3036 pImagePlanesTemp[0].pac_data = 3037 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3038 pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, 3039 M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ; 3040 if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) { 3041 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3042 return M4ERR_ALLOC; 3043 } 3044 pImagePlanesTemp[1].pac_data = 3045 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3046 pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, 3047 M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ; 3048 if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) { 3049 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3050 free(pImagePlanesTemp[0].pac_data); 3051 return M4ERR_ALLOC; 3052 } 3053 pImagePlanesTemp[2].pac_data = 3054 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3055 pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, 3056 M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ; 3057 if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) { 3058 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3059 free(pImagePlanesTemp[0].pac_data); 3060 free(pImagePlanesTemp[1].pac_data); 3061 return M4ERR_ALLOC; 3062 } 3063 3064 pInPlaneY = pImagePlanesTemp[0].pac_data ; 3065 pInPlaneU = pImagePlanesTemp[1].pac_data ; 3066 pInPlaneV = pImagePlanesTemp[2].pac_data ; 3067 3068 memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE, 3069 (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); 3070 memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE, 3071 (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); 3072 memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE, 3073 (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride)); 3074 3075 M4OSA_UInt32 height = 3076 (pInplane->u_height * pOutplane->u_width) /pInplane->u_width; 3077 3078 if (height <= pOutplane->u_height) { 3079 
/** 3080 * Black borders will be on the top and the bottom side */ 3081 airParams.m_outputSize.m_width = pOutplane->u_width; 3082 airParams.m_outputSize.m_height = height; 3083 /** 3084 * Number of lines at the top */ 3085 pImagePlanesTemp[0].u_topleft = 3086 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height - 3087 airParams.m_outputSize.m_height)>>1)) * 3088 pImagePlanesTemp[0].u_stride; 3089 pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height; 3090 pImagePlanesTemp[1].u_topleft = 3091 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height - 3092 (airParams.m_outputSize.m_height>>1)))>>1) * 3093 pImagePlanesTemp[1].u_stride; 3094 pImagePlanesTemp[1].u_height = 3095 airParams.m_outputSize.m_height>>1; 3096 pImagePlanesTemp[2].u_topleft = 3097 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height - 3098 (airParams.m_outputSize.m_height>>1)))>>1) * 3099 pImagePlanesTemp[2].u_stride; 3100 pImagePlanesTemp[2].u_height = 3101 airParams.m_outputSize.m_height>>1; 3102 } else { 3103 /** 3104 * Black borders will be on the left and right side */ 3105 airParams.m_outputSize.m_height = pOutplane->u_height; 3106 airParams.m_outputSize.m_width = 3107 (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height); 3108 3109 pImagePlanesTemp[0].u_topleft = 3110 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width - 3111 airParams.m_outputSize.m_width)>>1)); 3112 pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width; 3113 pImagePlanesTemp[1].u_topleft = 3114 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width - 3115 (airParams.m_outputSize.m_width>>1)))>>1); 3116 pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1; 3117 pImagePlanesTemp[2].u_topleft = 3118 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width - 3119 (airParams.m_outputSize.m_width>>1)))>>1); 3120 pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1; 3121 } 3122 3123 /** 3124 * Width and height have to be even */ 3125 airParams.m_outputSize.m_width = 
3126 (airParams.m_outputSize.m_width>>1)<<1; 3127 airParams.m_outputSize.m_height = 3128 (airParams.m_outputSize.m_height>>1)<<1; 3129 airParams.m_inputSize.m_width = 3130 (airParams.m_inputSize.m_width>>1)<<1; 3131 airParams.m_inputSize.m_height = 3132 (airParams.m_inputSize.m_height>>1)<<1; 3133 pImagePlanesTemp[0].u_width = 3134 (pImagePlanesTemp[0].u_width>>1)<<1; 3135 pImagePlanesTemp[1].u_width = 3136 (pImagePlanesTemp[1].u_width>>1)<<1; 3137 pImagePlanesTemp[2].u_width = 3138 (pImagePlanesTemp[2].u_width>>1)<<1; 3139 pImagePlanesTemp[0].u_height = 3140 (pImagePlanesTemp[0].u_height>>1)<<1; 3141 pImagePlanesTemp[1].u_height = 3142 (pImagePlanesTemp[1].u_height>>1)<<1; 3143 pImagePlanesTemp[2].u_height = 3144 (pImagePlanesTemp[2].u_height>>1)<<1; 3145 3146 /** 3147 * Check that values are coherent */ 3148 if (airParams.m_inputSize.m_height == 3149 airParams.m_outputSize.m_height) { 3150 airParams.m_inputSize.m_width = 3151 airParams.m_outputSize.m_width; 3152 } else if (airParams.m_inputSize.m_width == 3153 airParams.m_outputSize.m_width) { 3154 airParams.m_inputSize.m_height = 3155 airParams.m_outputSize.m_height; 3156 } 3157 pPlaneTemp = pImagePlanesTemp; 3158 } 3159 3160 /** 3161 * Media rendering: Cropping*/ 3162 if (renderingMode == M4xVSS_kCropping) { 3163 airParams.m_outputSize.m_height = pOutplane->u_height; 3164 airParams.m_outputSize.m_width = pOutplane->u_width; 3165 if ((airParams.m_outputSize.m_height * 3166 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width < 3167 airParams.m_inputSize.m_height) { 3168 /* Height will be cropped */ 3169 airParams.m_inputSize.m_height = 3170 (M4OSA_UInt32)((airParams.m_outputSize.m_height * 3171 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width); 3172 airParams.m_inputSize.m_height = 3173 (airParams.m_inputSize.m_height>>1)<<1; 3174 airParams.m_inputCoord.m_y = 3175 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height - 3176 airParams.m_inputSize.m_height))>>1); 3177 } else { 3178 /* Width will be 
cropped */ 3179 airParams.m_inputSize.m_width = 3180 (M4OSA_UInt32)((airParams.m_outputSize.m_width * 3181 airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height); 3182 airParams.m_inputSize.m_width = 3183 (airParams.m_inputSize.m_width>>1)<<1; 3184 airParams.m_inputCoord.m_x = 3185 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width - 3186 airParams.m_inputSize.m_width))>>1); 3187 } 3188 pPlaneTemp = pOutplane; 3189 } 3190 /** 3191 * Call AIR functions */ 3192 if (M4OSA_NULL == pC->m_air_context) { 3193 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); 3194 if(err != M4NO_ERROR) { 3195 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3196 M4AIR_create returned error 0x%x", err); 3197 goto cleanUp; 3198 } 3199 } 3200 3201 err = M4AIR_configure(pC->m_air_context, &airParams); 3202 if (err != M4NO_ERROR) { 3203 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3204 Error when configuring AIR: 0x%x", err); 3205 M4AIR_cleanUp(pC->m_air_context); 3206 goto cleanUp; 3207 } 3208 3209 err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp); 3210 if (err != M4NO_ERROR) { 3211 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3212 Error when getting AIR plane: 0x%x", err); 3213 M4AIR_cleanUp(pC->m_air_context); 3214 goto cleanUp; 3215 } 3216 3217 if (renderingMode == M4xVSS_kBlackBorders) { 3218 for (i=0; i<pOutplane[0].u_height; i++) { 3219 memcpy((void *)pOutPlaneY, (void *)pInPlaneY, 3220 pOutplane[0].u_width); 3221 pInPlaneY += pOutplane[0].u_width; 3222 pOutPlaneY += pOutplane[0].u_stride; 3223 } 3224 for (i=0; i<pOutplane[1].u_height; i++) { 3225 memcpy((void *)pOutPlaneU, (void *)pInPlaneU, 3226 pOutplane[1].u_width); 3227 pInPlaneU += pOutplane[1].u_width; 3228 pOutPlaneU += pOutplane[1].u_stride; 3229 } 3230 for (i=0; i<pOutplane[2].u_height; i++) { 3231 memcpy((void *)pOutPlaneV, (void *)pInPlaneV, 3232 pOutplane[2].u_width); 3233 pInPlaneV += pOutplane[2].u_width; 3234 pOutPlaneV += pOutplane[2].u_stride; 3235 } 3236 } 3237 } 3238cleanUp: 3239 if 
(renderingMode == M4xVSS_kBlackBorders) { 3240 for (i=0; i<3; i++) { 3241 if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) { 3242 free(pImagePlanesTemp[i].pac_data); 3243 pImagePlanesTemp[i].pac_data = M4OSA_NULL; 3244 } 3245 } 3246 } 3247 return err; 3248} 3249 3250M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( 3251 M4VSS3GPP_InternalEditContext *pC, 3252 M4VSS3GPP_ClipContext* pClipCtxt) { 3253 3254 M4OSA_ERR err= M4NO_ERROR; 3255 3256 // Allocate memory for YUV plane 3257 pClipCtxt->pPlaneYuv = 3258 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( 3259 3*sizeof(M4VIFI_ImagePlane), M4VS, 3260 (M4OSA_Char*)"pPlaneYuv"); 3261 3262 if (pClipCtxt->pPlaneYuv == M4OSA_NULL) { 3263 return M4ERR_ALLOC; 3264 } 3265 3266 pClipCtxt->pPlaneYuv[0].u_height = 3267 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight; 3268 pClipCtxt->pPlaneYuv[0].u_width = 3269 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth; 3270 pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width; 3271 pClipCtxt->pPlaneYuv[0].u_topleft = 0; 3272 3273 pClipCtxt->pPlaneYuv[0].pac_data = 3274 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( 3275 pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 1.5, 3276 M4VS, (M4OSA_Char*)"imageClip YUV data"); 3277 if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) { 3278 free(pClipCtxt->pPlaneYuv); 3279 return M4ERR_ALLOC; 3280 } 3281 3282 pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; 3283 pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; 3284 pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width; 3285 pClipCtxt->pPlaneYuv[1].u_topleft = 0; 3286 pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)( 3287 pClipCtxt->pPlaneYuv[0].pac_data + 3288 pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width); 3289 3290 pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; 3291 pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; 3292 
pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width; 3293 pClipCtxt->pPlaneYuv[2].u_topleft = 0; 3294 pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)( 3295 pClipCtxt->pPlaneYuv[1].pac_data + 3296 pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width); 3297 3298 err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 ( 3299 pClipCtxt->pSettings->pFile, 3300 pC->pOsaFileReadPtr, 3301 pClipCtxt->pPlaneYuv, 3302 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth, 3303 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight); 3304 if (M4NO_ERROR != err) { 3305 free(pClipCtxt->pPlaneYuv[0].pac_data); 3306 free(pClipCtxt->pPlaneYuv); 3307 return err; 3308 } 3309 3310 // Set the YUV data to the decoder using setoption 3311 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption ( 3312 pClipCtxt->pViDecCtxt, 3313 M4DECODER_kOptionID_DecYuvData, 3314 (M4OSA_DataOption)pClipCtxt->pPlaneYuv); 3315 if (M4NO_ERROR != err) { 3316 free(pClipCtxt->pPlaneYuv[0].pac_data); 3317 free(pClipCtxt->pPlaneYuv); 3318 return err; 3319 } 3320 3321 pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE; 3322 3323 // Allocate Yuv plane with effect 3324 pClipCtxt->pPlaneYuvWithEffect = 3325 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( 3326 3*sizeof(M4VIFI_ImagePlane), M4VS, 3327 (M4OSA_Char*)"pPlaneYuvWithEffect"); 3328 if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) { 3329 free(pClipCtxt->pPlaneYuv[0].pac_data); 3330 free(pClipCtxt->pPlaneYuv); 3331 return M4ERR_ALLOC; 3332 } 3333 3334 pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight; 3335 pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth; 3336 pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth; 3337 pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0; 3338 3339 pClipCtxt->pPlaneYuvWithEffect[0].pac_data = 3340 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( 3341 pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5, 3342 M4VS, (M4OSA_Char*)"imageClip YUV data"); 
3343 if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) { 3344 free(pClipCtxt->pPlaneYuv[0].pac_data); 3345 free(pClipCtxt->pPlaneYuv); 3346 free(pClipCtxt->pPlaneYuvWithEffect); 3347 return M4ERR_ALLOC; 3348 } 3349 3350 pClipCtxt->pPlaneYuvWithEffect[1].u_height = 3351 pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; 3352 pClipCtxt->pPlaneYuvWithEffect[1].u_width = 3353 pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1; 3354 pClipCtxt->pPlaneYuvWithEffect[1].u_stride = 3355 pClipCtxt->pPlaneYuvWithEffect[1].u_width; 3356 pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0; 3357 pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)( 3358 pClipCtxt->pPlaneYuvWithEffect[0].pac_data + 3359 pClipCtxt->pPlaneYuvWithEffect[0].u_height * pClipCtxt->pPlaneYuvWithEffect[0].u_width); 3360 3361 pClipCtxt->pPlaneYuvWithEffect[2].u_height = 3362 pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; 3363 pClipCtxt->pPlaneYuvWithEffect[2].u_width = 3364 pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1; 3365 pClipCtxt->pPlaneYuvWithEffect[2].u_stride = 3366 pClipCtxt->pPlaneYuvWithEffect[2].u_width; 3367 pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0; 3368 pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)( 3369 pClipCtxt->pPlaneYuvWithEffect[1].pac_data + 3370 pClipCtxt->pPlaneYuvWithEffect[1].u_height * pClipCtxt->pPlaneYuvWithEffect[1].u_width); 3371 3372 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 3373 pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous, 3374 (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect); 3375 if (M4NO_ERROR != err) { 3376 free(pClipCtxt->pPlaneYuv[0].pac_data); 3377 free(pClipCtxt->pPlaneYuv); 3378 free(pClipCtxt->pPlaneYuvWithEffect); 3379 return err; 3380 } 3381 3382 return M4NO_ERROR; 3383} 3384 3385M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC, 3386 M4VSS3GPP_ClipContext* pClipCtxt, 3387 M4_MediaTime ts, 3388 M4OSA_Bool bIsClip1, 3389 M4VIFI_ImagePlane *pResizePlane, 3390 
M4VIFI_ImagePlane *pPlaneNoResize, 3391 M4VIFI_ImagePlane *pPlaneOut) { 3392 3393 M4OSA_ERR err = M4NO_ERROR; 3394 M4OSA_UInt8 numEffects = 0; 3395 M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL; 3396 M4OSA_UInt32 yuvFrameWidth = 0, yuvFrameHeight = 0; 3397 M4VIFI_ImagePlane* pTmp = M4OSA_NULL; 3398 M4VIFI_ImagePlane pTemp[3]; 3399 3400 /** 3401 Check if resizing is needed */ 3402 if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) { 3403 /** 3404 * If we do modify the image, we need an intermediate image plane */ 3405 if (M4OSA_NULL == pResizePlane[0].pac_data) { 3406 err = M4VSS3GPP_intAllocateYUV420(pResizePlane, 3407 pClipCtxt->m_pPreResizeFrame[0].u_width, 3408 pClipCtxt->m_pPreResizeFrame[0].u_height); 3409 if (M4NO_ERROR != err) { 3410 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3411 M4VSS3GPP_intAllocateYUV420 returns 0x%x", err); 3412 return err; 3413 } 3414 } 3415 3416 if ((pClipCtxt->pSettings->FileType == 3417 M4VIDEOEDITING_kFileType_ARGB8888) && 3418 (pC->nbActiveEffects == 0) && 3419 (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) { 3420 3421 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 3422 pClipCtxt->pViDecCtxt, 3423 M4DECODER_kOptionID_EnableYuvWithEffect, 3424 (M4OSA_DataOption)M4OSA_TRUE); 3425 if (M4NO_ERROR == err) { 3426 pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( 3427 pClipCtxt->pViDecCtxt, &ts, 3428 pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE); 3429 } 3430 3431 } else { 3432 if (pClipCtxt->pSettings->FileType == 3433 M4VIDEOEDITING_kFileType_ARGB8888) { 3434 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 3435 pClipCtxt->pViDecCtxt, 3436 M4DECODER_kOptionID_EnableYuvWithEffect, 3437 (M4OSA_DataOption)M4OSA_FALSE); 3438 } 3439 if (M4NO_ERROR == err) { 3440 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( 3441 pClipCtxt->pViDecCtxt, &ts, 3442 pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE); 3443 } 3444 3445 } 3446 if (M4NO_ERROR != err) { 3447 
M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3448 returns error 0x%x", err); 3449 return err; 3450 } 3451 3452 if (pClipCtxt->pSettings->FileType != 3453 M4VIDEOEDITING_kFileType_ARGB8888) { 3454 if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3455 // Save width and height of un-rotated frame 3456 yuvFrameWidth = pClipCtxt->m_pPreResizeFrame[0].u_width; 3457 yuvFrameHeight = pClipCtxt->m_pPreResizeFrame[0].u_height; 3458 err = M4VSS3GPP_intRotateVideo(pClipCtxt->m_pPreResizeFrame, 3459 pClipCtxt->pSettings->ClipProperties.videoRotationDegrees); 3460 if (M4NO_ERROR != err) { 3461 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3462 rotateVideo() returns error 0x%x", err); 3463 return err; 3464 } 3465 } 3466 } 3467 3468 if (bIsClip1 == M4OSA_TRUE) { 3469 numEffects = pC->nbActiveEffects; 3470 } else { 3471 numEffects = pC->nbActiveEffects1; 3472 } 3473 3474 if ( numEffects > 0) { 3475 pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE; 3476 err = M4VSS3GPP_intApplyVideoEffect(pC, 3477 pClipCtxt->m_pPreResizeFrame, pResizePlane); 3478 if (M4NO_ERROR != err) { 3479 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3480 M4VSS3GPP_intApplyVideoEffect() err 0x%x", err); 3481 return err; 3482 } 3483 3484 pDecoderRenderFrame= pResizePlane; 3485 3486 } else { 3487 pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame; 3488 } 3489 3490 if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) || 3491 (pClipCtxt->pSettings->FileType != 3492 M4VIDEOEDITING_kFileType_ARGB8888)) { 3493 if (bIsClip1 == M4OSA_TRUE) { 3494 err = M4VSS3GPP_intApplyRenderingMode (pC, 3495 pClipCtxt->pSettings->xVSS.MediaRendering, 3496 pDecoderRenderFrame,pC->yuv1); 3497 } else { 3498 err = M4VSS3GPP_intApplyRenderingMode (pC, 3499 pClipCtxt->pSettings->xVSS.MediaRendering, 3500 pDecoderRenderFrame,pC->yuv2); 3501 } 3502 if (M4NO_ERROR != err) { 3503 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3504 M4VSS3GPP_intApplyRenderingMode error 0x%x ", err); 3505 return 
err; 3506 } 3507 3508 if (bIsClip1 == M4OSA_TRUE) { 3509 pClipCtxt->lastDecodedPlane = pC->yuv1; 3510 } else { 3511 pClipCtxt->lastDecodedPlane = pC->yuv2; 3512 } 3513 3514 } else { 3515 pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect; 3516 } 3517 3518 if ((pClipCtxt->pSettings->FileType == 3519 M4VIDEOEDITING_kFileType_ARGB8888) && 3520 (pC->nbActiveEffects == 0) && 3521 (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) { 3522 if (bIsClip1 == M4OSA_TRUE) { 3523 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 3524 pClipCtxt->pViDecCtxt, 3525 M4DECODER_kOptionID_YuvWithEffectNonContiguous, 3526 (M4OSA_DataOption)pC->yuv1); 3527 } else { 3528 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 3529 pClipCtxt->pViDecCtxt, 3530 M4DECODER_kOptionID_YuvWithEffectNonContiguous, 3531 (M4OSA_DataOption)pPlaneOut); 3532 } 3533 if (M4NO_ERROR != err) { 3534 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3535 null decoder setOption error 0x%x ", err); 3536 return err; 3537 } 3538 pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE; 3539 } 3540 3541 // Reset original width and height for resize frame plane 3542 if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees && 3543 180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3544 3545 M4VSS3GPP_intSetYUV420Plane(pClipCtxt->m_pPreResizeFrame, 3546 yuvFrameWidth, yuvFrameHeight); 3547 } 3548 3549 } else { 3550 if (bIsClip1 == M4OSA_TRUE) { 3551 numEffects = pC->nbActiveEffects; 3552 } else { 3553 numEffects = pC->nbActiveEffects1; 3554 } 3555 3556 if(numEffects > 0) { 3557 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( 3558 pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE); 3559 3560 if (M4NO_ERROR != err) { 3561 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3562 Render returns error 0x%x", err); 3563 return err; 3564 } 3565 3566 if (pClipCtxt->pSettings->FileType != 3567 M4VIDEOEDITING_kFileType_ARGB8888) { 3568 if (0 != 
pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3569 // Save width and height of un-rotated frame 3570 yuvFrameWidth = pPlaneNoResize[0].u_width; 3571 yuvFrameHeight = pPlaneNoResize[0].u_height; 3572 err = M4VSS3GPP_intRotateVideo(pPlaneNoResize, 3573 pClipCtxt->pSettings->ClipProperties.videoRotationDegrees); 3574 if (M4NO_ERROR != err) { 3575 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3576 rotateVideo() returns error 0x%x", err); 3577 return err; 3578 } 3579 } 3580 3581 if (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3582 // Apply Black Borders to rotated plane 3583 /** we need an intermediate image plane */ 3584 3585 err = M4VSS3GPP_intAllocateYUV420(pTemp, 3586 pC->ewc.uiVideoWidth, 3587 pC->ewc.uiVideoHeight); 3588 if (M4NO_ERROR != err) { 3589 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3590 memAlloc() returns error 0x%x", err); 3591 return err; 3592 } 3593 err = M4VSS3GPP_intApplyRenderingMode(pC, M4xVSS_kBlackBorders, 3594 pPlaneNoResize, pTemp); 3595 if (M4NO_ERROR != err) { 3596 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3597 M4VSS3GPP_intApplyRenderingMode() returns error 0x%x", err); 3598 free((void *)pTemp[0].pac_data); 3599 free((void *)pTemp[1].pac_data); 3600 free((void *)pTemp[2].pac_data); 3601 return err; 3602 } 3603 } 3604 } 3605 3606 if (bIsClip1 == M4OSA_TRUE) { 3607 pC->bIssecondClip = M4OSA_FALSE; 3608 if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) && 3609 (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) { 3610 err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp ,pC->yuv1); 3611 } else { 3612 err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv1); 3613 } 3614 pClipCtxt->lastDecodedPlane = pC->yuv1; 3615 } else { 3616 pC->bIssecondClip = M4OSA_TRUE; 3617 if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) && 3618 (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) { 3619 err = 
M4VSS3GPP_intApplyVideoEffect(pC, pTemp ,pC->yuv2); 3620 } else { 3621 err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv2); 3622 } 3623 pClipCtxt->lastDecodedPlane = pC->yuv2; 3624 } 3625 3626 if (M4NO_ERROR != err) { 3627 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3628 M4VSS3GPP_intApplyVideoEffect error 0x%x", err); 3629 return err; 3630 } 3631 3632 // Reset original width and height for resize frame plane 3633 if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees && 3634 180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3635 3636 M4VSS3GPP_intSetYUV420Plane(pPlaneNoResize, 3637 yuvFrameWidth, yuvFrameHeight); 3638 3639 free((void *)pTemp[0].pac_data); 3640 free((void *)pTemp[1].pac_data); 3641 free((void *)pTemp[2].pac_data); 3642 } 3643 3644 } else { 3645 3646 if ((0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) && 3647 (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees)) { 3648 pTmp = pPlaneNoResize; 3649 } else if (bIsClip1 == M4OSA_TRUE) { 3650 pTmp = pC->yuv1; 3651 } else { 3652 pTmp = pC->yuv2; 3653 } 3654 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( 3655 pClipCtxt->pViDecCtxt, &ts, pTmp, M4OSA_TRUE); 3656 if (M4NO_ERROR != err) { 3657 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3658 Render returns error 0x%x,", err); 3659 return err; 3660 } 3661 3662 if (0 == pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3663 pClipCtxt->lastDecodedPlane = pTmp; 3664 } else { 3665 // Save width and height of un-rotated frame 3666 yuvFrameWidth = pTmp[0].u_width; 3667 yuvFrameHeight = pTmp[0].u_height; 3668 err = M4VSS3GPP_intRotateVideo(pTmp, 3669 pClipCtxt->pSettings->ClipProperties.videoRotationDegrees); 3670 if (M4NO_ERROR != err) { 3671 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3672 rotateVideo() returns error 0x%x", err); 3673 return err; 3674 } 3675 3676 if (180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) { 3677 
3678 // Apply Black borders on rotated frame 3679 if (bIsClip1) { 3680 err = M4VSS3GPP_intApplyRenderingMode (pC, 3681 M4xVSS_kBlackBorders, 3682 pTmp,pC->yuv1); 3683 } else { 3684 err = M4VSS3GPP_intApplyRenderingMode (pC, 3685 M4xVSS_kBlackBorders, 3686 pTmp,pC->yuv2); 3687 } 3688 if (M4NO_ERROR != err) { 3689 M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ 3690 M4VSS3GPP_intApplyRenderingMode error 0x%x", err); 3691 return err; 3692 } 3693 3694 // Reset original width and height for noresize frame plane 3695 M4VSS3GPP_intSetYUV420Plane(pPlaneNoResize, 3696 yuvFrameWidth, yuvFrameHeight); 3697 } 3698 3699 if (bIsClip1) { 3700 pClipCtxt->lastDecodedPlane = pC->yuv1; 3701 } else { 3702 pClipCtxt->lastDecodedPlane = pC->yuv2; 3703 } 3704 } 3705 } 3706 pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts; 3707 } 3708 3709 return err; 3710} 3711 3712M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn, 3713 M4OSA_UInt32 rotationDegree) { 3714 3715 M4OSA_ERR err = M4NO_ERROR; 3716 M4VIFI_ImagePlane outPlane[3]; 3717 3718 if (rotationDegree != 180) { 3719 // Swap width and height of in plane 3720 outPlane[0].u_width = pPlaneIn[0].u_height; 3721 outPlane[0].u_height = pPlaneIn[0].u_width; 3722 outPlane[0].u_stride = outPlane[0].u_width; 3723 outPlane[0].u_topleft = 0; 3724 outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( 3725 (outPlane[0].u_stride*outPlane[0].u_height), M4VS, 3726 (M4OSA_Char*)("out Y plane for rotation")); 3727 if (outPlane[0].pac_data == M4OSA_NULL) { 3728 return M4ERR_ALLOC; 3729 } 3730 3731 outPlane[1].u_width = pPlaneIn[0].u_height/2; 3732 outPlane[1].u_height = pPlaneIn[0].u_width/2; 3733 outPlane[1].u_stride = outPlane[1].u_width; 3734 outPlane[1].u_topleft = 0; 3735 outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( 3736 (outPlane[1].u_stride*outPlane[1].u_height), M4VS, 3737 (M4OSA_Char*)("out U plane for rotation")); 3738 if (outPlane[1].pac_data == M4OSA_NULL) { 3739 free((void *)outPlane[0].pac_data); 3740 
return M4ERR_ALLOC; 3741 } 3742 3743 outPlane[2].u_width = pPlaneIn[0].u_height/2; 3744 outPlane[2].u_height = pPlaneIn[0].u_width/2; 3745 outPlane[2].u_stride = outPlane[2].u_width; 3746 outPlane[2].u_topleft = 0; 3747 outPlane[2].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc( 3748 (outPlane[2].u_stride*outPlane[2].u_height), M4VS, 3749 (M4OSA_Char*)("out V plane for rotation")); 3750 if (outPlane[2].pac_data == M4OSA_NULL) { 3751 free((void *)outPlane[0].pac_data); 3752 free((void *)outPlane[1].pac_data); 3753 return M4ERR_ALLOC; 3754 } 3755 } 3756 3757 switch(rotationDegree) { 3758 case 90: 3759 M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane); 3760 break; 3761 3762 case 180: 3763 // In plane rotation, so planeOut = planeIn 3764 M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneIn); 3765 break; 3766 3767 case 270: 3768 M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane); 3769 break; 3770 3771 default: 3772 M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree); 3773 err = M4ERR_PARAMETER; 3774 break; 3775 } 3776 3777 if (rotationDegree != 180) { 3778 memset((void *)pPlaneIn[0].pac_data, 0, 3779 (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); 3780 memset((void *)pPlaneIn[1].pac_data, 0, 3781 (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); 3782 memset((void *)pPlaneIn[2].pac_data, 0, 3783 (pPlaneIn[2].u_width*pPlaneIn[2].u_height)); 3784 // Copy Y, U and V planes 3785 memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data, 3786 (pPlaneIn[0].u_width*pPlaneIn[0].u_height)); 3787 memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data, 3788 (pPlaneIn[1].u_width*pPlaneIn[1].u_height)); 3789 memcpy((void *)pPlaneIn[2].pac_data, (void *)outPlane[2].pac_data, 3790 (pPlaneIn[2].u_width*pPlaneIn[2].u_height)); 3791 3792 free((void *)outPlane[0].pac_data); 3793 free((void *)outPlane[1].pac_data); 3794 free((void *)outPlane[2].pac_data); 3795 3796 // Swap the width and height of the in plane 3797 uint32_t 
temp = 0; 3798 temp = pPlaneIn[0].u_width; 3799 pPlaneIn[0].u_width = pPlaneIn[0].u_height; 3800 pPlaneIn[0].u_height = temp; 3801 pPlaneIn[0].u_stride = pPlaneIn[0].u_width; 3802 3803 temp = pPlaneIn[1].u_width; 3804 pPlaneIn[1].u_width = pPlaneIn[1].u_height; 3805 pPlaneIn[1].u_height = temp; 3806 pPlaneIn[1].u_stride = pPlaneIn[1].u_width; 3807 3808 temp = pPlaneIn[2].u_width; 3809 pPlaneIn[2].u_width = pPlaneIn[2].u_height; 3810 pPlaneIn[2].u_height = temp; 3811 pPlaneIn[2].u_stride = pPlaneIn[2].u_width; 3812 } 3813 3814 return err; 3815} 3816 3817M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn, 3818 M4OSA_UInt32 width, M4OSA_UInt32 height) { 3819 3820 M4OSA_ERR err = M4NO_ERROR; 3821 3822 if (planeIn == M4OSA_NULL) { 3823 M4OSA_TRACE1_0("NULL in plane, error"); 3824 return M4ERR_PARAMETER; 3825 } 3826 3827 planeIn[0].u_width = width; 3828 planeIn[0].u_height = height; 3829 planeIn[0].u_stride = planeIn[0].u_width; 3830 3831 planeIn[1].u_width = width/2; 3832 planeIn[1].u_height = height/2; 3833 planeIn[1].u_stride = planeIn[1].u_width; 3834 3835 planeIn[2].u_width = width/2; 3836 planeIn[2].u_height = height/2; 3837 planeIn[2].u_stride = planeIn[1].u_width; 3838 3839 return err; 3840} 3841