M4xVSS_internal.c revision 2e7f31d481497ce59508b00e76cc79aebd107076
1/* 2 * Copyright (C) 2004-2011 NXP Software 3 * Copyright (C) 2011 The Android Open Source Project 4 * 5 * Licensed under the Apache License, Version 2.0 (the "License"); 6 * you may not use this file except in compliance with the License. 7 * You may obtain a copy of the License at 8 * 9 * http://www.apache.org/licenses/LICENSE-2.0 10 * 11 * Unless required by applicable law or agreed to in writing, software 12 * distributed under the License is distributed on an "AS IS" BASIS, 13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 * See the License for the specific language governing permissions and 15 * limitations under the License. 16 */ 17/** 18 ****************************************************************************** 19 * @file M4xVSS_internal.c 20 * @brief Internal functions of extended Video Studio Service (Video Studio 2.1) 21 * @note 22 ****************************************************************************** 23 */ 24#include "M4OSA_Debug.h" 25#include "M4OSA_CharStar.h" 26#include "M4OSA_FileExtra.h" 27 28#include "NXPSW_CompilerSwitches.h" 29 30#include "M4VSS3GPP_API.h" 31#include "M4VSS3GPP_ErrorCodes.h" 32 33#include "M4xVSS_API.h" 34#include "M4xVSS_Internal.h" 35 36/*for rgb16 color effect*/ 37#include "M4VIFI_Defines.h" 38#include "M4VIFI_Clip.h" 39 40/** 41 * component includes */ 42#include "M4VFL_transition.h" /**< video effects */ 43 44/* Internal header file of VSS is included because of MMS use case */ 45#include "M4VSS3GPP_InternalTypes.h" 46 47/*Exif header files to add image rendering support (cropping, black borders)*/ 48#include "M4EXIFC_CommonAPI.h" 49// StageFright encoders require %16 resolution 50#include "M4ENCODER_common.h" 51 52#define TRANSPARENT_COLOR 0x7E0 53 54/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */ 55M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, 56 M4VIFI_ImagePlane *pPlaneOut); 57 58 
59/*special MCS function used only in VideoArtist and VideoStudio to open the media in the normal 60 mode. That way the media duration is accurate*/ 61extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn, 62 M4VIDEOEDITING_FileType InputFileType, 63 M4OSA_Void* pFileOut, M4OSA_Void* pTempFile); 64 65 66/** 67 ****************************************************************************** 68 * prototype M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 69 * @brief This function initializes MCS (3GP transcoder) with the given 70 * parameters 71 * @note The transcoding parameters are given by the internal xVSS context. 72 * This context contains a pointer on the current element of the 73 * chained list of MCS parameters. 74 * 75 * @param pContext (IN) Pointer on the xVSS edit context 76 * @return M4NO_ERROR: No error 77 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 78 * @return M4ERR_ALLOC: Memory allocation has failed 79 ****************************************************************************** 80 */ 81M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 82{ 83 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 84 M4OSA_ERR err; 85 M4MCS_Context mcs_context; 86 M4MCS_OutputParams Params; 87 M4MCS_EncodingParams Rates; 88 M4OSA_UInt32 i; 89 90 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 91 if(err != M4NO_ERROR) 92 { 93 M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err); 94 return err; 95 } 96 97#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS 98 /* replay recorded external decoder registrations on the MCS */ 99 for (i=0; i<M4VD_kVideoType_NB; i++) 100 { 101 if (xVSS_context->registeredExternalDecs[i].registered) 102 { 103 err = M4MCS_registerExternalVideoDecoder(mcs_context, i, 104 xVSS_context->registeredExternalDecs[i].pDecoderInterface, 105 xVSS_context->registeredExternalDecs[i].pUserData); 106 if (M4NO_ERROR != err) 107 { 108 
M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\ 109 M4MCS_registerExternalVideoDecoder() returns 0x%x!", err); 110 M4MCS_abort(mcs_context); 111 return err; 112 } 113 } 114 } 115#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */ 116 117 /* replay recorded external encoder registrations on the MCS */ 118 for (i=0; i<M4VE_kEncoderType_NB; i++) 119 { 120 if (xVSS_context->registeredExternalEncs[i].registered) 121 { 122 err = M4MCS_registerExternalVideoEncoder(mcs_context, i, 123 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 124 xVSS_context->registeredExternalEncs[i].pUserData); 125 if (M4NO_ERROR != err) 126 { 127 M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\ 128 M4MCS_registerExternalVideoEncoder() returns 0x%x!", err); 129 M4MCS_abort(mcs_context); 130 return err; 131 } 132 } 133 } 134 135 err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn, 136 xVSS_context->pMCScurrentParams->InputFileType, 137 xVSS_context->pMCScurrentParams->pFileOut, 138 xVSS_context->pMCScurrentParams->pFileTemp); 139 if (err != M4NO_ERROR) 140 { 141 M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err); 142 M4MCS_abort(mcs_context); 143 return err; 144 } 145 146 /** 147 * Fill MCS parameters with the parameters contained in the current element of the 148 MCS parameters chained list */ 149 Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType; 150 Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat; 151 Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize; 152 Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate; 153 Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat; 154 Params.OutputAudioSamplingFrequency = 155 xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency; 156 Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono; 157 Params.pOutputPCMfile = M4OSA_NULL; 158 /*FB 2008/10/20: add media rendering parameter to 
keep aspect ratio*/ 159 switch(xVSS_context->pMCScurrentParams->MediaRendering) 160 { 161 case M4xVSS_kResizing: 162 Params.MediaRendering = M4MCS_kResizing; 163 break; 164 case M4xVSS_kCropping: 165 Params.MediaRendering = M4MCS_kCropping; 166 break; 167 case M4xVSS_kBlackBorders: 168 Params.MediaRendering = M4MCS_kBlackBorders; 169 break; 170 default: 171 break; 172 } 173 /**/ 174#ifdef TIMESCALE_BUG 175 Params.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale; 176#endif 177 // new params after integrating MCS 2.0 178 // Set the number of audio effects; 0 for now. 179 Params.nbEffects = 0; 180 181 // Set the audio effect; null for now. 182 Params.pEffects = NULL; 183 184 // Set the audio effect; null for now. 185 Params.bDiscardExif = M4OSA_FALSE; 186 187 // Set the audio effect; null for now. 188 Params.bAdjustOrientation = M4OSA_FALSE; 189 // new params after integrating MCS 2.0 190 191 /** 192 * Set output parameters */ 193 err = M4MCS_setOutputParams(mcs_context, &Params); 194 if (err != M4NO_ERROR) 195 { 196 M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err); 197 M4MCS_abort(mcs_context); 198 return err; 199 } 200 201 Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate; 202 Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate; 203 Rates.BeginCutTime = 0; 204 Rates.EndCutTime = 0; 205 Rates.OutputFileSize = 0; 206 207 /*FB: transcoding per parts*/ 208 Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime; 209 Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime; 210 Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale; 211 212 err = M4MCS_setEncodingParams(mcs_context, &Rates); 213 if (err != M4NO_ERROR) 214 { 215 M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err); 216 M4MCS_abort(mcs_context); 217 return err; 218 } 219 220 err = M4MCS_checkParamsAndStart(mcs_context); 221 if (err != M4NO_ERROR) 222 { 223 
M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err); 224 M4MCS_abort(mcs_context); 225 return err; 226 } 227 228 /** 229 * Save MCS context to be able to call MCS step function in M4xVSS_step function */ 230 xVSS_context->pMCS_Ctxt = mcs_context; 231 232 return M4NO_ERROR; 233} 234 235/** 236 ****************************************************************************** 237 * prototype M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 238 * @brief This function cleans up MCS (3GP transcoder) 239 * @note 240 * 241 * @param pContext (IN) Pointer on the xVSS edit context 242 * @return M4NO_ERROR: No error 243 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 244 * @return M4ERR_ALLOC: Memory allocation has failed 245 ****************************************************************************** 246 */ 247M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 248{ 249 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 250 M4OSA_ERR err; 251 252 err = M4MCS_close(xVSS_context->pMCS_Ctxt); 253 if (err != M4NO_ERROR) 254 { 255 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err); 256 M4MCS_abort(xVSS_context->pMCS_Ctxt); 257 return err; 258 } 259 260 /** 261 * Free this MCS instance */ 262 err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt); 263 if (err != M4NO_ERROR) 264 { 265 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err); 266 return err; 267 } 268 269 xVSS_context->pMCS_Ctxt = M4OSA_NULL; 270 271 return M4NO_ERROR; 272} 273 274/** 275 ****************************************************************************** 276 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 277 * M4OSA_FileReadPointer* pFileReadPtr, 278 * M4VIFI_ImagePlane* pImagePlanes, 279 * M4OSA_UInt32 width, 280 * M4OSA_UInt32 height); 281 * @brief It Coverts and resizes a ARGB8888 image to YUV420 282 * @note 283 * @param pFileIn (IN) The Image input file 284 * 
@param pFileReadPtr (IN) Pointer on filesystem functions 285 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 286 * ARGB8888 image will be converted and resized to output 287 * YUV420 plane size 288 *@param width (IN) width of the ARGB8888 289 *@param height (IN) height of the ARGB8888 290 * @return M4NO_ERROR: No error 291 * @return M4ERR_ALLOC: memory error 292 * @return M4ERR_PARAMETER: At least one of the function parameters is null 293 ****************************************************************************** 294 */ 295 296M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 297 M4OSA_FileReadPointer* pFileReadPtr, 298 M4VIFI_ImagePlane* pImagePlanes, 299 M4OSA_UInt32 width,M4OSA_UInt32 height) 300{ 301 M4OSA_Context pARGBIn; 302 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; 303 M4OSA_UInt32 frameSize_argb=(width * height * 4); 304 M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data. 305 M4OSA_UInt32 i = 0,j= 0; 306 M4OSA_ERR err=M4NO_ERROR; 307 308 309 M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, 310 M4VS, (M4OSA_Char*)"Image argb data"); 311 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :"); 312 if(pTmpData == M4OSA_NULL) { 313 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 314 Failed to allocate memory for Image clip"); 315 return M4ERR_ALLOC; 316 } 317 318 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d", 319 width ,height); 320 /* Get file size (mandatory for chunk decoding) */ 321 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); 322 if(err != M4NO_ERROR) 323 { 324 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 325 Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 326 M4OSA_free((M4OSA_MemAddr32)pTmpData); 327 pTmpData = M4OSA_NULL; 328 goto cleanup; 329 } 330 331 err = 
pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); 332 if(err != M4NO_ERROR) 333 { 334 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888\ 335 file %s, error: 0x%x\n",pFileIn, err); 336 pFileReadPtr->closeRead(pARGBIn); 337 M4OSA_free((M4OSA_MemAddr32)pTmpData); 338 pTmpData = M4OSA_NULL; 339 goto cleanup; 340 } 341 342 err = pFileReadPtr->closeRead(pARGBIn); 343 if(err != M4NO_ERROR) 344 { 345 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \ 346 file %s, error: 0x%x\n",pFileIn, err); 347 M4OSA_free((M4OSA_MemAddr32)pTmpData); 348 pTmpData = M4OSA_NULL; 349 goto cleanup; 350 } 351 352 rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS, 353 (M4OSA_Char*)"Image clip RGB888 data"); 354 if(rgbPlane1.pac_data == M4OSA_NULL) 355 { 356 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \ 357 Failed to allocate memory for Image clip"); 358 M4OSA_free((M4OSA_MemAddr32)pTmpData); 359 return M4ERR_ALLOC; 360 } 361 362 rgbPlane1.u_height = height; 363 rgbPlane1.u_width = width; 364 rgbPlane1.u_stride = width*3; 365 rgbPlane1.u_topleft = 0; 366 367 368 /** Remove the alpha channel */ 369 for (i=0, j = 0; i < frameSize_argb; i++) { 370 if ((i % 4) == 0) continue; 371 rgbPlane1.pac_data[j] = pTmpData[i]; 372 j++; 373 } 374 M4OSA_free((M4OSA_MemAddr32)pTmpData); 375 376 /* To Check if resizing is required with color conversion */ 377 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) 378 { 379 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :"); 380 frameSize = ( pImagePlanes->u_width * pImagePlanes->u_height * 3); 381 rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS, 382 (M4OSA_Char*)"Image clip RGB888 data"); 383 if(rgbPlane2.pac_data == M4OSA_NULL) 384 { 385 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 386 M4OSA_free((M4OSA_MemAddr32)pTmpData); 387 return M4ERR_ALLOC; 388 } 389 
rgbPlane2.u_height = pImagePlanes->u_height; 390 rgbPlane2.u_width = pImagePlanes->u_width; 391 rgbPlane2.u_stride = pImagePlanes->u_width*3; 392 rgbPlane2.u_topleft = 0; 393 394 /* Resizing RGB888 to RGB888 */ 395 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2); 396 if(err != M4NO_ERROR) 397 { 398 M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err); 399 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 400 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 401 return err; 402 } 403 /*Converting Resized RGB888 to YUV420 */ 404 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes); 405 if(err != M4NO_ERROR) 406 { 407 M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err); 408 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 409 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 410 return err; 411 } 412 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 413 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 414 415 M4OSA_TRACE1_0("RGB to YUV done"); 416 417 418 } 419 else 420 { 421 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO Resizing :"); 422 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes); 423 if(err != M4NO_ERROR) 424 { 425 M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err); 426 } 427 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 428 429 M4OSA_TRACE1_0("RGB to YUV done"); 430 } 431cleanup: 432 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :"); 433 return err; 434} 435 436/** 437 ****************************************************************************** 438 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 439 * M4OSA_FileReadPointer* pFileReadPtr, 440 * M4VIFI_ImagePlane* pImagePlanes, 441 * M4OSA_UInt32 width, 442 * M4OSA_UInt32 height); 443 * @brief It Coverts a ARGB8888 image to YUV420 444 * @note 445 * @param pFileIn (IN) The Image input file 446 * @param pFileReadPtr 
(IN) Pointer on filesystem functions 447 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 448 * ARGB8888 image will be converted and resized to output 449 * YUV420 plane size 450 * @param width (IN) width of the ARGB8888 451 * @param height (IN) height of the ARGB8888 452 * @return M4NO_ERROR: No error 453 * @return M4ERR_ALLOC: memory error 454 * @return M4ERR_PARAMETER: At least one of the function parameters is null 455 ****************************************************************************** 456 */ 457 458M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 459 M4OSA_FileReadPointer* pFileReadPtr, 460 M4VIFI_ImagePlane** pImagePlanes, 461 M4OSA_UInt32 width,M4OSA_UInt32 height) 462{ 463 M4OSA_ERR err = M4NO_ERROR; 464 M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL; 465 466 yuvPlane = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), 467 M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV"); 468 if(yuvPlane == M4OSA_NULL) { 469 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 470 Failed to allocate memory for Image clip"); 471 return M4ERR_ALLOC; 472 } 473 yuvPlane[0].u_height = height; 474 yuvPlane[0].u_width = width; 475 yuvPlane[0].u_stride = width; 476 yuvPlane[0].u_topleft = 0; 477 yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(yuvPlane[0].u_height \ 478 * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data"); 479 480 yuvPlane[1].u_height = yuvPlane[0].u_height >>1; 481 yuvPlane[1].u_width = yuvPlane[0].u_width >> 1; 482 yuvPlane[1].u_stride = yuvPlane[1].u_width; 483 yuvPlane[1].u_topleft = 0; 484 yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \ 485 * yuvPlane[0].u_width); 486 487 yuvPlane[2].u_height = yuvPlane[0].u_height >>1; 488 yuvPlane[2].u_width = yuvPlane[0].u_width >> 1; 489 yuvPlane[2].u_stride = yuvPlane[2].u_width; 490 yuvPlane[2].u_topleft = 0; 491 yuvPlane[2].pac_data = 
(M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \ 492 * yuvPlane[1].u_width); 493 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr, 494 yuvPlane, width, height); 495 if(err != M4NO_ERROR) 496 { 497 M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err); 498 M4OSA_free((M4OSA_MemAddr32)yuvPlane); 499 return err; 500 } 501 502 *pImagePlanes = yuvPlane; 503 504 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving"); 505 return err; 506 507} 508 509/** 510 ****************************************************************************** 511 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt, 512 * M4VIFI_ImagePlane* pImagePlanes, 513 * M4OSA_UInt32* pPictureDuration); 514 * @brief It feeds the PTO3GPP with YUV420 pictures. 515 * @note This function is given to the PTO3GPP in the M4PTO3GPP_Params structure 516 * @param pContext (IN) The integrator own context 517 * @param pImagePlanes(IN/OUT) Pointer to an array of three valid image planes 518 * @param pPictureDuration(OUT) Duration of the returned picture 519 * 520 * @return M4NO_ERROR: No error 521 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 522 * @return M4ERR_PARAMETER: At least one of the function parameters is null 523 ****************************************************************************** 524 */ 525M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes, 526 M4OSA_Double* pPictureDuration) 527{ 528 M4OSA_ERR err = M4NO_ERROR; 529 M4OSA_UInt8 last_frame_flag = 0; 530 M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt); 531 532 /*Used for pan&zoom*/ 533 M4OSA_UInt8 tempPanzoomXa = 0; 534 M4OSA_UInt8 tempPanzoomXb = 0; 535 M4AIR_Params Params; 536 /**/ 537 538 /*Used for cropping and black borders*/ 539 M4OSA_Context pPictureContext = M4OSA_NULL; 540 M4OSA_FilePosition pictureSize = 0 ; 541 M4OSA_UInt8* pictureBuffer = 
M4OSA_NULL; 542 //M4EXIFC_Context pExifContext = M4OSA_NULL; 543 M4EXIFC_BasicTags pBasicTags; 544 M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0]; 545 M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1]; 546 M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2]; 547 /**/ 548 549 /** 550 * Check input parameters */ 551 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt), M4ERR_PARAMETER, 552 "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL"); 553 M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes), M4ERR_PARAMETER, 554 "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL"); 555 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER, 556 "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL"); 557 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering"); 558 /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/ 559 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0) 560 { 561 pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE; 562 } 563 564 /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/ 565 if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 566 { 567 /** 568 * Convert and resize input ARGB8888 file to YUV420 */ 569 /*To support ARGB8888 : */ 570 M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and heght %d %d", 571 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 572 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn, 573 pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width, 574 pC->m_pPto3GPPparams->height); 575 if(err != M4NO_ERROR) 576 { 577 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 578 return err; 579 } 580 } 581 /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/ 582 else 583 { 584 /** 585 * Computes ratios */ 586 if(pC->m_pDecodedPlane == M4OSA_NULL) 587 { 588 /** 589 * Convert input ARGB8888 file to YUV420 */ 590 
M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and heght %d %d", 591 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 592 err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr, 593 &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 594 if(err != M4NO_ERROR) 595 { 596 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 597 if(pC->m_pDecodedPlane != M4OSA_NULL) 598 { 599 /* YUV420 planar is returned but allocation is made only once 600 (contigous planes in memory) */ 601 if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL) 602 { 603 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane->pac_data); 604 } 605 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 606 pC->m_pDecodedPlane = M4OSA_NULL; 607 } 608 return err; 609 } 610 } 611 612 /*Initialize AIR Params*/ 613 Params.m_inputCoord.m_x = 0; 614 Params.m_inputCoord.m_y = 0; 615 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 616 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 617 Params.m_outputSize.m_width = pImagePlanes->u_width; 618 Params.m_outputSize.m_height = pImagePlanes->u_height; 619 Params.m_bOutputStripe = M4OSA_FALSE; 620 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 621 622 /*Initialize Exif params structure*/ 623 pBasicTags.orientation = M4COMMON_kOrientationUnknown; 624 625 /** 626 Pan&zoom params*/ 627 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom) 628 { 629 /*Save ratio values, they can be reused if the new ratios are 0*/ 630 tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa; 631 tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb; 632#if 0 633 /** 634 * Check size of output JPEG is compatible with pan & zoom parameters 635 First, check final (b) parameters */ 636 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 100 ) 637 { 638 M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings !!!\ 639 New final Zoom ratio is: %d", 
(100 - pC->m_pPto3GPPparams->PanZoomTopleftXb)); 640 /* We do not change the topleft parameter as it may correspond to a precise area 641 of the picture -> only the zoom ratio is modified */ 642 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXb; 643 } 644 645 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 100 ) 646 { 647 M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings \ 648 !!! New final Zoom ratio is: %d", 649 (100 - pC->m_pPto3GPPparams->PanZoomTopleftYb)); 650 /* We do not change the topleft parameter as it may correspond to a 651 precise area of the picture -> only the zoom ratio is modified */ 652 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYb; 653 } 654 655 /** 656 * Then, check initial (a) parameters */ 657 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 100 ) 658 { 659 M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! \ 660 New initial Zoom ratio is: %d",(100 - pC->m_pPto3GPPparams->PanZoomTopleftXa)); 661 /* We do not change the topleft parameter as it may correspond to a precise 662 area of the picture-> only the zoom ratio is modified */ 663 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXa; 664 } 665 666 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 100 ) 667 { 668 M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! 
New initial\ 669 Zoom ratio is: %d", (100 - pC->m_pPto3GPPparams->PanZoomTopleftYa)); 670 /* We do not change the topleft parameter as it may correspond to a precise 671 area of the picture-> only the zoom ratio is modified */ 672 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYa; 673 } 674#endif 675 /*Check that the ratio is not 0*/ 676 /*Check (a) parameters*/ 677 if(pC->m_pPto3GPPparams->PanZoomXa == 0) 678 { 679 M4OSA_UInt8 maxRatio = 0; 680 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= 681 pC->m_pPto3GPPparams->PanZoomTopleftYa) 682 { 683 /*The ratio is 0, that means the area of the picture defined with (a) 684 parameters is bigger than the image size*/ 685 if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000) 686 { 687 /*The oversize is maxRatio*/ 688 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000; 689 } 690 } 691 else 692 { 693 /*The ratio is 0, that means the area of the picture defined with (a) 694 parameters is bigger than the image size*/ 695 if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000) 696 { 697 /*The oversize is maxRatio*/ 698 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000; 699 } 700 } 701 /*Modify the (a) parameters:*/ 702 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio) 703 { 704 /*The (a) topleft parameters can be moved to keep the same area size*/ 705 pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio; 706 } 707 else 708 { 709 /*Move the (a) topleft parameter to 0 but the ratio will be also further 710 modified to match the image size*/ 711 pC->m_pPto3GPPparams->PanZoomTopleftXa = 0; 712 } 713 if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio) 714 { 715 /*The (a) topleft parameters can be moved to keep the same area size*/ 716 pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio; 717 } 718 else 719 { 720 /*Move the (a) topleft parameter to 0 but the ratio will be also further 721 modified to match the image size*/ 722 
pC->m_pPto3GPPparams->PanZoomTopleftYa = 0; 723 } 724 /*The new ratio is the original one*/ 725 pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa; 726 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000) 727 { 728 /*Change the ratio if the area of the picture defined with (a) parameters is 729 bigger than the image size*/ 730 pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa; 731 } 732 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000) 733 { 734 /*Change the ratio if the area of the picture defined with (a) parameters is 735 bigger than the image size*/ 736 pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa; 737 } 738 } 739 /*Check (b) parameters*/ 740 if(pC->m_pPto3GPPparams->PanZoomXb == 0) 741 { 742 M4OSA_UInt8 maxRatio = 0; 743 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= 744 pC->m_pPto3GPPparams->PanZoomTopleftYb) 745 { 746 /*The ratio is 0, that means the area of the picture defined with (b) 747 parameters is bigger than the image size*/ 748 if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000) 749 { 750 /*The oversize is maxRatio*/ 751 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000; 752 } 753 } 754 else 755 { 756 /*The ratio is 0, that means the area of the picture defined with (b) 757 parameters is bigger than the image size*/ 758 if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000) 759 { 760 /*The oversize is maxRatio*/ 761 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000; 762 } 763 } 764 /*Modify the (b) parameters:*/ 765 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio) 766 { 767 /*The (b) topleft parameters can be moved to keep the same area size*/ 768 pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio; 769 } 770 else 771 { 772 /*Move the (b) topleft parameter to 0 but the ratio will be also further 773 modified to match the image size*/ 774 
pC->m_pPto3GPPparams->PanZoomTopleftXb = 0; 775 } 776 if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio) 777 { 778 /*The (b) topleft parameters can be moved to keep the same area size*/ 779 pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio; 780 } 781 else 782 { 783 /*Move the (b) topleft parameter to 0 but the ratio will be also further 784 modified to match the image size*/ 785 pC->m_pPto3GPPparams->PanZoomTopleftYb = 0; 786 } 787 /*The new ratio is the original one*/ 788 pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb; 789 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000) 790 { 791 /*Change the ratio if the area of the picture defined with (b) parameters is 792 bigger than the image size*/ 793 pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb; 794 } 795 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000) 796 { 797 /*Change the ratio if the area of the picture defined with (b) parameters is 798 bigger than the image size*/ 799 pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb; 800 } 801 } 802 803 /** 804 * Computes AIR parameters */ 805/* Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 806 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 807 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \ 808 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 809 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 810 Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 811 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 812 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\ 813 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 814 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 815 816 Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 817 (pC->m_pPto3GPPparams->PanZoomXa + 818 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 819 
pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 820 821 Params.m_inputSize.m_height = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 822 (pC->m_pPto3GPPparams->PanZoomXa + 823 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 824 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 825 */ 826 // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter 827 // will be x-1 max for x no. of frames 828 Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 829 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 830 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\ 831 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 832 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 833 Params.m_inputCoord.m_y = 834 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 835 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 836 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\ 837 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 838 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 839 840 Params.m_inputSize.m_width = 841 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 842 (pC->m_pPto3GPPparams->PanZoomXa + 843 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\ 844 - pC->m_pPto3GPPparams->PanZoomXa) * 845 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 846 847 Params.m_inputSize.m_height = 848 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 849 (pC->m_pPto3GPPparams->PanZoomXa + 850 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \ 851 - pC->m_pPto3GPPparams->PanZoomXa) * 852 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 853 854 if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\ 855 > pC->m_pDecodedPlane->u_width) 856 { 857 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \ 858 - Params.m_inputCoord.m_x; 859 } 860 861 
if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\ 862 > pC->m_pDecodedPlane->u_height) 863 { 864 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\ 865 - Params.m_inputCoord.m_y; 866 } 867 868 869 870 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 871 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 872 } 873 874 875 876 /** 877 Picture rendering: Black borders*/ 878 879 if(pC->m_mediaRendering == M4xVSS_kBlackBorders) 880 { 881 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[0].pac_data, 882 (pImagePlanes[0].u_height*pImagePlanes[0].u_stride),Y_PLANE_BORDER_VALUE); 883 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[1].pac_data, 884 (pImagePlanes[1].u_height*pImagePlanes[1].u_stride),U_PLANE_BORDER_VALUE); 885 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[2].pac_data, 886 (pImagePlanes[2].u_height*pImagePlanes[2].u_stride),V_PLANE_BORDER_VALUE); 887 888 /** 889 First without pan&zoom*/ 890 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 891 { 892 switch(pBasicTags.orientation) 893 { 894 default: 895 case M4COMMON_kOrientationUnknown: 896 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 897 case M4COMMON_kOrientationTopLeft: 898 case M4COMMON_kOrientationTopRight: 899 case M4COMMON_kOrientationBottomRight: 900 case M4COMMON_kOrientationBottomLeft: 901 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 902 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 903 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 904 { 905 /*it is height so black borders will be on the top and on the bottom side*/ 906 Params.m_outputSize.m_width = pImagePlanes->u_width; 907 Params.m_outputSize.m_height = 908 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \ 909 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 910 /*number of lines at the top*/ 911 pImagePlanes[0].u_topleft = 912 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 913 
-Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 914 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 915 pImagePlanes[1].u_topleft = 916 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 917 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride; 918 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 919 pImagePlanes[2].u_topleft = 920 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 921 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride; 922 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 923 } 924 else 925 { 926 /*it is width so black borders will be on the left and right side*/ 927 Params.m_outputSize.m_height = pImagePlanes->u_height; 928 Params.m_outputSize.m_width = 929 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 930 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 931 932 pImagePlanes[0].u_topleft = 933 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 934 -Params.m_outputSize.m_width)>>1)); 935 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 936 pImagePlanes[1].u_topleft = 937 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 938 -(Params.m_outputSize.m_width>>1)))>>1); 939 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 940 pImagePlanes[2].u_topleft = 941 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 942 -(Params.m_outputSize.m_width>>1)))>>1); 943 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 944 } 945 break; 946 case M4COMMON_kOrientationLeftTop: 947 case M4COMMON_kOrientationLeftBottom: 948 case M4COMMON_kOrientationRightTop: 949 case M4COMMON_kOrientationRightBottom: 950 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 951 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 952 //Params.m_inputSize.m_height > Params.m_inputSize.m_width) 953 { 954 /*it is height so black borders will be on the top and on 955 the bottom side*/ 956 Params.m_outputSize.m_height = pImagePlanes->u_width; 957 
Params.m_outputSize.m_width = 958 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 959 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height); 960 /*number of lines at the top*/ 961 pImagePlanes[0].u_topleft = 962 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 963 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 964 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 965 pImagePlanes[1].u_topleft = 966 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 967 -(Params.m_outputSize.m_width>>1)))>>1)\ 968 *pImagePlanes[1].u_stride)+1; 969 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 970 pImagePlanes[2].u_topleft = 971 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 972 -(Params.m_outputSize.m_width>>1)))>>1)\ 973 *pImagePlanes[2].u_stride)+1; 974 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 975 } 976 else 977 { 978 /*it is width so black borders will be on the left and right side*/ 979 Params.m_outputSize.m_width = pImagePlanes->u_height; 980 Params.m_outputSize.m_height = 981 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 982 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width); 983 984 pImagePlanes[0].u_topleft = 985 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 986 -Params.m_outputSize.m_height))>>1))+1; 987 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 988 pImagePlanes[1].u_topleft = 989 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 990 -(Params.m_outputSize.m_height>>1)))>>1))+1; 991 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 992 pImagePlanes[2].u_topleft = 993 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 994 -(Params.m_outputSize.m_height>>1)))>>1))+1; 995 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 996 } 997 break; 998 } 999 } 1000 1001 /** 1002 Secondly with pan&zoom*/ 1003 else 1004 { 1005 switch(pBasicTags.orientation) 1006 { 1007 default: 1008 case M4COMMON_kOrientationUnknown: 1009 Params.m_outputOrientation = 
M4COMMON_kOrientationTopLeft; 1010 case M4COMMON_kOrientationTopLeft: 1011 case M4COMMON_kOrientationTopRight: 1012 case M4COMMON_kOrientationBottomRight: 1013 case M4COMMON_kOrientationBottomLeft: 1014 /*NO ROTATION*/ 1015 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 1016 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 1017 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 1018 { 1019 /*Black borders will be on the top and bottom of the output video*/ 1020 /*Maximum output height if the input image aspect ratio is kept and if 1021 the output width is the screen width*/ 1022 M4OSA_UInt32 tempOutputSizeHeight = 1023 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 1024 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 1025 M4OSA_UInt32 tempInputSizeHeightMax = 0; 1026 M4OSA_UInt32 tempFinalInputHeight = 0; 1027 /*The output width is the screen width*/ 1028 Params.m_outputSize.m_width = pImagePlanes->u_width; 1029 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 1030 1031 /*Maximum input height according to the maximum output height 1032 (proportional to the maximum output height)*/ 1033 tempInputSizeHeightMax = (pImagePlanes->u_height\ 1034 *Params.m_inputSize.m_height)/tempOutputSizeHeight; 1035 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 1036 1037 /*Check if the maximum possible input height is contained into the 1038 input image height*/ 1039 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height) 1040 { 1041 /*The maximum possible input height is contained in the input 1042 image height, 1043 that means no black borders, the input pan zoom area will be extended 1044 so that the input AIR height will be the maximum possible*/ 1045 if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 1046 <= Params.m_inputCoord.m_y 1047 && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 1048 <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\ 1049 + 
Params.m_inputSize.m_height)) 1050 { 1051 /*The input pan zoom area can be extended symmetrically on the 1052 top and bottom side*/ 1053 Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \ 1054 - Params.m_inputSize.m_height)>>1); 1055 } 1056 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 1057 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 1058 { 1059 /*There is not enough place above the input pan zoom area to 1060 extend it symmetrically, 1061 so extend it to the maximum on the top*/ 1062 Params.m_inputCoord.m_y = 0; 1063 } 1064 else 1065 { 1066 /*There is not enough place below the input pan zoom area to 1067 extend it symmetrically, 1068 so extend it to the maximum on the bottom*/ 1069 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \ 1070 - tempInputSizeHeightMax; 1071 } 1072 /*The input height of the AIR is the maximum possible height*/ 1073 Params.m_inputSize.m_height = tempInputSizeHeightMax; 1074 } 1075 else 1076 { 1077 /*The maximum possible input height is greater than the input 1078 image height, 1079 that means black borders are necessary to keep aspect ratio 1080 The input height of the AIR is all the input image height*/ 1081 Params.m_outputSize.m_height = 1082 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\ 1083 /Params.m_inputSize.m_height; 1084 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1085 Params.m_inputCoord.m_y = 0; 1086 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1087 pImagePlanes[0].u_topleft = 1088 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1089 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 1090 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 1091 pImagePlanes[1].u_topleft = 1092 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1093 -(Params.m_outputSize.m_height>>1)))>>1)\ 1094 *pImagePlanes[1].u_stride); 1095 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 1096 pImagePlanes[2].u_topleft = 1097 
((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1098 -(Params.m_outputSize.m_height>>1)))>>1)\ 1099 *pImagePlanes[2].u_stride); 1100 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 1101 } 1102 } 1103 else 1104 { 1105 /*Black borders will be on the left and right side of the output video*/ 1106 /*Maximum output width if the input image aspect ratio is kept and if the 1107 output height is the screen height*/ 1108 M4OSA_UInt32 tempOutputSizeWidth = 1109 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 1110 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 1111 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1112 M4OSA_UInt32 tempFinalInputWidth = 0; 1113 /*The output height is the screen height*/ 1114 Params.m_outputSize.m_height = pImagePlanes->u_height; 1115 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1116 1117 /*Maximum input width according to the maximum output width 1118 (proportional to the maximum output width)*/ 1119 tempInputSizeWidthMax = 1120 (pImagePlanes->u_width*Params.m_inputSize.m_width)\ 1121 /tempOutputSizeWidth; 1122 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1123 1124 /*Check if the maximum possible input width is contained into the input 1125 image width*/ 1126 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width) 1127 { 1128 /*The maximum possible input width is contained in the input 1129 image width, 1130 that means no black borders, the input pan zoom area will be extended 1131 so that the input AIR width will be the maximum possible*/ 1132 if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \ 1133 <= Params.m_inputCoord.m_x 1134 && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\ 1135 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1136 + Params.m_inputSize.m_width)) 1137 { 1138 /*The input pan zoom area can be extended symmetrically on the 1139 right and left side*/ 1140 Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\ 1141 - Params.m_inputSize.m_width)>>1); 1142 } 
1143 else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ 1144 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1145 { 1146 /*There is not enough place above the input pan zoom area to 1147 extend it symmetrically, 1148 so extend it to the maximum on the left*/ 1149 Params.m_inputCoord.m_x = 0; 1150 } 1151 else 1152 { 1153 /*There is not enough place below the input pan zoom area 1154 to extend it symmetrically, 1155 so extend it to the maximum on the right*/ 1156 Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \ 1157 - tempInputSizeWidthMax; 1158 } 1159 /*The input width of the AIR is the maximum possible width*/ 1160 Params.m_inputSize.m_width = tempInputSizeWidthMax; 1161 } 1162 else 1163 { 1164 /*The maximum possible input width is greater than the input 1165 image width, 1166 that means black borders are necessary to keep aspect ratio 1167 The input width of the AIR is all the input image width*/ 1168 Params.m_outputSize.m_width =\ 1169 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\ 1170 /Params.m_inputSize.m_width; 1171 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1172 Params.m_inputCoord.m_x = 0; 1173 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1174 pImagePlanes[0].u_topleft = 1175 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1176 -Params.m_outputSize.m_width)>>1)); 1177 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 1178 pImagePlanes[1].u_topleft = 1179 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1180 -(Params.m_outputSize.m_width>>1)))>>1); 1181 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 1182 pImagePlanes[2].u_topleft = 1183 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1184 -(Params.m_outputSize.m_width>>1)))>>1); 1185 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 1186 } 1187 } 1188 break; 1189 case M4COMMON_kOrientationLeftTop: 1190 case M4COMMON_kOrientationLeftBottom: 1191 case M4COMMON_kOrientationRightTop: 1192 case 
M4COMMON_kOrientationRightBottom: 1193 /*ROTATION*/ 1194 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1195 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 1196 //Params.m_inputSize.m_height > Params.m_inputSize.m_width) 1197 { 1198 /*Black borders will be on the left and right side of the output video*/ 1199 /*Maximum output height if the input image aspect ratio is kept and if 1200 the output height is the screen width*/ 1201 M4OSA_UInt32 tempOutputSizeHeight = 1202 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1203 /pC->m_pDecodedPlane->u_height); 1204 M4OSA_UInt32 tempInputSizeHeightMax = 0; 1205 M4OSA_UInt32 tempFinalInputHeight = 0; 1206 /*The output width is the screen height*/ 1207 Params.m_outputSize.m_height = pImagePlanes->u_width; 1208 Params.m_outputSize.m_width= pImagePlanes->u_height; 1209 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 1210 1211 /*Maximum input height according to the maximum output height 1212 (proportional to the maximum output height)*/ 1213 tempInputSizeHeightMax = 1214 (pImagePlanes->u_height*Params.m_inputSize.m_width)\ 1215 /tempOutputSizeHeight; 1216 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 1217 1218 /*Check if the maximum possible input height is contained into the 1219 input image width (rotation included)*/ 1220 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width) 1221 { 1222 /*The maximum possible input height is contained in the input 1223 image width (rotation included), 1224 that means no black borders, the input pan zoom area will be extended 1225 so that the input AIR width will be the maximum possible*/ 1226 if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \ 1227 <= Params.m_inputCoord.m_x 1228 && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\ 1229 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1230 + Params.m_inputSize.m_width)) 1231 { 1232 /*The input pan zoom area can be extended 
symmetrically on the 1233 right and left side*/ 1234 Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \ 1235 - Params.m_inputSize.m_width)>>1); 1236 } 1237 else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ 1238 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1239 { 1240 /*There is not enough place on the left of the input pan 1241 zoom area to extend it symmetrically, 1242 so extend it to the maximum on the left*/ 1243 Params.m_inputCoord.m_x = 0; 1244 } 1245 else 1246 { 1247 /*There is not enough place on the right of the input pan zoom 1248 area to extend it symmetrically, 1249 so extend it to the maximum on the right*/ 1250 Params.m_inputCoord.m_x = 1251 pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax; 1252 } 1253 /*The input width of the AIR is the maximum possible width*/ 1254 Params.m_inputSize.m_width = tempInputSizeHeightMax; 1255 } 1256 else 1257 { 1258 /*The maximum possible input height is greater than the input 1259 image width (rotation included), 1260 that means black borders are necessary to keep aspect ratio 1261 The input width of the AIR is all the input image width*/ 1262 Params.m_outputSize.m_width = 1263 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\ 1264 /Params.m_inputSize.m_width; 1265 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1266 Params.m_inputCoord.m_x = 0; 1267 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1268 pImagePlanes[0].u_topleft = 1269 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1270 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 1271 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 1272 pImagePlanes[1].u_topleft = 1273 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1274 -(Params.m_outputSize.m_width>>1)))>>1)\ 1275 *pImagePlanes[1].u_stride)+1; 1276 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 1277 pImagePlanes[2].u_topleft = 1278 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1279 
-(Params.m_outputSize.m_width>>1)))>>1)\ 1280 *pImagePlanes[2].u_stride)+1; 1281 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 1282 } 1283 } 1284 else 1285 { 1286 /*Black borders will be on the top and bottom of the output video*/ 1287 /*Maximum output width if the input image aspect ratio is kept and if 1288 the output width is the screen height*/ 1289 M4OSA_UInt32 tempOutputSizeWidth = 1290 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\ 1291 /pC->m_pDecodedPlane->u_width); 1292 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1293 M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0; 1294 /*The output height is the screen width*/ 1295 Params.m_outputSize.m_width = pImagePlanes->u_height; 1296 Params.m_outputSize.m_height= pImagePlanes->u_width; 1297 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1298 1299 /*Maximum input width according to the maximum output width 1300 (proportional to the maximum output width)*/ 1301 tempInputSizeWidthMax = 1302 (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth; 1303 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1304 1305 /*Check if the maximum possible input width is contained into the input 1306 image height (rotation included)*/ 1307 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height) 1308 { 1309 /*The maximum possible input width is contained in the input 1310 image height (rotation included), 1311 that means no black borders, the input pan zoom area will be extended 1312 so that the input AIR height will be the maximum possible*/ 1313 if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \ 1314 <= Params.m_inputCoord.m_y 1315 && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\ 1316 <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \ 1317 + Params.m_inputSize.m_height)) 1318 { 1319 /*The input pan zoom area can be extended symmetrically on 1320 the right and left side*/ 1321 Params.m_inputCoord.m_y -= 
((tempInputSizeWidthMax \ 1322 - Params.m_inputSize.m_height)>>1); 1323 } 1324 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 1325 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 1326 { 1327 /*There is not enough place on the top of the input pan zoom 1328 area to extend it symmetrically, 1329 so extend it to the maximum on the top*/ 1330 Params.m_inputCoord.m_y = 0; 1331 } 1332 else 1333 { 1334 /*There is not enough place on the bottom of the input pan zoom 1335 area to extend it symmetrically, 1336 so extend it to the maximum on the bottom*/ 1337 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\ 1338 - tempInputSizeWidthMax; 1339 } 1340 /*The input height of the AIR is the maximum possible height*/ 1341 Params.m_inputSize.m_height = tempInputSizeWidthMax; 1342 } 1343 else 1344 { 1345 /*The maximum possible input width is greater than the input\ 1346 image height (rotation included), 1347 that means black borders are necessary to keep aspect ratio 1348 The input height of the AIR is all the input image height*/ 1349 Params.m_outputSize.m_height = 1350 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\ 1351 /Params.m_inputSize.m_height; 1352 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1353 Params.m_inputCoord.m_y = 0; 1354 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1355 pImagePlanes[0].u_topleft = 1356 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1357 -Params.m_outputSize.m_height))>>1))+1; 1358 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 1359 pImagePlanes[1].u_topleft = 1360 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1361 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1362 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 1363 pImagePlanes[2].u_topleft = 1364 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1365 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1366 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 1367 } 1368 } 1369 break; 
1370 } 1371 } 1372 1373 /*Width and height have to be even*/ 1374 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1375 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1376 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1377 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1378 pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1; 1379 pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1; 1380 pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1; 1381 pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1; 1382 pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1; 1383 pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1; 1384 1385 /*Check that values are coherent*/ 1386 if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) 1387 { 1388 Params.m_inputSize.m_width = Params.m_outputSize.m_width; 1389 } 1390 else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) 1391 { 1392 Params.m_inputSize.m_height = Params.m_outputSize.m_height; 1393 } 1394 } 1395 1396 /** 1397 Picture rendering: Resizing and Cropping*/ 1398 if(pC->m_mediaRendering != M4xVSS_kBlackBorders) 1399 { 1400 switch(pBasicTags.orientation) 1401 { 1402 default: 1403 case M4COMMON_kOrientationUnknown: 1404 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 1405 case M4COMMON_kOrientationTopLeft: 1406 case M4COMMON_kOrientationTopRight: 1407 case M4COMMON_kOrientationBottomRight: 1408 case M4COMMON_kOrientationBottomLeft: 1409 Params.m_outputSize.m_height = pImagePlanes->u_height; 1410 Params.m_outputSize.m_width = pImagePlanes->u_width; 1411 break; 1412 case M4COMMON_kOrientationLeftTop: 1413 case M4COMMON_kOrientationLeftBottom: 1414 case M4COMMON_kOrientationRightTop: 1415 case M4COMMON_kOrientationRightBottom: 1416 Params.m_outputSize.m_height = pImagePlanes->u_width; 1417 Params.m_outputSize.m_width = pImagePlanes->u_height; 1418 break; 1419 } 1420 } 1421 1422 /** 
1423 Picture rendering: Cropping*/ 1424 if(pC->m_mediaRendering == M4xVSS_kCropping) 1425 { 1426 if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\ 1427 /Params.m_outputSize.m_width<Params.m_inputSize.m_height) 1428 { 1429 M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height; 1430 /*height will be cropped*/ 1431 Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height \ 1432 * Params.m_inputSize.m_width) /Params.m_outputSize.m_width); 1433 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1434 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1435 { 1436 Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\ 1437 ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1); 1438 } 1439 else 1440 { 1441 Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\ 1442 ((tempHeight - Params.m_inputSize.m_height))>>1); 1443 } 1444 } 1445 else 1446 { 1447 M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width; 1448 /*width will be cropped*/ 1449 Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width \ 1450 * Params.m_inputSize.m_height) /Params.m_outputSize.m_height); 1451 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1452 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1453 { 1454 Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\ 1455 ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1); 1456 } 1457 else 1458 { 1459 Params.m_inputCoord.m_x += (M4OSA_Int32)\ 1460 (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1); 1461 } 1462 } 1463 } 1464 1465 1466 1467 /** 1468 * Call AIR functions */ 1469 if(M4OSA_NULL == pC->m_air_context) 1470 { 1471 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); 1472 if(err != M4NO_ERROR) 1473 { 1474 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1475 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1476 pC->m_pDecodedPlane = M4OSA_NULL; 1477 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1478 Error when 
initializing AIR: 0x%x", err); 1479 return err; 1480 } 1481 } 1482 1483 err = M4AIR_configure(pC->m_air_context, &Params); 1484 if(err != M4NO_ERROR) 1485 { 1486 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1487 Error when configuring AIR: 0x%x", err); 1488 M4AIR_cleanUp(pC->m_air_context); 1489 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1490 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1491 pC->m_pDecodedPlane = M4OSA_NULL; 1492 return err; 1493 } 1494 1495 err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes); 1496 if(err != M4NO_ERROR) 1497 { 1498 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err); 1499 M4AIR_cleanUp(pC->m_air_context); 1500 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1501 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1502 pC->m_pDecodedPlane = M4OSA_NULL; 1503 return err; 1504 } 1505 pImagePlanes[0] = pImagePlanes1; 1506 pImagePlanes[1] = pImagePlanes2; 1507 pImagePlanes[2] = pImagePlanes3; 1508 } 1509 1510 1511 /** 1512 * Increment the image counter */ 1513 pC->m_ImageCounter++; 1514 1515 /** 1516 * Check end of sequence */ 1517 last_frame_flag = (pC->m_ImageCounter >= pC->m_NbImage); 1518 1519 /** 1520 * Keep the picture duration */ 1521 *pPictureDuration = pC->m_timeDuration; 1522 1523 if (1 == last_frame_flag) 1524 { 1525 if(M4OSA_NULL != pC->m_air_context) 1526 { 1527 err = M4AIR_cleanUp(pC->m_air_context); 1528 if(err != M4NO_ERROR) 1529 { 1530 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err); 1531 return err; 1532 } 1533 } 1534 if(M4OSA_NULL != pC->m_pDecodedPlane) 1535 { 1536 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1537 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1538 pC->m_pDecodedPlane = M4OSA_NULL; 1539 } 1540 return M4PTO3GPP_WAR_LAST_PICTURE; 1541 } 1542 1543 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving "); 1544 return M4NO_ERROR; 1545} 1546 1547/** 1548 
****************************************************************************** 1549 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1550 * @brief This function initializes Pto3GPP with the given parameters 1551 * @note The "Pictures to 3GPP" parameters are given by the internal xVSS 1552 * context. This context contains a pointer on the current element 1553 * of the chained list of Pto3GPP parameters. 1554 * @param pContext (IN) The integrator own context 1555 * 1556 * @return M4NO_ERROR: No error 1557 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 1558 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1559 ****************************************************************************** 1560 */ 1561M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1562{ 1563 /************************************************************************/ 1564 /* Definitions to generate dummy AMR file used to add AMR silence in files generated 1565 by Pto3GPP */ 1566 #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE 13 1567 /* This constant is defined in M4VSS3GPP_InternalConfig.h */ 1568 extern const M4OSA_UInt8\ 1569 M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE]; 1570 1571 /* AMR silent frame used to compute dummy AMR silence file */ 1572 #define M4VSS3GPP_AMR_HEADER_SIZE 6 1573 const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE] = 1574 { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a }; 1575 /************************************************************************/ 1576 1577 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1578 M4OSA_ERR err; 1579 M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL; 1580 M4PTO3GPP_Params Params; 1581 M4xVSS_PictureCallbackCtxt* pCallBackCtxt; 1582 M4OSA_Bool cmpResult=M4OSA_FALSE; 1583 M4OSA_Context pDummyAMRFile; 1584 M4OSA_Char out_amr[64]; 1585 /*UTF conversion support*/ 1586 M4OSA_Char* pDecodedPath = 
M4OSA_NULL; 1587 M4OSA_UInt32 i; 1588 1589 /** 1590 * Create a M4PTO3GPP instance */ 1591 err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr, 1592 xVSS_context->pFileWritePtr); 1593 if (err != M4NO_ERROR) 1594 { 1595 M4OSA_TRACE1_1("M4PTO3GPP_Init returned %ld\n",err); 1596 return err; 1597 } 1598 1599 /* replay recorded external encoder registrations on the PTO3GPP */ 1600 for (i=0; i<M4VE_kEncoderType_NB; i++) 1601 { 1602 if (xVSS_context->registeredExternalEncs[i].registered) 1603 { 1604 err = M4PTO3GPP_RegisterExternalVideoEncoder(pM4PTO3GPP_Ctxt, i, 1605 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 1606 xVSS_context->registeredExternalEncs[i].pUserData); 1607 if (M4NO_ERROR != err) 1608 { 1609 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 1610 M4PTO3GPP_registerExternalVideoEncoder() returns 0x%x!", err); 1611 M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt); 1612 return err; 1613 } 1614 } 1615 } 1616 1617 pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_malloc(sizeof(M4xVSS_PictureCallbackCtxt), 1618 M4VS,(M4OSA_Char *) "Pto3gpp callback struct"); 1619 if(pCallBackCtxt == M4OSA_NULL) 1620 { 1621 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp"); 1622 return M4ERR_ALLOC; 1623 } 1624 1625 Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize; 1626 Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat; 1627 1628 /** 1629 * Generate "dummy" amr file containing silence in temporary folder */ 1630 M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, 64); 1631 M4OSA_chrNCat(out_amr, (M4OSA_Char *)"dummy.amr\0", 10); 1632 1633 /** 1634 * UTF conversion: convert the temporary path into the customer format*/ 1635 pDecodedPath = out_amr; 1636 1637 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1638 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1639 { 1640 M4OSA_UInt32 length = 0; 1641 err = 
M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr, 1642 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1643 if(err != M4NO_ERROR) 1644 { 1645 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\ 1646 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1647 return err; 1648 } 1649 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1650 } 1651 1652 /** 1653 * End of the conversion, now use the converted path*/ 1654 1655 err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite); 1656 1657 /*Commented because of the use of the UTF conversion see above*/ 1658/* err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite); 1659 */ 1660 if(err != M4NO_ERROR) 1661 { 1662 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\ 1663 error: 0x%x\n",out_amr, err); 1664 return err; 1665 } 1666 1667 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1668 (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE); 1669 if(err != M4NO_ERROR) 1670 { 1671 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\ 1672 error: 0x%x\n",out_amr, err); 1673 return err; 1674 } 1675 1676 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1677 (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE); 1678 if(err != M4NO_ERROR) 1679 { 1680 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1681 Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err); 1682 return err; 1683 } 1684 1685 err = xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile); 1686 if(err != M4NO_ERROR) 1687 { 1688 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1689 Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err); 1690 return err; 1691 } 1692 1693 /** 1694 * Fill parameters for Pto3GPP with the parameters contained in 
the current element of the 1695 * Pto3GPP parameters chained list and with default parameters */ 1696/*+ New Encoder bitrates */ 1697 if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) { 1698 Params.OutputVideoBitrate = M4VIDEOEDITING_kVARIABLE_KBPS; 1699 } 1700 else { 1701 Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate; 1702 } 1703 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d", 1704 Params.OutputVideoBitrate); 1705/*- New Encoder bitrates */ 1706 Params.OutputFileMaxSize = M4PTO3GPP_kUNLIMITED; 1707 Params.pPictureCallbackFct = M4xVSS_PictureCallbackFct; 1708 Params.pPictureCallbackCtxt = pCallBackCtxt; 1709 /*FB: change to use the converted path (UTF conversion) see the conversion above*/ 1710 /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */ 1711 Params.pInputAudioTrackFile = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr; 1712 Params.AudioPaddingMode = M4PTO3GPP_kAudioPaddingMode_Loop; 1713 Params.AudioFileFormat = M4VIDEOEDITING_kFileType_AMR; 1714 Params.pOutput3gppFile = xVSS_context->pPTo3GPPcurrentParams->pFileOut; 1715 Params.pTemporaryFile = xVSS_context->pPTo3GPPcurrentParams->pFileTemp; 1716 /*+PR No: blrnxpsw#223*/ 1717 /*Increasing frequency of Frame, calculating Nos of Frame = duration /FPS */ 1718 /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/ 1719 /*If case check for PanZoom removed */ 1720 Params.NbVideoFrames = (M4OSA_UInt32) 1721 (xVSS_context->pPTo3GPPcurrentParams->duration \ 1722 / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */ 1723 pCallBackCtxt->m_timeDuration = xVSS_context->pPTo3GPPcurrentParams->framerate; 1724 /*-PR No: blrnxpsw#223*/ 1725 pCallBackCtxt->m_ImageCounter = 0; 1726 pCallBackCtxt->m_FileIn = xVSS_context->pPTo3GPPcurrentParams->pFileIn; 1727 pCallBackCtxt->m_NbImage = Params.NbVideoFrames; 1728 pCallBackCtxt->m_pFileReadPtr = xVSS_context->pFileReadPtr; 1729 pCallBackCtxt->m_pDecodedPlane = 
M4OSA_NULL; 1730 pCallBackCtxt->m_pPto3GPPparams = xVSS_context->pPTo3GPPcurrentParams; 1731 pCallBackCtxt->m_air_context = M4OSA_NULL; 1732 pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering; 1733 1734 /** 1735 * Set the input and output files */ 1736 err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params); 1737 if (err != M4NO_ERROR) 1738 { 1739 M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err); 1740 if(pCallBackCtxt != M4OSA_NULL) 1741 { 1742 M4OSA_free((M4OSA_MemAddr32)pCallBackCtxt); 1743 pCallBackCtxt = M4OSA_NULL; 1744 } 1745 M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt); 1746 return err; 1747 } 1748 1749 /** 1750 * Save context to be able to call Pto3GPP step function in M4xVSS_step function */ 1751 xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt; 1752 xVSS_context->pCallBackCtxt = pCallBackCtxt; 1753 1754 return M4NO_ERROR; 1755} 1756 1757/** 1758 ****************************************************************************** 1759 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext) 1760 * @brief This function cleans up Pto3GPP 1761 * @note 1762 * @param pContext (IN) The integrator own context 1763 * 1764 * @return M4NO_ERROR: No error 1765 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1766 ****************************************************************************** 1767 */ 1768M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext) 1769{ 1770 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1771 M4OSA_ERR err; 1772 M4OSA_Char out_amr[64]; 1773 /*UTF conversion support*/ 1774 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1775 1776 /** 1777 * Free the PTO3GPP callback context */ 1778 if(M4OSA_NULL != xVSS_context->pCallBackCtxt) 1779 { 1780 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCallBackCtxt); 1781 xVSS_context->pCallBackCtxt = M4OSA_NULL; 1782 } 1783 1784 /** 1785 * Finalize the output file */ 1786 err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt); 1787 if 
(err != M4NO_ERROR) 1788 { 1789 M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err); 1790 M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt); 1791 return err; 1792 } 1793 1794 /** 1795 * Free this M4PTO3GPP instance */ 1796 err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt); 1797 if (err != M4NO_ERROR) 1798 { 1799 M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err); 1800 return err; 1801 } 1802 1803 /** 1804 * Remove dummy.amr file */ 1805 M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, 64); 1806 M4OSA_chrNCat(out_amr, (M4OSA_Char *)"dummy.amr\0", 10); 1807 1808 /** 1809 * UTF conversion: convert the temporary path into the customer format*/ 1810 pDecodedPath = out_amr; 1811 1812 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1813 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1814 { 1815 M4OSA_UInt32 length = 0; 1816 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr, 1817 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1818 if(err != M4NO_ERROR) 1819 { 1820 M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\ 1821 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1822 return err; 1823 } 1824 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1825 } 1826 /** 1827 * End of the conversion, now use the decoded path*/ 1828 M4OSA_fileExtraDelete(pDecodedPath); 1829 1830 /*Commented because of the use of the UTF conversion*/ 1831/* M4OSA_fileExtraDelete(out_amr); 1832 */ 1833 1834 xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL; 1835 xVSS_context->pCallBackCtxt = M4OSA_NULL; 1836 1837 return M4NO_ERROR; 1838} 1839 1840/** 1841 ****************************************************************************** 1842 * prototype M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx) 1843 * @brief This function converts an RGB565 plane to YUV420 planar 1844 * @note It is used only for framing effect 1845 * It 
allocates output YUV planes 1846 * @param framingCtx (IN) The framing struct containing input RGB565 plane 1847 * 1848 * @return M4NO_ERROR: No error 1849 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1850 * @return M4ERR_ALLOC: Allocation error (no more memory) 1851 ****************************************************************************** 1852 */ 1853M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx) 1854{ 1855 M4OSA_ERR err; 1856 1857 /** 1858 * Allocate output YUV planes */ 1859 framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), 1860 M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV"); 1861 if(framingCtx->FramingYuv == M4OSA_NULL) 1862 { 1863 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV"); 1864 return M4ERR_ALLOC; 1865 } 1866 framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width; 1867 framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height; 1868 framingCtx->FramingYuv[0].u_topleft = 0; 1869 framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width; 1870 framingCtx->FramingYuv[0].pac_data = 1871 (M4VIFI_UInt8*)M4OSA_malloc((framingCtx->FramingYuv[0].u_width\ 1872 *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\ 1873 "Alloc for the Convertion output YUV");; 1874 if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) 1875 { 1876 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV"); 1877 return M4ERR_ALLOC; 1878 } 1879 framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1; 1880 framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1; 1881 framingCtx->FramingYuv[1].u_topleft = 0; 1882 framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1; 1883 framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \ 1884 + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height; 1885 
framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1; 1886 framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1; 1887 framingCtx->FramingYuv[2].u_topleft = 0; 1888 framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1; 1889 framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \ 1890 + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height; 1891 1892 /** 1893 * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing 1894 effect */ 1895 err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); 1896 if(err != M4NO_ERROR) 1897 { 1898 M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\ 1899 error when converting from RGB to YUV: 0x%x\n", err); 1900 } 1901 1902 framingCtx->duration = 0; 1903 framingCtx->previousClipTime = -1; 1904 framingCtx->previewOffsetClipTime = -1; 1905 1906 /** 1907 * Only one element in the chained list (no animated image with RGB buffer...) 
*/ 1908 framingCtx->pCurrent = framingCtx; 1909 framingCtx->pNext = framingCtx; 1910 1911 return M4NO_ERROR; 1912} 1913 1914M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size) 1915{ 1916 M4OSA_UInt32 i; 1917 M4OSA_UInt8* plane = planeIn; 1918 M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); 1919 M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; 1920 1921 for(i=0; i<(size>>1); i++) 1922 { 1923 *plane++ = transparent1; 1924 *plane++ = transparent2; 1925 } 1926 1927 return M4NO_ERROR; 1928} 1929 1930 1931/** 1932 ****************************************************************************** 1933 * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toYUV420_FrammingEffect(M4OSA_Context pContext, 1934 * M4VSS3GPP_EffectSettings* pEffect, 1935 * M4xVSS_FramingStruct* framingCtx, 1936 M4VIDEOEDITING_VideoFrameSize OutputVideoResolution) 1937 * 1938 * @brief This function converts ARGB8888 input file to YUV420 whenused for framming effect 1939 * @note The input ARGB8888 file path is contained in the pEffect structure 1940 * If the ARGB8888 must be resized to fit output video size, this function 1941 * will do it. 1942 * @param pContext (IN) The integrator own context 1943 * @param pEffect (IN) The effect structure containing all informations on 1944 * the file to decode, resizing ... 
1945 * @param framingCtx (IN/OUT) Structure in which the output RGB will be stored 1946 * 1947 * @return M4NO_ERROR: No error 1948 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1949 * @return M4ERR_ALLOC: Allocation error (no more memory) 1950 ****************************************************************************** 1951 */ 1952 1953 1954M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext, 1955 M4VSS3GPP_EffectSettings* pEffect, 1956 M4xVSS_FramingStruct* framingCtx, 1957 M4VIDEOEDITING_VideoFrameSize\ 1958 OutputVideoResolution) 1959{ 1960 M4OSA_ERR err; 1961 M4OSA_Context pARGBIn; 1962 M4OSA_UInt32 file_size; 1963 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1964 M4OSA_UInt32 width, height, width_out, height_out; 1965 M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath; 1966 M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); 1967 M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; 1968 /*UTF conversion support*/ 1969 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1970 M4OSA_UInt32 i = 0,j = 0; 1971 M4VIFI_ImagePlane rgbPlane; 1972 M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4); 1973 M4OSA_UInt32 frameSize = (framingCtx->width * framingCtx->height * 3); //Size of RGB888 data 1974 M4OSA_UInt32 tempAlphaPercent = 0; 1975 M4VIFI_UInt8* TempPacData = M4OSA_NULL; 1976 M4OSA_UInt16 *ptr = M4OSA_NULL; 1977 M4OSA_UInt32 z = 0; 1978 1979 M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, M4VS, (M4OSA_Char*)\ 1980 "Image argb data"); 1981 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering "); 1982 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ", 1983 framingCtx->width,framingCtx->height); 1984 if(pTmpData == M4OSA_NULL) { 1985 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 1986 return M4ERR_ALLOC; 1987 } 1988 /** 1989 * UTF conversion: convert 
the file path into the customer format*/ 1990 pDecodedPath = pFile; 1991 1992 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1993 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1994 { 1995 M4OSA_UInt32 length = 0; 1996 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile, 1997 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1998 if(err != M4NO_ERROR) 1999 { 2000 M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\ 2001 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 2002 return err; 2003 } 2004 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 2005 } 2006 2007 /** 2008 * End of the conversion, now use the decoded path*/ 2009 2010 /* Open input ARGB8888 file and store it into memory */ 2011 err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead); 2012 2013 if(err != M4NO_ERROR) 2014 { 2015 M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err); 2016 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2017 pTmpData = M4OSA_NULL; 2018 return err; 2019 } 2020 2021 err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); 2022 if(err != M4NO_ERROR) 2023 { 2024 xVSS_context->pFileReadPtr->closeRead(pARGBIn); 2025 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2026 pTmpData = M4OSA_NULL; 2027 } 2028 2029 2030 err = xVSS_context->pFileReadPtr->closeRead(pARGBIn); 2031 if(err != M4NO_ERROR) 2032 { 2033 M4OSA_TRACE1_2("Can't close input png file %s, error: 0x%x\n",pFile, err); 2034 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2035 pTmpData = M4OSA_NULL; 2036 return err; 2037 } 2038 2039 /* rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS,\ 2040 (M4OSA_Char*)"Image clip RGB888 data"); */ 2041 /* temp fix for crashing happening in filter : allocation 2memory for 2 more width */ 2042 rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(((frameSize)+ (2 * framingCtx->width)), 
2043 M4VS, (M4OSA_Char*)"Image clip RGB888 data"); 2044 if(rgbPlane.pac_data == M4OSA_NULL) 2045 { 2046 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 2047 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2048 return M4ERR_ALLOC; 2049 } 2050 2051 rgbPlane.u_height = (( framingCtx->height+1)>>1)<<1;; 2052 rgbPlane.u_width = (( framingCtx->width+1)>>1)<<1;; 2053 rgbPlane.u_stride = rgbPlane.u_width*3; 2054 rgbPlane.u_topleft = 0; 2055 2056 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2057 Remove the alpha channel "); 2058#if 0 2059 /** Remove the alpha channel*/ 2060 for (i=0, j = 0; i < frameSize_argb; i++) { 2061 if ((i % 4) == 0) continue; 2062 rgbPlane.pac_data[j] = pTmpData[i]; 2063 j++; 2064 } 2065#endif 2066 2067 /* premultiplied alpha % on RGB */ 2068 for (i=0, j = 0; i < frameSize_argb; i += 4) { 2069 /* this is alpha value */ 2070 if ((i % 4) == 0) 2071 { 2072 tempAlphaPercent = pTmpData[i]; 2073 } 2074 2075 /* R */ 2076 rgbPlane.pac_data[j] = pTmpData[i+1]; 2077 j++; 2078 2079 /* G */ 2080 if (tempAlphaPercent > 0) { 2081 rgbPlane.pac_data[j] = pTmpData[i+2]; 2082 j++; 2083 } else {/* In case of alpha value 0, make GREEN to 255 */ 2084 rgbPlane.pac_data[j] = 255; //pTmpData[i+2]; 2085 j++; 2086 } 2087 2088 /* B */ 2089 rgbPlane.pac_data[j] = pTmpData[i+3]; 2090 j++; 2091 } 2092 2093 /* convert RGB888 to RGB565 */ 2094 2095 /* allocate temp RGB 565 buffer */ 2096 TempPacData = (M4VIFI_UInt8*)M4OSA_malloc(((frameSize)+ (2 * framingCtx->width)), 2097 M4VS, (M4OSA_Char*)"Image clip RGB565 data"); 2098 2099 ptr = (M4OSA_UInt16 *)TempPacData; 2100 z = 0; 2101 2102 for (i = 0; i < j ; i += 3) 2103 { 2104 ptr[z++] = PACK_RGB565(0, rgbPlane.pac_data[i], 2105 rgbPlane.pac_data[i+1], 2106 rgbPlane.pac_data[i+2]); 2107 } 2108 2109 /* reset stride */ 2110 rgbPlane.u_stride = rgbPlane.u_width*2; 2111 2112 /* free the RBG888 and assign RGB565 */ 2113 M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data); 2114 rgbPlane.pac_data = TempPacData; 2115 
2116 2117 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2118 /** 2119 * Check if output sizes are odd */ 2120 if(rgbPlane.u_height % 2 != 0) 2121 { 2122 M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data; 2123 M4OSA_UInt32 i; 2124 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2125 output height is odd "); 2126 output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2; 2127 2128 for(i=0;i<rgbPlane.u_width;i++) 2129 { 2130 *output_pac_data++ = transparent1; 2131 *output_pac_data++ = transparent2; 2132 } 2133 2134 /** 2135 * We just add a white line to the PNG that will be transparent */ 2136 rgbPlane.u_height++; 2137 } 2138 if(rgbPlane.u_width % 2 != 0) 2139 { 2140 /** 2141 * We add a new column of white (=transparent), but we need to parse all RGB lines ... */ 2142 M4OSA_UInt32 i; 2143 M4VIFI_UInt8* newRGBpac_data; 2144 M4VIFI_UInt8* output_pac_data, *input_pac_data; 2145 2146 rgbPlane.u_width++; 2147 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \ 2148 output width is odd "); 2149 /** 2150 * We need to allocate a new RGB output buffer in which all decoded data 2151 + white line will be copied */ 2152 newRGBpac_data = (M4VIFI_UInt8*)M4OSA_malloc(rgbPlane.u_height*rgbPlane.u_width*2\ 2153 *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB"); 2154 2155 if(newRGBpac_data == M4OSA_NULL) 2156 { 2157 M4OSA_TRACE1_0("Allocation error in \ 2158 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2159 /** 2160 * Destroy SPS instance */ 2161 //M4SPS_destroy(pSPSContext); 2162 return M4ERR_ALLOC; 2163 } 2164 2165 output_pac_data= newRGBpac_data; 2166 input_pac_data = rgbPlane.pac_data; 2167 2168 for(i=0;i<rgbPlane.u_height;i++) 2169 { 2170 M4OSA_memcpy((M4OSA_MemAddr8)output_pac_data, (M4OSA_MemAddr8)input_pac_data, 2171 (rgbPlane.u_width-1)*2); 2172 2173 output_pac_data += ((rgbPlane.u_width-1)*2); 2174 /* Put the pixel to transparency color */ 2175 *output_pac_data++ = transparent1; 2176 
*output_pac_data++ = transparent2; 2177 2178 input_pac_data += ((rgbPlane.u_width-1)*2); 2179 } 2180 2181 rgbPlane.pac_data = newRGBpac_data; 2182 } 2183 2184 /** 2185 * Initialize chained list parameters */ 2186 framingCtx->duration = 0; 2187 framingCtx->previousClipTime = -1; 2188 framingCtx->previewOffsetClipTime = -1; 2189 2190 /** 2191 * Only one element in the chained list (no animated image ...) */ 2192 framingCtx->pCurrent = framingCtx; 2193 framingCtx->pNext = framingCtx; 2194 2195 /** 2196 * Get output width/height */ 2197 switch(OutputVideoResolution) 2198 //switch(xVSS_context->pSettings->xVSS.outputVideoSize) 2199 { 2200 case M4VIDEOEDITING_kSQCIF: 2201 width_out = 128; 2202 height_out = 96; 2203 break; 2204 case M4VIDEOEDITING_kQQVGA: 2205 width_out = 160; 2206 height_out = 120; 2207 break; 2208 case M4VIDEOEDITING_kQCIF: 2209 width_out = 176; 2210 height_out = 144; 2211 break; 2212 case M4VIDEOEDITING_kQVGA: 2213 width_out = 320; 2214 height_out = 240; 2215 break; 2216 case M4VIDEOEDITING_kCIF: 2217 width_out = 352; 2218 height_out = 288; 2219 break; 2220 case M4VIDEOEDITING_kVGA: 2221 width_out = 640; 2222 height_out = 480; 2223 break; 2224 case M4VIDEOEDITING_kWVGA: 2225 width_out = 800; 2226 height_out = 480; 2227 break; 2228 case M4VIDEOEDITING_kNTSC: 2229 width_out = 720; 2230 height_out = 480; 2231 break; 2232 case M4VIDEOEDITING_k640_360: 2233 width_out = 640; 2234 height_out = 360; 2235 break; 2236 case M4VIDEOEDITING_k854_480: 2237 // StageFright encoders require %16 resolution 2238 width_out = M4ENCODER_854_480_Width; 2239 height_out = 480; 2240 break; 2241 case M4VIDEOEDITING_kHD1280: 2242 width_out = 1280; 2243 height_out = 720; 2244 break; 2245 case M4VIDEOEDITING_kHD1080: 2246 // StageFright encoders require %16 resolution 2247 width_out = M4ENCODER_HD1080_Width; 2248 height_out = 720; 2249 break; 2250 case M4VIDEOEDITING_kHD960: 2251 width_out = 960; 2252 height_out = 720; 2253 break; 2254 2255 /** 2256 * If output video size is not 
given, we take QCIF size, 2257 * should not happen, because already done in M4xVSS_sendCommand */ 2258 default: 2259 width_out = 176; 2260 height_out = 144; 2261 break; 2262 } 2263 2264 /** 2265 * Allocate output planes structures */ 2266 framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_malloc(sizeof(M4VIFI_ImagePlane), M4VS, 2267 (M4OSA_Char *)"Framing Output plane RGB"); 2268 if(framingCtx->FramingRgb == M4OSA_NULL) 2269 { 2270 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2271 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2272 pTmpData = M4OSA_NULL;NULL; 2273 return M4ERR_ALLOC; 2274 } 2275 /** 2276 * Resize RGB if needed */ 2277 if((pEffect->xVSS.bResize) && 2278 (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out)) 2279 { 2280 width = width_out; 2281 height = height_out; 2282 2283 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \ 2284 New Width and height %d %d ",width,height); 2285 2286 framingCtx->FramingRgb->u_height = height_out; 2287 framingCtx->FramingRgb->u_width = width_out; 2288 framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; 2289 framingCtx->FramingRgb->u_topleft = 0; 2290 2291 framingCtx->FramingRgb->pac_data = 2292 (M4VIFI_UInt8*)M4OSA_malloc(framingCtx->FramingRgb->u_height*framingCtx->\ 2293 FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS, 2294 (M4OSA_Char *)"Framing Output pac_data RGB"); 2295 2296 if(framingCtx->FramingRgb->pac_data == M4OSA_NULL) 2297 { 2298 M4OSA_TRACE1_0("Allocation error in \ 2299 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2300 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2301 pTmpData = M4OSA_NULL;NULL; 2302 return M4ERR_ALLOC; 2303 } 2304 2305 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Resizing Needed "); 2306 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2307 rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width); 2308 2309 //err = 
M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb); 2310 err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb); 2311 2312 if(err != M4NO_ERROR) 2313 { 2314 M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\ 2315 when resizing RGB plane: 0x%x\n", err); 2316 return err; 2317 } 2318 2319 if(rgbPlane.pac_data != M4OSA_NULL) 2320 { 2321 M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data); 2322 rgbPlane.pac_data = M4OSA_NULL; 2323 2324 } 2325 2326 } 2327 else 2328 { 2329 2330 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2331 Resizing Not Needed "); 2332 width = framingCtx->width; 2333 height = framingCtx->height; 2334 framingCtx->FramingRgb->u_height = height; 2335 framingCtx->FramingRgb->u_width = width; 2336 framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2; 2337 framingCtx->FramingRgb->u_topleft = 0; 2338 framingCtx->FramingRgb->pac_data = rgbPlane.pac_data; 2339 } 2340 2341 2342 if(pEffect->xVSS.bResize) 2343 { 2344 /** 2345 * Force topleft to 0 for pure framing effect */ 2346 framingCtx->topleft_x = 0; 2347 framingCtx->topleft_y = 0; 2348 } 2349 2350 2351 2352 /** 2353 * Convert RGB output to YUV 420 to be able to merge it with output video in framing 2354 effect */ 2355 framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS, 2356 (M4OSA_Char *)"Framing Output plane YUV"); 2357 if(framingCtx->FramingYuv == M4OSA_NULL) 2358 { 2359 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2360 return M4ERR_ALLOC; 2361 } 2362 framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1; 2363 framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1; 2364 framingCtx->FramingYuv[0].u_topleft = 0; 2365 framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1; 2366 framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc 2367 
((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, 2368 (M4OSA_Char *)"Alloc for the output YUV");; 2369 if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) 2370 { 2371 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2372 return M4ERR_ALLOC; 2373 } 2374 framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1; 2375 framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1; 2376 framingCtx->FramingYuv[1].u_topleft = 0; 2377 framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1; 2378 2379 framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_malloc\ 2380 (((framingCtx->FramingYuv[0].u_width)/2*(framingCtx->FramingYuv[0].u_height)/2), M4VS, 2381 (M4OSA_Char *)"Alloc for the output YUV");; 2382 2383 framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1; 2384 framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1; 2385 framingCtx->FramingYuv[2].u_topleft = 0; 2386 framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1; 2387 2388 framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_malloc 2389 (((framingCtx->FramingYuv[0].u_width)/2*(framingCtx->FramingYuv[0].u_height)/2), M4VS, 2390 (M4OSA_Char *)"Alloc for the output YUV");; 2391 2392 2393 2394 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2395 convert RGB to YUV "); 2396 2397 //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); 2398 err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); 2399 2400 if(err != M4NO_ERROR) 2401 { 2402 M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err); 2403 } 2404 2405 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Leaving "); 2406 return M4NO_ERROR; 2407} 2408 2409/** 2410 ****************************************************************************** 2411 * prototype M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context 
pContext) 2412 * 2413 * @brief This function prepares VSS for editing 2414 * @note It also set special xVSS effect as external effects for the VSS 2415 * @param pContext (IN) The integrator own context 2416 * 2417 * @return M4NO_ERROR: No error 2418 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2419 * @return M4ERR_ALLOC: Allocation error (no more memory) 2420 ****************************************************************************** 2421 */ 2422M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext) 2423{ 2424 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2425 M4VSS3GPP_EditContext pVssCtxt; 2426 M4OSA_UInt32 i,j; 2427 M4OSA_ERR err; 2428 2429 /** 2430 * Create a VSS 3GPP edition instance */ 2431 err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 2432 if (err != M4NO_ERROR) 2433 { 2434 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n", 2435 err); 2436 M4VSS3GPP_editCleanUp(pVssCtxt); 2437 return err; 2438 } 2439 2440#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS 2441 /* replay recorded external decoder registrations on the VSS3GPP */ 2442 for (i=0; i<M4VD_kVideoType_NB; i++) 2443 { 2444 if (xVSS_context->registeredExternalDecs[i].registered) 2445 { 2446 err = M4VSS3GPP_editRegisterExternalVideoDecoder(pVssCtxt, i, 2447 xVSS_context->registeredExternalDecs[i].pDecoderInterface, 2448 xVSS_context->registeredExternalDecs[i].pUserData); 2449 if (M4NO_ERROR != err) 2450 { 2451 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: \ 2452 M4VSS3GPP_editRegisterExternalVideoDecoder() returns 0x%x!", err); 2453 M4VSS3GPP_editCleanUp(pVssCtxt); 2454 return err; 2455 } 2456 } 2457 } 2458#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */ 2459 2460 /* replay recorded external encoder registrations on the VSS3GPP */ 2461 for (i=0; i<M4VE_kEncoderType_NB; i++) 2462 { 2463 if (xVSS_context->registeredExternalEncs[i].registered) 2464 { 2465 err = 
M4VSS3GPP_editRegisterExternalVideoEncoder(pVssCtxt, i, 2466 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 2467 xVSS_context->registeredExternalEncs[i].pUserData); 2468 if (M4NO_ERROR != err) 2469 { 2470 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 2471 M4VSS3GPP_editRegisterExternalVideoEncoder() returns 0x%x!", err); 2472 M4VSS3GPP_editCleanUp(pVssCtxt); 2473 return err; 2474 } 2475 } 2476 } 2477 2478 /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */ 2479 if(xVSS_context->targetedBitrate != 0) 2480 { 2481 M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt; 2482 2483 pVSSContext->bIsMMS = M4OSA_TRUE; 2484 pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate; 2485 pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate; 2486 } 2487 2488 /*Warning: since the adding of the UTF conversion, pSettings has been changed in the next 2489 part in pCurrentEditSettings (there is a specific current editing structure for the saving, 2490 as for the preview)*/ 2491 2492 /** 2493 * Set the external video effect functions, for saving mode (to be moved to 2494 M4xVSS_saveStart() ?)*/ 2495 for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++) 2496 { 2497 for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++) 2498 { 2499 if (M4xVSS_kVideoEffectType_BlackAndWhite == 2500 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2501 { 2502 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2503 M4VSS3GPP_externalVideoEffectColor; 2504 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2505 // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite; 2506 /*commented FB*/ 2507 /** 2508 * We do not need to set the color context, it is already set 2509 during sendCommand function */ 2510 } 2511 if (M4xVSS_kVideoEffectType_Pink == 2512 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2513 { 2514 
xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2515 M4VSS3GPP_externalVideoEffectColor; 2516 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2517 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't 2518 // use any function context */ 2519 /*commented FB*/ 2520 /** 2521 * We do not need to set the color context, 2522 it is already set during sendCommand function */ 2523 } 2524 if (M4xVSS_kVideoEffectType_Green == 2525 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2526 { 2527 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2528 M4VSS3GPP_externalVideoEffectColor; 2529 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2530 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green; 2531 /**< we don't use any function context */ 2532 /*commented FB*/ 2533 /** 2534 * We do not need to set the color context, it is already set during 2535 sendCommand function */ 2536 } 2537 if (M4xVSS_kVideoEffectType_Sepia == 2538 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2539 { 2540 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2541 M4VSS3GPP_externalVideoEffectColor; 2542 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2543 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia; 2544 /**< we don't use any function context */ 2545 /*commented FB*/ 2546 /** 2547 * We do not need to set the color context, it is already set during 2548 sendCommand function */ 2549 } 2550 if (M4xVSS_kVideoEffectType_Fifties == 2551 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2552 { 2553 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2554 M4VSS3GPP_externalVideoEffectFifties; 2555 /** 2556 * We do not need to set the framing context, it is already set during 2557 sendCommand function */ 2558 } 2559 if (M4xVSS_kVideoEffectType_Negative == 2560 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2561 { 2562 
xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2563 M4VSS3GPP_externalVideoEffectColor; 2564 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2565 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative; 2566 /**< we don't use any function context */ 2567 /*commented FB*/ 2568 /** 2569 * We do not need to set the color context, it is already set during 2570 sendCommand function */ 2571 } 2572 if (M4xVSS_kVideoEffectType_Framing == 2573 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2574 { 2575 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2576 M4VSS3GPP_externalVideoEffectFraming; 2577 /** 2578 * We do not need to set the framing context, it is already set during 2579 sendCommand function */ 2580 } 2581 if (M4xVSS_kVideoEffectType_ZoomIn == 2582 xVSS_context->pSettings->Effects[j].VideoEffectType) 2583 { 2584 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2585 M4VSS3GPP_externalVideoEffectZoom; 2586 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2587 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any 2588 function context */ 2589 } 2590 if (M4xVSS_kVideoEffectType_ZoomOut == 2591 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2592 { 2593 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2594 M4VSS3GPP_externalVideoEffectZoom; 2595 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2596 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any 2597 function context */ 2598 } 2599 if (M4xVSS_kVideoEffectType_ColorRGB16 == 2600 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2601 { 2602 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2603 M4VSS3GPP_externalVideoEffectColor; 2604 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2605 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16; 2606 /**< we don't use any function context */ 
                /**
                 * We do not need to set the color context, it is already set during
                 * sendCommand function */
            }
            if (M4xVSS_kVideoEffectType_Gradient ==
             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
            {
                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
                 M4VSS3GPP_externalVideoEffectColor;
                /* NOTE(review): the commented-out context below names the
                 * ColorRGB16 constant, not Gradient - presumably a copy/paste
                 * leftover; harmless since the context is set in sendCommand */
                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
                //    (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
                /**< we don't use any function context */
                /**
                 * We do not need to set the color context, it is already set during
                 * sendCommand function */
            }

        }
    }

    /**
     * Open the VSS 3GPP */
    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
             M4VSS3GPP_editOpen returned 0x%x\n",err);
        M4VSS3GPP_editCleanUp(pVssCtxt);
        return err;
    }

    /**
     * Save VSS context to be able to close / free VSS later */
    xVSS_context->pCurrentEditContext = pVssCtxt;

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
 *
 * @brief    This function cleans up VSS
 * @note     Closes and frees the VSS 3GPP edit instance stored in
 *           xVSS_context->pCurrentEditContext; no-op when that context is NULL
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
    M4OSA_ERR err;

    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
    {
        /**
         * Close the VSS 3GPP */
        err = M4VSS3GPP_editClose(pVssCtxt);
        if (err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
                 M4VSS3GPP_editClose returned 0x%x\n",err);
            M4VSS3GPP_editCleanUp(pVssCtxt);
            return err;
        }

        /**
         * Free this VSS3GPP edition instance */
        err = M4VSS3GPP_editCleanUp(pVssCtxt);
        if (err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
                 M4VSS3GPP_editCleanUp returned 0x%x\n",err);
            return err;
        }
        /* NOTE(review): pCurrentEditContext is left pointing at the freed
         * instance here - presumably the caller resets it; confirm */
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
 *
 * @brief    This function prepares VSS for audio mixing
 * @note    It takes its parameters from the BGM settings in the xVSS internal context
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 ******************************************************************************
 */
/***
 * FB: the function has been modified since the structure used for the saving is now the
 * pCurrentEditSettings and not the pSettings
 * This change has been added for the UTF support
 * All the "xVSS_context->pSettings" has been replaced by "xVSS_context->pCurrentEditSettings"
 ***/
M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
    M4OSA_ERR err;
    M4VIDEOEDITING_ClipProperties fileProperties;

    /**
     * Allocate audio mixing settings structure and fill it with BGM parameters.
     * Freed later in M4xVSS_internalCloseAudioMixedFile */
    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_malloc
        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
    if(pAudioMixSettings == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
        return M4ERR_ALLOC;
    }

    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
         M4VIDEOEDITING_kFileType_3GPP)
    {
        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
                 &fileProperties);
        if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
                 impossible to retrieve audio BGM properties ->\
                     reencoding audio background music", err);
            /* Make the stream type differ from the output format so the BGM
             * track will be re-encoded */
            fileProperties.AudioStreamType =
                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
            /* To force BGM encoding */
        }
    }

    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
    pAudioMixSettings->AddedAudioFileType =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
    pAudioMixSettings->pAddedAudioTrackFile =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
    pAudioMixSettings->uiAddVolume =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;

    /* NOTE(review): the fields below still read xVSS_context->pSettings even
     * though the FB note above says all reads moved to pCurrentEditSettings -
     * confirm this mix is intentional */
    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
    pAudioMixSettings->uiSamplingFrequency =
     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;

    pAudioMixSettings->b_DuckingNeedeed =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
    /* Volume levels are given as percentages; convert to float ratios */
    pAudioMixSettings->fBTVolLevel =
     (M4OSA_Float)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
    pAudioMixSettings->InDucking_threshold =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
    pAudioMixSettings->InDucking_lowVolume =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
    pAudioMixSettings->fPTVolLevel =
     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;

    if(xVSS_context->pSettings->xVSS.bAudioMono)
    {
        pAudioMixSettings->outputNBChannels = 1;
    }
    else
    {
        pAudioMixSettings->outputNBChannels = 2;
    }

    /**
     * Fill audio mix settings with BGM parameters */
    pAudioMixSettings->uiBeginLoop =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
    pAudioMixSettings->uiEndLoop =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
    pAudioMixSettings->uiAddCts =
     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;

    /**
     * Output file of the audio mixer will be final file (audio mixing is the last step) */
    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;

    /**
     * Input file of the audio mixer is a temporary file containing all audio/video editions */
    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;

    /**
     * Save audio mixing settings pointer to be able to free it in
     * M4xVSS_internalCloseAudioMixedFile function */
    xVSS_context->pAudioMixSettings = pAudioMixSettings;

    /**
     * Create a VSS 3GPP audio mixing instance */
    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);

    /**
     * Save audio mixing context to be able to call audio mixing step function in
     * M4xVSS_step function (saved even on error, before the check below) */
    xVSS_context->pAudioMixContext = pAudioMixingCtxt;

    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
        return err;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
 *
 * @brief    This function cleans up VSS for audio mixing
 * @note     Frees the audio mixing instance and the settings structure that
 *           M4xVSS_internalGenerateAudioMixFile stored in the context
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /**
     * Free this VSS3GPP audio mixing instance */
    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
    {
        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
        if (err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
            return err;
        }
    }

    /**
     * Free VSS audio mixing settings */
    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pAudioMixSettings);
        xVSS_context->pAudioMixSettings = M4OSA_NULL;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
 *
 * @brief    This function cleans up preview edition structure used to generate
 *            preview.3gp file given to the
VPS
 * @note        It also frees the preview structure given to the VPS
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_UInt8 i;

    /* NOTE(review): unlike M4xVSS_internalFreeSaving, pCurrentEditSettings is
     * dereferenced here without a NULL check - caller presumably guarantees
     * it is set; confirm */
    /**
     * Free clip/transition settings */
    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
    {
        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);

        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i]));
        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;

        /**
         * Because there is 1 less transition than clip number */
        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList[i]));
            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
        }
    }

    /**
     * Free clip/transition list */
    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList));
        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
    }
    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList));
        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
    }

    /**
     * Free output preview file path */
    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
    {
        M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile);
        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
    }

    /**
     * Free temporary preview file path (the temp file itself is deleted
     * from disk first) */
    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
    {
        M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pTemporaryFile);
        M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
    }

    /**
     * Free "local" BGM settings */
    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
    {
        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
        {
            M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
        }
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
    }

    /**
     * Free current edit settings structure */
    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings);
        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
    }

    /**
     * Free preview effects given to application */
    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
    {
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pPreviewSettings->Effects);
        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
        xVSS_context->pPreviewSettings->nbEffects = 0;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
 *
 * @brief    This function cleans up saving edition structure used to generate
 *            output.3gp file given to the VPS
 * @note
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_UInt8 i;

    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
    {
        /**
         * Free clip/transition settings */
        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
        {
            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);

            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i]));
            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;

            /**
             * Because there is 1 less transition than clip number */
            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
            {
                M4OSA_free((M4OSA_MemAddr32)
                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }

        /**
         * Free clip/transition list */
        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList));
            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
        }
        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList));
            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
        }

        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->Effects));
            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->nbEffects = 0;
        }

        /**
         * Free output saving file path */
        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
            {
                /* When a BGM track exists, the edited file was only the audio
                 * mixer's input (see M4xVSS_internalGenerateAudioMixFile), so
                 * it is deleted from disk before freeing the path */
                M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pOutputFile);
                M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile);
            }
            if(xVSS_context->pOutputFile != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)xVSS_context->pOutputFile);
                xVSS_context->pOutputFile = M4OSA_NULL;
            }
            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
        }

        /**
         * Free temporary saving file path (temp file deleted from disk first) */
        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
        {
            M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pTemporaryFile);
            M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
        }

        /**
         * Free "local" BGM settings */
        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
            {
                M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
            }
            M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
        }

        /**
         * Free current edit settings structure */
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings);
        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
 *
 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
 * @note
 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to
free
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
{
    M4OSA_UInt8 i,j;

    /**
     * For each clip ... */
    for(i=0; i<pSettings->uiClipNumber; i++)
    {
        /**
         * ... free clip settings */
        if(pSettings->pClipList[i] != M4OSA_NULL)
        {
            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);

            M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList[i]));
            pSettings->pClipList[i] = M4OSA_NULL;
        }

        /**
         * ... free transition settings */
        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
        {
            if(pSettings->pTransitionList[i] != M4OSA_NULL)
            {
                switch (pSettings->pTransitionList[i]->VideoTransitionType)
                {
                    case M4xVSS_kVideoTransitionType_AlphaMagic:

                        /**
                         * In case of Alpha Magic transition,
                         * some extra parameters need to be freed: the decoded
                         * alpha-mask plane, then the context itself */
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt
                         != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)
                             pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->
                              pPlane->pac_data));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]
                             ->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;

                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)
                             pSettings->pTransitionList[i]->
                              pExtVideoTransitionFctCtxt)->pPlane));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]
                             ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;

                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->
                             pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;

                            /* Later transitions using the same alpha file share
                             * this context: NULL them so it is not freed twice */
                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
                            {
                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
                                {
                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
                                     M4xVSS_kVideoTransitionType_AlphaMagic)
                                    {
                                        /* NOTE(review): pCmpResult is a UInt32
                                         * passed as M4OSA_Int32* - works on
                                         * this ABI but is a type-pun; confirm */
                                        M4OSA_UInt32 pCmpResult=0;
                                        M4OSA_chrCompare(pSettings->pTransitionList[i]->
                                         xVSS.transitionSpecific.pAlphaMagicSettings->
                                          pAlphaFilePath,
                                           pSettings->pTransitionList[j]->
                                            xVSS.transitionSpecific.pAlphaMagicSettings->
                                             pAlphaFilePath, (M4OSA_Int32 *)&pCmpResult);
                                        if(pCmpResult == 0)
                                        {
                                            /* Free extra internal alpha magic structure and put
                                             * it to NULL to avoid refreeing it */
                                            M4OSA_free((M4OSA_MemAddr32)(pSettings->
                                             pTransitionList[j]->pExtVideoTransitionFctCtxt));
                                            pSettings->pTransitionList[j]->
                                             pExtVideoTransitionFctCtxt = M4OSA_NULL;
                                        }
                                    }
                                }
                            }
                        }

                        if(pSettings->pTransitionList[i]->
                         xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
                        {
                            if(pSettings->pTransitionList[i]->
                             xVSS.transitionSpecific.pAlphaMagicSettings->
                              pAlphaFilePath != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)pSettings->
                                 pTransitionList[i]->
                                  xVSS.transitionSpecific.pAlphaMagicSettings->
                                   pAlphaFilePath);
                                pSettings->pTransitionList[i]->
                                 xVSS.transitionSpecific.pAlphaMagicSettings->
                                  pAlphaFilePath = M4OSA_NULL;
                            }
                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->
                             xVSS.transitionSpecific.pAlphaMagicSettings);
                            pSettings->pTransitionList[i]->
                             xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;

                        }

                        break;


                    case M4xVSS_kVideoTransitionType_SlideTransition:
                        if (M4OSA_NULL != pSettings->pTransitionList[i]->
                         xVSS.transitionSpecific.pSlideTransitionSettings)
                        {
                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->
                             xVSS.transitionSpecific.pSlideTransitionSettings);
                            pSettings->pTransitionList[i]->
                             xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
                        }
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->
                             pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
                        }
                        break;
                    default:
                        break;

                }
                /**
                 * Free transition settings structure */
                M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]));
                pSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }
    }

    /**
     * Free clip list */
    if(pSettings->pClipList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList));
        pSettings->pClipList = M4OSA_NULL;
    }

    /**
     * Free transition list */
    if(pSettings->pTransitionList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList));
        pSettings->pTransitionList = M4OSA_NULL;
    }

    /**
     * RC: Free effects list */
    if(pSettings->Effects != M4OSA_NULL)
    {
        for(i=0; i<pSettings->nbEffects; i++)
        {
            /**
             * For each clip, free framing structure if needed */
            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
             || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
            {
#ifdef DECODE_GIF_ON_SAVING
                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
#else
                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
                M4xVSS_FramingStruct* framingCtx_save;
                /* NOTE(review): framingCtx_first is declared as
                 * M4xVSS_Framing3102Struct* but assigned a M4xVSS_FramingStruct*
                 * without a cast - confirm the layouts are compatible */
                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
#endif

#ifdef DECODE_GIF_ON_SAVING
                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non existant
                 pointer */
                {
                    if(framingCtx->aFramingCtx != M4OSA_NULL)
                    {
                        /* RGB buffers supplied by the app (pFramingBuffer set)
                         * are not owned here, so only free them when NULL */
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->
                                 FramingRgb->pac_data);
                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingRgb);
                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->
                             FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->
                             FramingYuv[1].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->
                             FramingYuv[2].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingYuv);
                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
                        }
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx);
                        framingCtx->aFramingCtx = M4OSA_NULL;
                    }
                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
                    {
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->
                                 FramingRgb->pac_data);
                                framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->
                                 FramingRgb);
                                framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
                        {
                            /* NOTE(review): only plane 0's pac_data is freed
                             * here, unlike aFramingCtx above which frees planes
                             * 0..2 - presumably planes 1/2 alias plane 0 for
                             * the _last context; confirm */
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->
                             FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->FramingYuv);
                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
                        }
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last);
                        framingCtx->aFramingCtx_last = M4OSA_NULL;
                    }
                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
                    {
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->pEffectFilePath);
                        framingCtx->pEffectFilePath = M4OSA_NULL;
                    }
                    /*In case there are still allocated*/
                    if(framingCtx->pSPSContext != M4OSA_NULL)
                    {
                    //    M4SPS_destroy(framingCtx->pSPSContext);
                        framingCtx->pSPSContext = M4OSA_NULL;
#if 0
                        if(framingCtx->inputStream.data_buffer != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->inputStream.data_buffer);
                            framingCtx->inputStream.data_buffer = M4OSA_NULL;
                        }
#endif
                    }
                    /*Alpha blending structure*/
                    if(framingCtx->alphaBlendingStruct != M4OSA_NULL)
                    {
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->alphaBlendingStruct);
                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
                    }

                    M4OSA_free((M4OSA_MemAddr32)framingCtx);
                    framingCtx = M4OSA_NULL;
                }
#else
                /* Walk the (circular) framing list and free each node until we
                 * come back to the first one */
                do
                {
                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non
                     existant pointer */
                    {
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
                                framingCtx->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
                                framingCtx->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->FramingYuv != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[0].pac_data);
                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
                            framingCtx->FramingYuv = M4OSA_NULL;
                        }
                        framingCtx_save = framingCtx->pNext;
                        M4OSA_free((M4OSA_MemAddr32)framingCtx);
                        framingCtx = M4OSA_NULL;
                        framingCtx = framingCtx_save;
                    }
                    else
                    {
                        /*FB: bug fix P4ME00003002*/
                        break;
                    }
                } while(framingCtx_first != framingCtx);
#endif
            }
            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
            {
                /* Free Fifties context */
                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(FiftiesCtx != M4OSA_NULL)
                {
                    M4OSA_free((M4OSA_MemAddr32)FiftiesCtx);
                    FiftiesCtx = M4OSA_NULL;
                }

            }
            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
             || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
            {
                /* Free Color context */
                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(ColorCtx != M4OSA_NULL)
                {
                    M4OSA_free((M4OSA_MemAddr32)ColorCtx);
                    ColorCtx = M4OSA_NULL;
                }
            }

            /* Free simple fields */
            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingFilePath);
                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingBuffer);
                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pTextBuffer);
                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
            }
        }
        M4OSA_free((M4OSA_MemAddr32)pSettings->Effects);
        pSettings->Effects = M4OSA_NULL;
    }

    return M4NO_ERROR;
}

M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
//    M4OSA_UInt8 i,j;

    /* Free "local" BGM settings */
    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
    {
        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
        {
            M4OSA_free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
        }
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->xVSS.pBGMtrack);
        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
    }
#if 0
    /* Parse transitions to free internal "alpha magic" settings structure */
    /**
     * In case there is twice or more the same Alpha Magic effect, the effect context
3450 * So, we parse all remaining transition settings to know if the context can be 3451 * "re-freed", and if yes, we put its context to NULL to avoid freeing it again */ 3452 for(i=0; i<xVSS_context->pSettings->uiClipNumber-1; i++) 3453 { 3454 if(xVSS_context->pSettings->pTransitionList[i] != M4OSA_NULL) 3455 { 3456 switch (xVSS_context->pSettings->pTransitionList[i]->VideoTransitionType) 3457 { 3458 case M4xVSS_kVideoTransitionType_AlphaMagic: 3459 /** 3460 * In case of Alpha Magic transition, some extra parameters need to be freed */ 3461 if(xVSS_context->pSettings->pTransitionList[i]->\ 3462 pExtVideoTransitionFctCtxt != M4OSA_NULL) 3463 { 3464 M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\ 3465 xVSS_context->pSettings->pTransitionList[i]->\ 3466 pExtVideoTransitionFctCtxt)->pPlane->pac_data)); 3467 ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\ 3468 pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\ 3469 pPlane->pac_data = M4OSA_NULL; 3470 3471 M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\ 3472 xVSS_context->pSettings->pTransitionList[i]->\ 3473 pExtVideoTransitionFctCtxt)->pPlane)); 3474 ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\ 3475 pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\ 3476 pPlane = M4OSA_NULL; 3477 3478 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3479 pTransitionList[i]->pExtVideoTransitionFctCtxt)); 3480 xVSS_context->pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt 3481 = M4OSA_NULL; 3482 3483 for(j=i+1;j<xVSS_context->pSettings->uiClipNumber-1;j++) 3484 { 3485 if(xVSS_context->pSettings->pTransitionList[j] != M4OSA_NULL) 3486 { 3487 if(xVSS_context->pSettings->pTransitionList[j]->\ 3488 VideoTransitionType == M4xVSS_kVideoTransitionType_AlphaMagic) 3489 { 3490 M4OSA_UInt32 pCmpResult=0; 3491 M4OSA_chrCompare(xVSS_context->pSettings->pTransitionList[i]->\ 3492 xVSS.transitionSpecific.pAlphaMagicSettings->\ 3493 pAlphaFilePath, 3494 
xVSS_context->pSettings->pTransitionList[j]->\ 3495 xVSS.transitionSpecific.pAlphaMagicSettings->\ 3496 pAlphaFilePath, &pCmpResult); 3497 if(pCmpResult == 0) 3498 { 3499 /* Free extra internal alpha magic structure and put it 3500 to NULL to avoid refreeing it */ 3501 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3502 pTransitionList[j]->pExtVideoTransitionFctCtxt)); 3503 xVSS_context->pSettings->pTransitionList[j]->\ 3504 pExtVideoTransitionFctCtxt = M4OSA_NULL; 3505 } 3506 } 3507 } 3508 } 3509 } 3510 break; 3511 3512 case M4xVSS_kVideoTransitionType_SlideTransition: 3513 if(xVSS_context->pSettings->pTransitionList[i]->\ 3514 pExtVideoTransitionFctCtxt != M4OSA_NULL) 3515 { 3516 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3517 pTransitionList[i]->pExtVideoTransitionFctCtxt)); 3518 xVSS_context->pSettings->pTransitionList[i]->\ 3519 pExtVideoTransitionFctCtxt = M4OSA_NULL; 3520 } 3521 break; 3522 } 3523 } 3524 } 3525#endif 3526 3527 M4xVSS_freeSettings(xVSS_context->pSettings); 3528 3529 if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL) 3530 { 3531 M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList; 3532 M4xVSS_Pto3GPP_params* pParams_sauv; 3533 3534 while(pParams != M4OSA_NULL) 3535 { 3536 if(pParams->pFileIn != M4OSA_NULL) 3537 { 3538 M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn); 3539 pParams->pFileIn = M4OSA_NULL; 3540 } 3541 if(pParams->pFileOut != M4OSA_NULL) 3542 { 3543 /* Delete temporary file */ 3544 M4OSA_fileExtraDelete(pParams->pFileOut); 3545 M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut); 3546 pParams->pFileOut = M4OSA_NULL; 3547 } 3548 if(pParams->pFileTemp != M4OSA_NULL) 3549 { 3550 /* Delete temporary file */ 3551#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE 3552 M4OSA_fileExtraDelete(pParams->pFileTemp); 3553 M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp); 3554#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ 3555 pParams->pFileTemp = M4OSA_NULL; 3556 } 3557 pParams_sauv = pParams; 3558 pParams = pParams->pNext; 3559 
M4OSA_free((M4OSA_MemAddr32)pParams_sauv); 3560 pParams_sauv = M4OSA_NULL; 3561 } 3562 } 3563 3564 if(xVSS_context->pMCSparamsList != M4OSA_NULL) 3565 { 3566 M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList; 3567 M4xVSS_MCS_params* pParams_sauv; 3568 3569 while(pParams != M4OSA_NULL) 3570 { 3571 if(pParams->pFileIn != M4OSA_NULL) 3572 { 3573 M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn); 3574 pParams->pFileIn = M4OSA_NULL; 3575 } 3576 if(pParams->pFileOut != M4OSA_NULL) 3577 { 3578 /* Delete temporary file */ 3579 M4OSA_fileExtraDelete(pParams->pFileOut); 3580 M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut); 3581 pParams->pFileOut = M4OSA_NULL; 3582 } 3583 if(pParams->pFileTemp != M4OSA_NULL) 3584 { 3585 /* Delete temporary file */ 3586#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE 3587 M4OSA_fileExtraDelete(pParams->pFileTemp); 3588 M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp); 3589#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ 3590 pParams->pFileTemp = M4OSA_NULL; 3591 } 3592 pParams_sauv = pParams; 3593 pParams = pParams->pNext; 3594 M4OSA_free((M4OSA_MemAddr32)pParams_sauv); 3595 pParams_sauv = M4OSA_NULL; 3596 } 3597 } 3598 3599 if(xVSS_context->pcmPreviewFile != M4OSA_NULL) 3600 { 3601 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pcmPreviewFile); 3602 xVSS_context->pcmPreviewFile = M4OSA_NULL; 3603 } 3604 if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL 3605 && xVSS_context->pOutputFile != M4OSA_NULL) 3606 { 3607 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->pOutputFile); 3608 xVSS_context->pSettings->pOutputFile = M4OSA_NULL; 3609 xVSS_context->pOutputFile = M4OSA_NULL; 3610 } 3611 3612 /* Reinit all context variables */ 3613 xVSS_context->previousClipNumber = 0; 3614 xVSS_context->editingStep = M4xVSS_kMicroStateEditing; 3615 xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP; 3616 xVSS_context->pPTo3GPPparamsList = M4OSA_NULL; 3617 xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL; 3618 xVSS_context->pMCSparamsList = M4OSA_NULL; 3619 
xVSS_context->pMCScurrentParams = M4OSA_NULL; 3620 xVSS_context->tempFileIndex = 0; 3621 xVSS_context->targetedTimescale = 0; 3622 3623 return M4NO_ERROR; 3624} 3625 3626/** 3627 ****************************************************************************** 3628 * prototype M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, 3629 * M4OSA_Char* pFile, 3630 * M4VIDEOEDITING_ClipProperties *pFileProperties) 3631 * 3632 * @brief This function retrieve properties of an input 3GP file using MCS 3633 * @note 3634 * @param pContext (IN) The integrator own context 3635 * @param pFile (IN) 3GP file to analyse 3636 * @param pFileProperties (IN/OUT) Pointer on a structure that will contain 3637 * the 3GP file properties 3638 * 3639 * @return M4NO_ERROR: No error 3640 * @return M4ERR_PARAMETER: At least one of the function parameters is null 3641 ****************************************************************************** 3642 */ 3643M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile, 3644 M4VIDEOEDITING_ClipProperties *pFileProperties) 3645{ 3646 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 3647 M4OSA_ERR err; 3648 M4MCS_Context mcs_context; 3649 3650 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 3651 if(err != M4NO_ERROR) 3652 { 3653 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err); 3654 return err; 3655 } 3656 3657 /*open the MCS in the "normal opening" mode to retrieve the exact duration*/ 3658 err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP, 3659 M4OSA_NULL, M4OSA_NULL); 3660 if (err != M4NO_ERROR) 3661 { 3662 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err); 3663 M4MCS_abort(mcs_context); 3664 return err; 3665 } 3666 3667 err = M4MCS_getInputFileProperties(mcs_context, pFileProperties); 3668 if(err != M4NO_ERROR) 3669 { 3670 M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", 
err); 3671 M4MCS_abort(mcs_context); 3672 return err; 3673 } 3674 3675 err = M4MCS_abort(mcs_context); 3676 if (err != M4NO_ERROR) 3677 { 3678 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err); 3679 return err; 3680 } 3681 3682 return M4NO_ERROR; 3683} 3684 3685 3686/** 3687 ****************************************************************************** 3688 * prototype M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext, 3689 * M4OSA_UInt32* pTargetedTimeScale) 3690 * 3691 * @brief This function retrieve targeted time scale 3692 * @note 3693 * @param pContext (IN) The integrator own context 3694 * @param pTargetedTimeScale (OUT) Targeted time scale 3695 * 3696 * @return M4NO_ERROR: No error 3697 * @return M4ERR_PARAMETER: At least one of the function parameters is null 3698 ****************************************************************************** 3699 */ 3700M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext, 3701 M4VSS3GPP_EditSettings* pSettings, 3702 M4OSA_UInt32* pTargetedTimeScale) 3703{ 3704 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 3705 M4OSA_ERR err; 3706 M4OSA_UInt32 totalDuration = 0; 3707 M4OSA_UInt8 i = 0; 3708 M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0; 3709 3710 for(i=0;i<pSettings->uiClipNumber;i++) 3711 { 3712 /*search timescale only in mpeg4 case*/ 3713 if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP 3714 || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4) 3715 { 3716 M4VIDEOEDITING_ClipProperties fileProperties; 3717 3718 /*UTF conversion support*/ 3719 M4OSA_Char* pDecodedPath = M4OSA_NULL; 3720 3721 /** 3722 * UTF conversion: convert into the customer format, before being used*/ 3723 pDecodedPath = pSettings->pClipList[i]->pFile; 3724 3725 if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL 3726 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 3727 { 3728 M4OSA_UInt32 
length = 0; 3729 err = M4xVSS_internalConvertFromUTF8(xVSS_context, 3730 (M4OSA_Void*) pSettings->pClipList[i]->pFile, 3731 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 3732 &length); 3733 if(err != M4NO_ERROR) 3734 { 3735 M4OSA_TRACE1_1("M4xVSS_Init:\ 3736 M4xVSS_internalConvertToUTF8 returns err: 0x%x",err); 3737 return err; 3738 } 3739 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 3740 } 3741 3742 /*End of the conversion: use the decoded path*/ 3743 err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties); 3744 3745 /*get input file properties*/ 3746 /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\ 3747 pClipList[i]->pFile, &fileProperties);*/ 3748 if(M4NO_ERROR != err) 3749 { 3750 M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\ 3751 M4xVSS_internalGetProperties returned: 0x%x", err); 3752 return err; 3753 } 3754 if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4) 3755 { 3756 if(pSettings->pClipList[i]->uiEndCutTime > 0) 3757 { 3758 if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \ 3759 - pSettings->pClipList[i]->uiBeginCutTime)) 3760 { 3761 tempTimeScale = fileProperties.uiVideoTimeScale; 3762 tempDuration = (pSettings->pClipList[i]->uiEndCutTime\ 3763 - pSettings->pClipList[i]->uiBeginCutTime); 3764 } 3765 } 3766 else 3767 { 3768 if(tempDuration < (fileProperties.uiClipDuration\ 3769 - pSettings->pClipList[i]->uiBeginCutTime)) 3770 { 3771 tempTimeScale = fileProperties.uiVideoTimeScale; 3772 tempDuration = (fileProperties.uiClipDuration\ 3773 - pSettings->pClipList[i]->uiBeginCutTime); 3774 } 3775 } 3776 } 3777 } 3778 if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) 3779 { 3780 /*the timescale is 30 for PTO3GP*/ 3781 *pTargetedTimeScale = 30; 3782 return M4NO_ERROR; 3783 3784 } 3785 } 3786 3787 if(tempTimeScale >= 30)/*Define a minimum time scale, otherwise if the timescale is not 3788 enough, there will be an 
infinite loop in the shell encoder*/ 3789 { 3790 *pTargetedTimeScale = tempTimeScale; 3791 } 3792 else 3793 { 3794 *pTargetedTimeScale = 30; 3795 } 3796 3797 return M4NO_ERROR; 3798} 3799 3800 3801/** 3802 ****************************************************************************** 3803 * prototype M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, 3804 * M4VIFI_ImagePlane *PlaneIn, 3805 * M4VIFI_ImagePlane *PlaneOut, 3806 * M4VSS3GPP_ExternalProgress *pProgress, 3807 * M4OSA_UInt32 uiEffectKind) 3808 * 3809 * @brief This function apply a color effect on an input YUV420 planar frame 3810 * @note 3811 * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 3812 * @param PlaneIn (IN) Input YUV420 planar 3813 * @param PlaneOut (IN/OUT) Output YUV420 planar 3814 * @param pProgress (IN/OUT) Progress indication (0-100) 3815 * @param uiEffectKind (IN) Unused 3816 * 3817 * @return M4VIFI_OK: No error 3818 ****************************************************************************** 3819 */ 3820M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, 3821 M4VIFI_ImagePlane *PlaneIn, 3822 M4VIFI_ImagePlane *PlaneOut, 3823 M4VSS3GPP_ExternalProgress *pProgress, 3824 M4OSA_UInt32 uiEffectKind) 3825{ 3826 M4VIFI_Int32 plane_number; 3827 M4VIFI_UInt32 i,j; 3828 M4VIFI_UInt8 *p_buf_src, *p_buf_dest; 3829 M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext; 3830 3831 for (plane_number = 0; plane_number < 3; plane_number++) 3832 { 3833 p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]); 3834 p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]); 3835 for (i = 0; i < PlaneOut[plane_number].u_height; i++) 3836 { 3837 /** 3838 * Chrominance */ 3839 if(plane_number==1 || plane_number==2) 3840 { 3841 //switch ((M4OSA_UInt32)pFunctionContext) 3842 // commented because a structure for the effects context exist 3843 switch (ColorContext->colorEffectType) 
3844 { 3845 case M4xVSS_kVideoEffectType_BlackAndWhite: 3846 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3847 PlaneIn[plane_number].u_width, 128); 3848 break; 3849 case M4xVSS_kVideoEffectType_Pink: 3850 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3851 PlaneIn[plane_number].u_width, 255); 3852 break; 3853 case M4xVSS_kVideoEffectType_Green: 3854 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3855 PlaneIn[plane_number].u_width, 0); 3856 break; 3857 case M4xVSS_kVideoEffectType_Sepia: 3858 if(plane_number==1) 3859 { 3860 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3861 PlaneIn[plane_number].u_width, 117); 3862 } 3863 else 3864 { 3865 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3866 PlaneIn[plane_number].u_width, 139); 3867 } 3868 break; 3869 case M4xVSS_kVideoEffectType_Negative: 3870 M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest, 3871 (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width); 3872 break; 3873 3874 case M4xVSS_kVideoEffectType_ColorRGB16: 3875 { 3876 M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0; 3877 3878 /*first get the r, g, b*/ 3879 b = (ColorContext->rgb16ColorData & 0x001f); 3880 g = (ColorContext->rgb16ColorData & 0x07e0)>>5; 3881 r = (ColorContext->rgb16ColorData & 0xf800)>>11; 3882 3883 /*keep y, but replace u and v*/ 3884 if(plane_number==1) 3885 { 3886 /*then convert to u*/ 3887 u = U16(r, g, b); 3888 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3889 PlaneIn[plane_number].u_width, (M4OSA_UInt8)u); 3890 } 3891 if(plane_number==2) 3892 { 3893 /*then convert to v*/ 3894 v = V16(r, g, b); 3895 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3896 PlaneIn[plane_number].u_width, (M4OSA_UInt8)v); 3897 } 3898 } 3899 break; 3900 case M4xVSS_kVideoEffectType_Gradient: 3901 { 3902 M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0; 3903 3904 /*first get the r, g, b*/ 3905 b = (ColorContext->rgb16ColorData & 0x001f); 3906 g = (ColorContext->rgb16ColorData & 0x07e0)>>5; 3907 r = (ColorContext->rgb16ColorData & 0xf800)>>11; 3908 3909 /*for color gradation*/ 3910 b = (M4OSA_UInt16)( b - 
((b*i)/PlaneIn[plane_number].u_height)); 3911 g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height)); 3912 r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height)); 3913 3914 /*keep y, but replace u and v*/ 3915 if(plane_number==1) 3916 { 3917 /*then convert to u*/ 3918 u = U16(r, g, b); 3919 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3920 PlaneIn[plane_number].u_width, (M4OSA_UInt8)u); 3921 } 3922 if(plane_number==2) 3923 { 3924 /*then convert to v*/ 3925 v = V16(r, g, b); 3926 M4OSA_memset((M4OSA_MemAddr8)p_buf_dest, 3927 PlaneIn[plane_number].u_width, (M4OSA_UInt8)v); 3928 } 3929 } 3930 break; 3931 default: 3932 break; 3933 } 3934 } 3935 /** 3936 * Luminance */ 3937 else 3938 { 3939 //switch ((M4OSA_UInt32)pFunctionContext) 3940 // commented because a structure for the effects context exist 3941 switch (ColorContext->colorEffectType) 3942 { 3943 case M4xVSS_kVideoEffectType_Negative: 3944 for(j=0;j<PlaneOut[plane_number].u_width;j++) 3945 { 3946 p_buf_dest[j] = 255 - p_buf_src[j]; 3947 } 3948 break; 3949 default: 3950 M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest, 3951 (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width); 3952 break; 3953 } 3954 } 3955 p_buf_src += PlaneIn[plane_number].u_stride; 3956 p_buf_dest += PlaneOut[plane_number].u_stride; 3957 } 3958 } 3959 3960 return M4VIFI_OK; 3961} 3962 3963/** 3964 ****************************************************************************** 3965 * prototype M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext, 3966 * M4VIFI_ImagePlane *PlaneIn, 3967 * M4VIFI_ImagePlane *PlaneOut, 3968 * M4VSS3GPP_ExternalProgress *pProgress, 3969 * M4OSA_UInt32 uiEffectKind) 3970 * 3971 * @brief This function add a fixed or animated image on an input YUV420 planar frame 3972 * @note 3973 * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 
 * @param    PlaneIn            (IN) Input YUV420 planar
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress        (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return    M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
                                                M4VIFI_ImagePlane PlaneIn[3],
                                                M4VIFI_ImagePlane *PlaneOut,
                                                M4VSS3GPP_ExternalProgress *pProgress,
                                                M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x,y;

    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;

    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;

    M4VIFI_UInt32 topleft[2];

    /* RGB565 color key split into its two bytes for the per-pixel test below */
    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

#ifndef DECODE_GIF_ON_SAVING
    /* userData is directly the framing structure in this configuration */
    Framing = (M4xVSS_FramingStruct *)userData;
    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/

    /*FB*/
#ifdef DECODE_GIF_ON_SAVING
    M4OSA_ERR err;
    /* userData is a framing context wrapping the framing structure here */
    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
#if 0   /* Disabled lazy GIF decode path */
    if(Framing == M4OSA_NULL)
    {
        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
        err = M4xVSS_internalDecodeGIF(userData);
        if(M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
                Error in M4xVSS_internalDecodeGIF: 0x%x", err);
            return err;
        }
        Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
        /* Initializes first GIF time */
        ((M4xVSS_FramingContext*)userData)->current_gif_time = pProgress->uiOutputTime;
    }
#endif
    currentFraming = (M4xVSS_FramingStruct *)Framing;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/
    /*end FB*/

    /**
     * Initialize input / output plane pointers */
    p_in_Y += PlaneIn[0].u_topleft;
    p_in_U += PlaneIn[1].u_topleft;
    p_in_V += PlaneIn[2].u_topleft;

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    /**
     * Depending on time, initialize Framing frame to use */
    if(Framing->previousClipTime == -1)
    {
        /* -1 is the "never rendered yet" sentinel */
        Framing->previousClipTime = pProgress->uiOutputTime;
    }

    /**
     * If the current clip time has reach the duration of one frame of the framing picture
     * we need to step to next framing picture */
#if 0   /* Disabled animated-GIF frame stepping */
    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)
    {
        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration)\
             < pProgress->uiOutputTime)
        {
#ifdef DECODE_GIF_ON_SAVING
            ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
            err = M4xVSS_internalDecodeGIF(userData);
            if(M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
                    Error in M4xVSS_internalDecodeGIF: 0x%x", err);
                return err;
            }
            if(currentFraming->duration != 0)
            {
                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;
            }
            else
            {
                ((M4xVSS_FramingContext*)userData)->current_gif_time \
                    += pProgress->uiOutputTime - Framing->previousClipTime;
            }
            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
            currentFraming = (M4xVSS_FramingStruct *)Framing;
            FramingRGB = Framing->FramingRgb->pac_data;
#else
            Framing->pCurrent = currentFraming->pNext;
            currentFraming = Framing->pCurrent;
#endif /*DECODE_GIF_ON_SAVING*/
        }
    }
#endif

    Framing->previousClipTime = pProgress->uiOutputTime;
    FramingRGB = currentFraming->FramingRgb->pac_data;
    topleft[0] = currentFraming->topleft_x;
    topleft[1] = currentFraming->topleft_y;

    /* NOTE(review): x walks rows (height), y walks columns (width) here —
       names are swapped relative to the usual convention */
    for( x=0 ;x < PlaneIn[0].u_height ; x++)
    {
        for( y=0 ;y < PlaneIn[0].u_width ; y++)
        {
            /**
             * To handle framing with input size != output size
             * Framing is applyed if coordinates matches between framing/topleft and input plane */
            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
                y >= topleft[0] &&
                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
                x >= topleft[1])
            {
                /*Alpha blending support*/
                M4OSA_Float alphaBlending = 1;
                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
                 (M4xVSS_internalEffectsAlphaBlending*)\
                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;

                if(alphaBlendingStruct != M4OSA_NULL)
                {
                    /* uiProgress is in 0..1000; fade times are in units of
                       1/10 of the clip, hence the *10 scaling */
                    if(pProgress->uiProgress >= 0 && pProgress->uiProgress \
                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
                    {
                        /* Fade-in: interpolate from m_start towards m_middle */
                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
                         - alphaBlendingStruct->m_start)\
                            *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
                        alphaBlending += alphaBlendingStruct->m_start;
                        alphaBlending /= 100;
                    }
                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        /* Steady state: constant middle opacity */
                        alphaBlending = (M4OSA_Float)\
                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
                    }
                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
                    (alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        /* Fade-out: interpolate from m_middle towards m_end */
                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
                        - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
                        /(alphaBlendingStruct->m_fadeOutTime*10);
                        alphaBlending += alphaBlendingStruct->m_end;
                        alphaBlending /= 100;
                    }
                }
                /**/

                /* Color-keyed pixel: pass the video through untouched */
                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
                {
                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
                }
                else
                {
                    /* Blend framing over video: out = frame*a + video*(1-a);
                       chroma planes are subsampled, hence the >>1 indices */
                    *( p_out0+y+x*PlaneOut[0].u_stride)=
                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
                                *alphaBlending;
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
                                *alphaBlending;
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
                }
                /* Skip the RGB pixels of the framing picture that fall outside
                   the input plane when the framing is wider than the video */
                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
                    y == PlaneIn[0].u_width-1)
                {
                    FramingRGB = FramingRGB + 2 \
                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
                            - PlaneIn[0].u_width + 1);
                }
                else
                {
                    FramingRGB = FramingRGB + 2;
                }
            }
            /**
             * Just copy input plane to output plane */
            else
            {
                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
4191 *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)= 4192 *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride); 4193 *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)= 4194 *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride); 4195 } 4196 } 4197 } 4198 4199#ifdef DECODE_GIF_ON_SAVING 4200#if 0 4201 if(pProgress->bIsLast == M4OSA_TRUE 4202 && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE) 4203 { 4204 M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData); 4205 } 4206#endif 4207#endif /*DECODE_GIF_ON_SAVING*/ 4208 4209 return M4VIFI_OK; 4210} 4211 4212 4213/** 4214 ****************************************************************************** 4215 * prototype M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext, 4216 * M4VIFI_ImagePlane *PlaneIn, 4217 * M4VIFI_ImagePlane *PlaneOut, 4218 * M4VSS3GPP_ExternalProgress *pProgress, 4219 * M4OSA_UInt32 uiEffectKind) 4220 * 4221 * @brief This function make a video look as if it was taken in the fifties 4222 * @note 4223 * @param pUserData (IN) Context 4224 * @param pPlaneIn (IN) Input YUV420 planar 4225 * @param pPlaneOut (IN/OUT) Output YUV420 planar 4226 * @param pProgress (IN/OUT) Progress indication (0-100) 4227 * @param uiEffectKind (IN) Unused 4228 * 4229 * @return M4VIFI_OK: No error 4230 * @return M4ERR_PARAMETER: pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only) 4231 ****************************************************************************** 4232 */ 4233M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData, 4234 M4VIFI_ImagePlane *pPlaneIn, 4235 M4VIFI_ImagePlane *pPlaneOut, 4236 M4VSS3GPP_ExternalProgress *pProgress, 4237 M4OSA_UInt32 uiEffectKind ) 4238{ 4239 M4VIFI_UInt32 x, y, xShift; 4240 M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data; 4241 M4VIFI_UInt8 *pOutY, *pInYbegin; 4242 M4VIFI_UInt8 *pInCr,* pOutCr; 4243 M4VIFI_Int32 plane_number; 4244 4245 /* Internal context*/ 4246 M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData; 4247 4248 
/* Check the inputs (debug only) */ 4249 M4OSA_DEBUG_IF2((pFiftiesData == M4OSA_NULL),M4ERR_PARAMETER, 4250 "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4251 M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER, 4252 "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4253 M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER, 4254 "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4255 4256 /* Initialize input / output plane pointers */ 4257 pInY += pPlaneIn[0].u_topleft; 4258 pOutY = pPlaneOut[0].pac_data; 4259 pInYbegin = pInY; 4260 4261 /* Initialize the random */ 4262 if(p_FiftiesData->previousClipTime < 0) 4263 { 4264 M4OSA_randInit(); 4265 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 4266 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 4267 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 4268 } 4269 4270 /* Choose random values if we have reached the duration of a partial effect */ 4271 else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\ 4272 > p_FiftiesData->fiftiesEffectDuration) 4273 { 4274 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 4275 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 4276 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 4277 } 4278 4279 /* Put in Sepia the chrominance */ 4280 for (plane_number = 1; plane_number < 3; plane_number++) 4281 { 4282 pInCr = pPlaneIn[plane_number].pac_data + pPlaneIn[plane_number].u_topleft; 4283 pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft; 4284 4285 for (x = 0; x < pPlaneOut[plane_number].u_height; x++) 4286 { 4287 if (1 == plane_number) 4288 M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 4289 117); /* U value */ 4290 else 4291 
M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 4292 139); /* V value */ 4293 4294 pInCr += pPlaneIn[plane_number].u_stride; 4295 pOutCr += pPlaneOut[plane_number].u_stride; 4296 } 4297 } 4298 4299 /* Compute the new pixels values */ 4300 for( x = 0 ; x < pPlaneIn[0].u_height ; x++) 4301 { 4302 M4VIFI_UInt8 *p_outYtmp, *p_inYtmp; 4303 4304 /* Compute the xShift (random value) */ 4305 if (0 == (p_FiftiesData->shiftRandomValue % 5 )) 4306 xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1); 4307 else 4308 xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \ 4309 % (pPlaneIn[0].u_height - 1); 4310 4311 /* Initialize the pointers */ 4312 p_outYtmp = pOutY + 1; /* yShift of 1 pixel */ 4313 p_inYtmp = pInYbegin + (xShift * pPlaneIn[0].u_stride); /* Apply the xShift */ 4314 4315 for( y = 0 ; y < pPlaneIn[0].u_width ; y++) 4316 { 4317 /* Set Y value */ 4318 if (xShift > (pPlaneIn[0].u_height - 4)) 4319 *p_outYtmp = 40; /* Add some horizontal black lines between the 4320 two parts of the image */ 4321 else if ( y == p_FiftiesData->stripeRandomValue) 4322 *p_outYtmp = 90; /* Add a random vertical line for the bulk */ 4323 else 4324 *p_outYtmp = *p_inYtmp; 4325 4326 4327 /* Go to the next pixel */ 4328 p_outYtmp++; 4329 p_inYtmp++; 4330 4331 /* Restart at the beginning of the line for the last pixel*/ 4332 if (y == (pPlaneIn[0].u_width - 2)) 4333 p_outYtmp = pOutY; 4334 } 4335 4336 /* Go to the next line */ 4337 pOutY += pPlaneOut[0].u_stride; 4338 } 4339 4340 return M4VIFI_OK; 4341} 4342 4343/** 4344 ****************************************************************************** 4345 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( ) 4346 * @brief Zoom in/out video effect functions. 4347 * @note The external video function is used only if VideoEffectType is set to 4348 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut. 
4349 * 4350 * @param pFunctionContext (IN) The function context, previously set by the integrator 4351 * @param pInputPlanes (IN) Input YUV420 image: pointer to an array of three valid 4352 * image planes (Y, U and V) 4353 * @param pOutputPlanes (IN/OUT) Output (filtered) YUV420 image: pointer to an array of 4354 * three valid image planes (Y, U and V) 4355 * @param pProgress (IN) Set of information about the video transition progress. 4356 * @return M4NO_ERROR: No error 4357 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL (debug only) 4358 ****************************************************************************** 4359 */ 4360 4361M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( 4362 M4OSA_Void *pFunctionContext, 4363 M4VIFI_ImagePlane *pInputPlanes, 4364 M4VIFI_ImagePlane *pOutputPlanes, 4365 M4VSS3GPP_ExternalProgress *pProgress, 4366 M4OSA_UInt32 uiEffectKind 4367) 4368{ 4369 M4OSA_UInt32 boxWidth; 4370 M4OSA_UInt32 boxHeight; 4371 M4OSA_UInt32 boxPosX; 4372 M4OSA_UInt32 boxPosY; 4373 M4OSA_UInt32 ratio = 0; 4374 /* * 1.189207 between ratio */ 4375 /* zoom between x1 and x16 */ 4376 M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\ 4377 6889,8192,9742,11585,13777,16384}; 4378 M4OSA_UInt32 ik; 4379 4380 M4VIFI_ImagePlane boxPlane[3]; 4381 4382 if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext) 4383 { 4384 //ratio = 16 - (15 * pProgress->uiProgress)/1000; 4385 ratio = 16 - pProgress->uiProgress / 66 ; 4386 } 4387 else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext) 4388 { 4389 //ratio = 1 + (15 * pProgress->uiProgress)/1000; 4390 ratio = 1 + pProgress->uiProgress / 66 ; 4391 } 4392 4393 for(ik=0;ik<3;ik++){ 4394 4395 boxPlane[ik].u_stride = pInputPlanes[ik].u_stride; 4396 boxPlane[ik].pac_data = pInputPlanes[ik].pac_data; 4397 4398 boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio]; 4399 boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio]; 4400 
boxPlane[ik].u_height = (boxHeight)&(~1); 4401 boxPlane[ik].u_width = (boxWidth)&(~1); 4402 4403 boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1); 4404 boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1); 4405 boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX; 4406 } 4407 4408 M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes); 4409 4410 /** 4411 * Return */ 4412 return(M4NO_ERROR); 4413} 4414 4415/** 4416 ****************************************************************************** 4417 * prototype M4xVSS_AlphaMagic( M4OSA_Void *userData, 4418 * M4VIFI_ImagePlane PlaneIn1[3], 4419 * M4VIFI_ImagePlane PlaneIn2[3], 4420 * M4VIFI_ImagePlane *PlaneOut, 4421 * M4VSS3GPP_ExternalProgress *pProgress, 4422 * M4OSA_UInt32 uiTransitionKind) 4423 * 4424 * @brief This function apply a color effect on an input YUV420 planar frame 4425 * @note 4426 * @param userData (IN) Contains a pointer on a settings structure 4427 * @param PlaneIn1 (IN) Input YUV420 planar from video 1 4428 * @param PlaneIn2 (IN) Input YUV420 planar from video 2 4429 * @param PlaneOut (IN/OUT) Output YUV420 planar 4430 * @param pProgress (IN/OUT) Progress indication (0-100) 4431 * @param uiTransitionKind(IN) Unused 4432 * 4433 * @return M4VIFI_OK: No error 4434 ****************************************************************************** 4435 */ 4436M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4437 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4438 M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind) 4439{ 4440 4441 M4OSA_ERR err; 4442 4443 M4xVSS_internal_AlphaMagicSettings* alphaContext; 4444 M4VIFI_Int32 alphaProgressLevel; 4445 4446 M4VIFI_ImagePlane* planeswap; 4447 M4VIFI_UInt32 x,y; 4448 4449 M4VIFI_UInt8 *p_out0; 4450 M4VIFI_UInt8 *p_out1; 4451 M4VIFI_UInt8 *p_out2; 4452 M4VIFI_UInt8 *alphaMask; 4453 /* "Old image" */ 4454 
M4VIFI_UInt8 *p_in1_Y; 4455 M4VIFI_UInt8 *p_in1_U; 4456 M4VIFI_UInt8 *p_in1_V; 4457 /* "New image" */ 4458 M4VIFI_UInt8 *p_in2_Y; 4459 M4VIFI_UInt8 *p_in2_U; 4460 M4VIFI_UInt8 *p_in2_V; 4461 4462 err = M4NO_ERROR; 4463 4464 alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; 4465 4466 alphaProgressLevel = (pProgress->uiProgress * 255)/1000; 4467 4468 if( alphaContext->isreverse != M4OSA_FALSE) 4469 { 4470 alphaProgressLevel = 255 - alphaProgressLevel; 4471 planeswap = PlaneIn1; 4472 PlaneIn1 = PlaneIn2; 4473 PlaneIn2 = planeswap; 4474 } 4475 4476 p_out0 = PlaneOut[0].pac_data; 4477 p_out1 = PlaneOut[1].pac_data; 4478 p_out2 = PlaneOut[2].pac_data; 4479 4480 alphaMask = alphaContext->pPlane->pac_data; 4481 4482 /* "Old image" */ 4483 p_in1_Y = PlaneIn1[0].pac_data; 4484 p_in1_U = PlaneIn1[1].pac_data; 4485 p_in1_V = PlaneIn1[2].pac_data; 4486 /* "New image" */ 4487 p_in2_Y = PlaneIn2[0].pac_data; 4488 p_in2_U = PlaneIn2[1].pac_data; 4489 p_in2_V = PlaneIn2[2].pac_data; 4490 4491 /** 4492 * For each column ... */ 4493 for( y=0; y<PlaneOut->u_height; y++ ) 4494 { 4495 /** 4496 * ... 
and each row of the alpha mask */ 4497 for( x=0; x<PlaneOut->u_width; x++ ) 4498 { 4499 /** 4500 * If the value of the current pixel of the alpha mask is > to the current time 4501 * ( current time is normalized on [0-255] ) */ 4502 if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] ) 4503 { 4504 /* We keep "old image" in output plane */ 4505 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); 4506 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4507 *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride); 4508 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4509 *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride); 4510 } 4511 else 4512 { 4513 /* We take "new image" in output plane */ 4514 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); 4515 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4516 *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride); 4517 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4518 *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride); 4519 } 4520 } 4521 } 4522 4523 return(err); 4524} 4525 4526/** 4527 ****************************************************************************** 4528 * prototype M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, 4529 * M4VIFI_ImagePlane PlaneIn1[3], 4530 * M4VIFI_ImagePlane PlaneIn2[3], 4531 * M4VIFI_ImagePlane *PlaneOut, 4532 * M4VSS3GPP_ExternalProgress *pProgress, 4533 * M4OSA_UInt32 uiTransitionKind) 4534 * 4535 * @brief This function apply a color effect on an input YUV420 planar frame 4536 * @note 4537 * @param userData (IN) Contains a pointer on a settings structure 4538 * @param PlaneIn1 (IN) Input YUV420 planar from video 1 4539 * @param PlaneIn2 (IN) Input YUV420 planar from video 2 4540 * @param PlaneOut (IN/OUT) Output YUV420 planar 4541 * @param pProgress (IN/OUT) Progress indication (0-100) 4542 * @param uiTransitionKind(IN) Unused 4543 * 4544 * @return M4VIFI_OK: No error 4545 ****************************************************************************** 4546 */ 4547M4OSA_ERR 
M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4548 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4549 M4VSS3GPP_ExternalProgress *pProgress, 4550 M4OSA_UInt32 uiTransitionKind) 4551{ 4552 M4OSA_ERR err; 4553 4554 M4xVSS_internal_AlphaMagicSettings* alphaContext; 4555 M4VIFI_Int32 alphaProgressLevel; 4556 M4VIFI_Int32 alphaBlendLevelMin; 4557 M4VIFI_Int32 alphaBlendLevelMax; 4558 M4VIFI_Int32 alphaBlendRange; 4559 4560 M4VIFI_ImagePlane* planeswap; 4561 M4VIFI_UInt32 x,y; 4562 M4VIFI_Int32 alphaMaskValue; 4563 4564 M4VIFI_UInt8 *p_out0; 4565 M4VIFI_UInt8 *p_out1; 4566 M4VIFI_UInt8 *p_out2; 4567 M4VIFI_UInt8 *alphaMask; 4568 /* "Old image" */ 4569 M4VIFI_UInt8 *p_in1_Y; 4570 M4VIFI_UInt8 *p_in1_U; 4571 M4VIFI_UInt8 *p_in1_V; 4572 /* "New image" */ 4573 M4VIFI_UInt8 *p_in2_Y; 4574 M4VIFI_UInt8 *p_in2_U; 4575 M4VIFI_UInt8 *p_in2_V; 4576 4577 4578 err = M4NO_ERROR; 4579 4580 alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; 4581 4582 alphaProgressLevel = (pProgress->uiProgress * 255)/1000; 4583 4584 if( alphaContext->isreverse != M4OSA_FALSE) 4585 { 4586 alphaProgressLevel = 255 - alphaProgressLevel; 4587 planeswap = PlaneIn1; 4588 PlaneIn1 = PlaneIn2; 4589 PlaneIn2 = planeswap; 4590 } 4591 4592 alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold; 4593 4594 alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold; 4595 4596 alphaBlendRange = (alphaContext->blendingthreshold)*2; 4597 4598 p_out0 = PlaneOut[0].pac_data; 4599 p_out1 = PlaneOut[1].pac_data; 4600 p_out2 = PlaneOut[2].pac_data; 4601 4602 alphaMask = alphaContext->pPlane->pac_data; 4603 4604 /* "Old image" */ 4605 p_in1_Y = PlaneIn1[0].pac_data; 4606 p_in1_U = PlaneIn1[1].pac_data; 4607 p_in1_V = PlaneIn1[2].pac_data; 4608 /* "New image" */ 4609 p_in2_Y = PlaneIn2[0].pac_data; 4610 p_in2_U = PlaneIn2[1].pac_data; 4611 p_in2_V = PlaneIn2[2].pac_data; 4612 4613 /* apply Alpha Magic on each pixel */ 4614 for( y=0; 
y<PlaneOut->u_height; y++ ) 4615 { 4616 for( x=0; x<PlaneOut->u_width; x++ ) 4617 { 4618 alphaMaskValue = alphaMask[x+y*PlaneOut->u_width]; 4619 if( alphaBlendLevelMax < alphaMaskValue ) 4620 { 4621 /* We keep "old image" in output plane */ 4622 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); 4623 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4624 *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride); 4625 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4626 *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride); 4627 } 4628 else if( (alphaBlendLevelMin < alphaMaskValue)&& 4629 (alphaMaskValue <= alphaBlendLevelMax ) ) 4630 { 4631 /* We blend "old and new image" in output plane */ 4632 *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8) 4633 (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride)) 4634 +(alphaBlendLevelMax-alphaMaskValue)\ 4635 *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange ); 4636 4637 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\ 4638 (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\ 4639 *PlaneIn1[1].u_stride)) 4640 +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\ 4641 *PlaneIn2[1].u_stride)) )/alphaBlendRange ); 4642 4643 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4644 (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\ 4645 *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride)) 4646 +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\ 4647 *PlaneIn2[2].u_stride)) )/alphaBlendRange ); 4648 4649 } 4650 else 4651 { 4652 /* We take "new image" in output plane */ 4653 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); 4654 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4655 *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride); 4656 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4657 *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride); 4658 } 4659 } 4660 } 4661 4662 return(err); 4663} 4664 4665#define M4XXX_SampleAddress(plane, x, y) ( 
(plane).pac_data + (plane).u_topleft + (y)\ 4666 * (plane).u_stride + (x) ) 4667 4668static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source) 4669{ 4670 M4OSA_UInt32 height, width, sourceStride, destStride, y; 4671 M4OSA_MemAddr8 sourceWalk, destWalk; 4672 4673 /* cache the vars used in the loop so as to avoid them being repeatedly fetched and 4674 recomputed from memory. */ 4675 height = dest->u_height; 4676 width = dest->u_width; 4677 4678 sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0); 4679 sourceStride = source->u_stride; 4680 4681 destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0); 4682 destStride = dest->u_stride; 4683 4684 for (y=0; y<height; y++) 4685 { 4686 M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width); 4687 destWalk += destStride; 4688 sourceWalk += sourceStride; 4689 } 4690} 4691 4692static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane, 4693 M4VIFI_ImagePlane* bottomPlane, 4694 M4VIFI_ImagePlane *PlaneOut, 4695 M4OSA_UInt32 shiftUV) 4696{ 4697 M4OSA_UInt32 i; 4698 4699 /* Do three loops, one for each plane type, in order to avoid having too many buffers 4700 "hot" at the same time (better for cache). */ 4701 for (i=0; i<3; i++) 4702 { 4703 M4OSA_UInt32 topPartHeight, bottomPartHeight, width, sourceStride, destStride, y; 4704 M4OSA_MemAddr8 sourceWalk, destWalk; 4705 4706 /* cache the vars used in the loop so as to avoid them being repeatedly fetched and 4707 recomputed from memory. 
*/ 4708 if (0 == i) /* Y plane */ 4709 { 4710 bottomPartHeight = 2*shiftUV; 4711 } 4712 else /* U and V planes */ 4713 { 4714 bottomPartHeight = shiftUV; 4715 } 4716 topPartHeight = PlaneOut[i].u_height - bottomPartHeight; 4717 width = PlaneOut[i].u_width; 4718 4719 sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight); 4720 sourceStride = topPlane[i].u_stride; 4721 4722 destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0); 4723 destStride = PlaneOut[i].u_stride; 4724 4725 /* First the part from the top source clip frame. */ 4726 for (y=0; y<topPartHeight; y++) 4727 { 4728 M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width); 4729 destWalk += destStride; 4730 sourceWalk += sourceStride; 4731 } 4732 4733 /* and now change the vars to copy the part from the bottom source clip frame. */ 4734 sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0); 4735 sourceStride = bottomPlane[i].u_stride; 4736 4737 /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */ 4738 4739 for (y=0; y<bottomPartHeight; y++) 4740 { 4741 M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width); 4742 destWalk += destStride; 4743 sourceWalk += sourceStride; 4744 } 4745 } 4746 return M4NO_ERROR; 4747} 4748 4749static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane, 4750 M4VIFI_ImagePlane* rightPlane, 4751 M4VIFI_ImagePlane *PlaneOut, 4752 M4OSA_UInt32 shiftUV) 4753{ 4754 M4OSA_UInt32 i, y; 4755 /* If we shifted by exactly 0, or by the width of the target image, then we would get the left 4756 frame or the right frame, respectively. These cases aren't handled too well by the general 4757 handling, since they result in 0-size memcopies, so might as well particularize them. 
*/ 4758 4759 if (0 == shiftUV) /* output left frame */ 4760 { 4761 for (i = 0; i<3; i++) /* for each YUV plane */ 4762 { 4763 M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i])); 4764 } 4765 4766 return M4NO_ERROR; 4767 } 4768 4769 if (PlaneOut[1].u_width == shiftUV) /* output right frame */ 4770 { 4771 for (i = 0; i<3; i++) /* for each YUV plane */ 4772 { 4773 M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i])); 4774 } 4775 4776 return M4NO_ERROR; 4777 } 4778 4779 4780 /* Do three loops, one for each plane type, in order to avoid having too many buffers 4781 "hot" at the same time (better for cache). */ 4782 for (i=0; i<3; i++) 4783 { 4784 M4OSA_UInt32 height, leftPartWidth, rightPartWidth; 4785 M4OSA_UInt32 leftStride, rightStride, destStride; 4786 M4OSA_MemAddr8 leftWalk, rightWalk, destWalkLeft, destWalkRight; 4787 4788 /* cache the vars used in the loop so as to avoid them being repeatedly fetched 4789 and recomputed from memory. */ 4790 height = PlaneOut[i].u_height; 4791 4792 if (0 == i) /* Y plane */ 4793 { 4794 rightPartWidth = 2*shiftUV; 4795 } 4796 else /* U and V planes */ 4797 { 4798 rightPartWidth = shiftUV; 4799 } 4800 leftPartWidth = PlaneOut[i].u_width - rightPartWidth; 4801 4802 leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0); 4803 leftStride = leftPlane[i].u_stride; 4804 4805 rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0); 4806 rightStride = rightPlane[i].u_stride; 4807 4808 destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0); 4809 destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0); 4810 destStride = PlaneOut[i].u_stride; 4811 4812 for (y=0; y<height; y++) 4813 { 4814 M4OSA_memcpy((M4OSA_MemAddr8)destWalkLeft, (M4OSA_MemAddr8)leftWalk, leftPartWidth); 4815 leftWalk += leftStride; 4816 4817 M4OSA_memcpy((M4OSA_MemAddr8)destWalkRight, (M4OSA_MemAddr8)rightWalk, rightPartWidth); 4818 rightWalk += rightStride; 4819 4820 destWalkLeft += destStride; 4821 
destWalkRight += destStride; 4822 } 4823 } 4824 4825 return M4NO_ERROR; 4826} 4827 4828 4829M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4830 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4831 M4VSS3GPP_ExternalProgress *pProgress, 4832 M4OSA_UInt32 uiTransitionKind) 4833{ 4834 M4xVSS_internal_SlideTransitionSettings* settings = 4835 (M4xVSS_internal_SlideTransitionSettings*)userData; 4836 M4OSA_UInt32 shiftUV; 4837 4838 M4OSA_TRACE1_0("inside M4xVSS_SlideTransition"); 4839 if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) 4840 || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) ) 4841 { 4842 /* horizontal slide */ 4843 shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000; 4844 M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d", 4845 shiftUV,pProgress->uiProgress ); 4846 if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) 4847 { 4848 /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV 4849 (since it's a shift from the left frame) so that we start out on the right 4850 (i.e. not left) frame, it 4851 being from the previous clip. */ 4852 return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut, 4853 (PlaneOut[1]).u_width - shiftUV); 4854 } 4855 else /* Left out, right in*/ 4856 { 4857 return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); 4858 } 4859 } 4860 else 4861 { 4862 /* vertical slide */ 4863 shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000; 4864 M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV, 4865 pProgress->uiProgress ); 4866 if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction) 4867 { 4868 /* Put the previous clip frame top, the next clip frame bottom. 
*/ 4869 return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); 4870 } 4871 else /* Bottom out, top in */ 4872 { 4873 return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut, 4874 (PlaneOut[1]).u_height - shiftUV); 4875 } 4876 } 4877 4878 /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc., 4879 at least in debug mode. */ 4880} 4881 4882 4883/** 4884 ****************************************************************************** 4885 * prototype M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext, 4886 * M4VIFI_ImagePlane *PlaneIn, 4887 * M4VIFI_ImagePlane *PlaneOut, 4888 * M4VSS3GPP_ExternalProgress *pProgress, 4889 * M4OSA_UInt32 uiEffectKind) 4890 * 4891 * @brief This function apply a fade to black and then a fade from black 4892 * @note 4893 * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 4894 * @param PlaneIn (IN) Input YUV420 planar 4895 * @param PlaneOut (IN/OUT) Output YUV420 planar 4896 * @param pProgress (IN/OUT) Progress indication (0-100) 4897 * @param uiEffectKind (IN) Unused 4898 * 4899 * @return M4VIFI_OK: No error 4900 ****************************************************************************** 4901 */ 4902M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4903 M4VIFI_ImagePlane PlaneIn2[3], 4904 M4VIFI_ImagePlane *PlaneOut, 4905 M4VSS3GPP_ExternalProgress *pProgress, 4906 M4OSA_UInt32 uiTransitionKind) 4907{ 4908 M4OSA_Int32 tmp = 0; 4909 M4OSA_ERR err = M4NO_ERROR; 4910 4911 4912 if((pProgress->uiProgress) < 500) 4913 { 4914 /** 4915 * Compute where we are in the effect (scale is 0->1024) */ 4916 tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 ); 4917 4918 /** 4919 * Apply the darkening effect */ 4920 err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1, 4921 (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); 4922 if (M4NO_ERROR != err) 4923 { 4924 
M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\ 4925 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); 4926 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 4927 } 4928 } 4929 else 4930 { 4931 /** 4932 * Compute where we are in the effect (scale is 0->1024). */ 4933 tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 ); 4934 4935 /** 4936 * Apply the darkening effect */ 4937 err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2, 4938 (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); 4939 if (M4NO_ERROR != err) 4940 { 4941 M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\ 4942 M4VFL_modifyLumaWithScale returns error 0x%x,\ 4943 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err); 4944 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 4945 } 4946 } 4947 4948 4949 return M4VIFI_OK; 4950} 4951 4952 4953/** 4954 ****************************************************************************** 4955 * prototype M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, 4956 * M4OSA_Void* pBufferIn, 4957 * M4OSA_Void* pBufferOut, 4958 * M4OSA_UInt32* convertedSize) 4959 * 4960 * @brief This function convert from the customer format to UTF8 4961 * @note 4962 * @param pContext (IN) The integrator own context 4963 * @param pBufferIn (IN) Buffer to convert 4964 * @param pBufferOut (OUT) Converted buffer 4965 * @param convertedSize (OUT) Size of the converted buffer 4966 * 4967 * @return M4NO_ERROR: No error 4968 * @return M4ERR_PARAMETER: At least one of the function parameters is null 4969 ****************************************************************************** 4970 */ 4971M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn, 4972 M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize) 4973{ 4974 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 4975 M4OSA_ERR err; 4976 4977 pBufferOut = pBufferIn; 4978 if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL 4979 && 
xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 4980 { 4981 M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize; 4982 4983 M4OSA_memset((M4OSA_MemAddr8)xVSS_context->UTFConversionContext.pTempOutConversionBuffer 4984 ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize,0); 4985 4986 err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn, 4987 (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 4988 (M4OSA_UInt32*)&ConvertedSize); 4989 if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL) 4990 { 4991 M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err); 4992 4993 /*free too small buffer*/ 4994 M4OSA_free((M4OSA_MemAddr32)xVSS_context->\ 4995 UTFConversionContext.pTempOutConversionBuffer); 4996 4997 /*re-allocate the buffer*/ 4998 xVSS_context->UTFConversionContext.pTempOutConversionBuffer = 4999 (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA, 5000 (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer"); 5001 if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer) 5002 { 5003 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8"); 5004 return M4ERR_ALLOC; 5005 } 5006 xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize; 5007 5008 M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\ 5009 UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\ 5010 UTFConversionContext.m_TempOutConversionSize,0); 5011 5012 err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn, 5013 (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 5014 (M4OSA_UInt32*)&ConvertedSize); 5015 if(err != M4NO_ERROR) 5016 { 5017 M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err); 5018 return err; 5019 } 5020 } 5021 else if(err != M4NO_ERROR) 5022 { 5023 
M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err); 5024 return err; 5025 } 5026 /*decoded path*/ 5027 pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 5028 (*convertedSize) = ConvertedSize; 5029 } 5030 return M4NO_ERROR; 5031} 5032 5033 5034/** 5035 ****************************************************************************** 5036 * prototype M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext) 5037 * 5038 * @brief This function convert from UTF8 to the customer format 5039 * @note 5040 * @param pContext (IN) The integrator own context 5041 * @param pBufferIn (IN) Buffer to convert 5042 * @param pBufferOut (OUT) Converted buffer 5043 * @param convertedSize (OUT) Size of the converted buffer 5044 * 5045 * @return M4NO_ERROR: No error 5046 * @return M4ERR_PARAMETER: At least one of the function parameters is null 5047 ****************************************************************************** 5048 */ 5049M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn, 5050 M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize) 5051{ 5052 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 5053 M4OSA_ERR err; 5054 5055 pBufferOut = pBufferIn; 5056 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 5057 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 5058 { 5059 M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize; 5060 5061 M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\ 5062 UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\ 5063 UTFConversionContext.m_TempOutConversionSize,0); 5064 5065 err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\ 5066 ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\ 5067 UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize); 5068 if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL) 5069 { 5070 
M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err); 5071 5072 /*free too small buffer*/ 5073 M4OSA_free((M4OSA_MemAddr32)xVSS_context->\ 5074 UTFConversionContext.pTempOutConversionBuffer); 5075 5076 /*re-allocate the buffer*/ 5077 xVSS_context->UTFConversionContext.pTempOutConversionBuffer = 5078 (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA, 5079 (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer"); 5080 if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer) 5081 { 5082 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8"); 5083 return M4ERR_ALLOC; 5084 } 5085 xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize; 5086 5087 M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\ 5088 UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\ 5089 UTFConversionContext.m_TempOutConversionSize,0); 5090 5091 err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn, 5092 (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 5093 (M4OSA_UInt32*)&ConvertedSize); 5094 if(err != M4NO_ERROR) 5095 { 5096 M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err); 5097 return err; 5098 } 5099 } 5100 else if(err != M4NO_ERROR) 5101 { 5102 M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err); 5103 return err; 5104 } 5105 /*decoded path*/ 5106 pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 5107 (*convertedSize) = ConvertedSize; 5108 } 5109 5110 5111 return M4NO_ERROR; 5112} 5113