M4xVSS_internal.c revision 7c9d8018755adf1857571125ba1b3598c96ea506
1/* 2 * Copyright (C) 2004-2011 NXP Software 3 * Copyright (C) 2011 The Android Open Source Project 4 * 5 * Licensed under the Apache License, Version 2.0 (the "License"); 6 * you may not use this file except in compliance with the License. 7 * You may obtain a copy of the License at 8 * 9 * http://www.apache.org/licenses/LICENSE-2.0 10 * 11 * Unless required by applicable law or agreed to in writing, software 12 * distributed under the License is distributed on an "AS IS" BASIS, 13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 * See the License for the specific language governing permissions and 15 * limitations under the License. 16 */ 17/** 18 ****************************************************************************** 19 * @file M4xVSS_internal.c 20 * @brief Internal functions of extended Video Studio Service (Video Studio 2.1) 21 * @note 22 ****************************************************************************** 23 */ 24#include "M4OSA_Debug.h" 25#include "M4OSA_CharStar.h" 26#include "M4OSA_FileExtra.h" 27 28#include "NXPSW_CompilerSwitches.h" 29 30#include "M4VSS3GPP_API.h" 31#include "M4VSS3GPP_ErrorCodes.h" 32 33#include "M4xVSS_API.h" 34#include "M4xVSS_Internal.h" 35 36/*for rgb16 color effect*/ 37#include "M4VIFI_Defines.h" 38#include "M4VIFI_Clip.h" 39 40/** 41 * component includes */ 42#include "M4VFL_transition.h" /**< video effects */ 43 44/* Internal header file of VSS is included because of MMS use case */ 45#include "M4VSS3GPP_InternalTypes.h" 46 47/*Exif header files to add image rendering support (cropping, black borders)*/ 48#include "M4EXIFC_CommonAPI.h" 49// StageFright encoders require %16 resolution 50#include "M4ENCODER_common.h" 51 52#define TRANSPARENT_COLOR 0x7E0 53 54/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */ 55M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, 56 M4VIFI_ImagePlane *pPlaneOut); 57 58 
59/*special MCS function used only in VideoArtist and VideoStudio to open the media in the normal 60 mode. That way the media duration is accurate*/ 61extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn, 62 M4VIDEOEDITING_FileType InputFileType, 63 M4OSA_Void* pFileOut, M4OSA_Void* pTempFile); 64 65 66/** 67 ****************************************************************************** 68 * prototype M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 69 * @brief This function initializes MCS (3GP transcoder) with the given 70 * parameters 71 * @note The transcoding parameters are given by the internal xVSS context. 72 * This context contains a pointer on the current element of the 73 * chained list of MCS parameters. 74 * 75 * @param pContext (IN) Pointer on the xVSS edit context 76 * @return M4NO_ERROR: No error 77 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 78 * @return M4ERR_ALLOC: Memory allocation has failed 79 ****************************************************************************** 80 */ 81M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 82{ 83 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 84 M4OSA_ERR err; 85 M4MCS_Context mcs_context; 86 M4MCS_OutputParams Params; 87 M4MCS_EncodingParams Rates; 88 M4OSA_UInt32 i; 89 90 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 91 if(err != M4NO_ERROR) 92 { 93 M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err); 94 return err; 95 } 96 97#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS 98 /* replay recorded external decoder registrations on the MCS */ 99 for (i=0; i<M4VD_kVideoType_NB; i++) 100 { 101 if (xVSS_context->registeredExternalDecs[i].registered) 102 { 103 err = M4MCS_registerExternalVideoDecoder(mcs_context, i, 104 xVSS_context->registeredExternalDecs[i].pDecoderInterface, 105 xVSS_context->registeredExternalDecs[i].pUserData); 106 if (M4NO_ERROR != err) 107 { 108 
M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\ 109 M4MCS_registerExternalVideoDecoder() returns 0x%x!", err); 110 M4MCS_abort(mcs_context); 111 return err; 112 } 113 } 114 } 115#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */ 116 117 /* replay recorded external encoder registrations on the MCS */ 118 for (i=0; i<M4VE_kEncoderType_NB; i++) 119 { 120 if (xVSS_context->registeredExternalEncs[i].registered) 121 { 122 err = M4MCS_registerExternalVideoEncoder(mcs_context, i, 123 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 124 xVSS_context->registeredExternalEncs[i].pUserData); 125 if (M4NO_ERROR != err) 126 { 127 M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\ 128 M4MCS_registerExternalVideoEncoder() returns 0x%x!", err); 129 M4MCS_abort(mcs_context); 130 return err; 131 } 132 } 133 } 134 135 err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn, 136 xVSS_context->pMCScurrentParams->InputFileType, 137 xVSS_context->pMCScurrentParams->pFileOut, 138 xVSS_context->pMCScurrentParams->pFileTemp); 139 if (err != M4NO_ERROR) 140 { 141 M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err); 142 M4MCS_abort(mcs_context); 143 return err; 144 } 145 146 /** 147 * Fill MCS parameters with the parameters contained in the current element of the 148 MCS parameters chained list */ 149 Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType; 150 Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat; 151 Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize; 152 Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate; 153 Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat; 154 Params.OutputAudioSamplingFrequency = 155 xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency; 156 Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono; 157 Params.pOutputPCMfile = M4OSA_NULL; 158 /*FB 2008/10/20: add media rendering parameter to 
keep aspect ratio*/ 159 switch(xVSS_context->pMCScurrentParams->MediaRendering) 160 { 161 case M4xVSS_kResizing: 162 Params.MediaRendering = M4MCS_kResizing; 163 break; 164 case M4xVSS_kCropping: 165 Params.MediaRendering = M4MCS_kCropping; 166 break; 167 case M4xVSS_kBlackBorders: 168 Params.MediaRendering = M4MCS_kBlackBorders; 169 break; 170 default: 171 break; 172 } 173 /**/ 174#ifdef TIMESCALE_BUG 175 Params.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale; 176#endif 177 // new params after integrating MCS 2.0 178 // Set the number of audio effects; 0 for now. 179 Params.nbEffects = 0; 180 181 // Set the audio effect; null for now. 182 Params.pEffects = NULL; 183 184 // Set the audio effect; null for now. 185 Params.bDiscardExif = M4OSA_FALSE; 186 187 // Set the audio effect; null for now. 188 Params.bAdjustOrientation = M4OSA_FALSE; 189 // new params after integrating MCS 2.0 190 191 /** 192 * Set output parameters */ 193 err = M4MCS_setOutputParams(mcs_context, &Params); 194 if (err != M4NO_ERROR) 195 { 196 M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err); 197 M4MCS_abort(mcs_context); 198 return err; 199 } 200 201 Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate; 202 Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate; 203 Rates.BeginCutTime = 0; 204 Rates.EndCutTime = 0; 205 Rates.OutputFileSize = 0; 206 207 /*FB: transcoding per parts*/ 208 Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime; 209 Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime; 210 Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale; 211 212 err = M4MCS_setEncodingParams(mcs_context, &Rates); 213 if (err != M4NO_ERROR) 214 { 215 M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err); 216 M4MCS_abort(mcs_context); 217 return err; 218 } 219 220 err = M4MCS_checkParamsAndStart(mcs_context); 221 if (err != M4NO_ERROR) 222 { 223 
M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err); 224 M4MCS_abort(mcs_context); 225 return err; 226 } 227 228 /** 229 * Save MCS context to be able to call MCS step function in M4xVSS_step function */ 230 xVSS_context->pMCS_Ctxt = mcs_context; 231 232 return M4NO_ERROR; 233} 234 235/** 236 ****************************************************************************** 237 * prototype M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 238 * @brief This function cleans up MCS (3GP transcoder) 239 * @note 240 * 241 * @param pContext (IN) Pointer on the xVSS edit context 242 * @return M4NO_ERROR: No error 243 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 244 * @return M4ERR_ALLOC: Memory allocation has failed 245 ****************************************************************************** 246 */ 247M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 248{ 249 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 250 M4OSA_ERR err; 251 252 err = M4MCS_close(xVSS_context->pMCS_Ctxt); 253 if (err != M4NO_ERROR) 254 { 255 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err); 256 M4MCS_abort(xVSS_context->pMCS_Ctxt); 257 return err; 258 } 259 260 /** 261 * Free this MCS instance */ 262 err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt); 263 if (err != M4NO_ERROR) 264 { 265 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err); 266 return err; 267 } 268 269 xVSS_context->pMCS_Ctxt = M4OSA_NULL; 270 271 return M4NO_ERROR; 272} 273 274/** 275 ****************************************************************************** 276 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 277 * M4OSA_FileReadPointer* pFileReadPtr, 278 * M4VIFI_ImagePlane* pImagePlanes, 279 * M4OSA_UInt32 width, 280 * M4OSA_UInt32 height); 281 * @brief It Coverts and resizes a ARGB8888 image to YUV420 282 * @note 283 * @param pFileIn (IN) The Image input file 284 * 
@param pFileReadPtr (IN) Pointer on filesystem functions 285 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 286 * ARGB8888 image will be converted and resized to output 287 * YUV420 plane size 288 *@param width (IN) width of the ARGB8888 289 *@param height (IN) height of the ARGB8888 290 * @return M4NO_ERROR: No error 291 * @return M4ERR_ALLOC: memory error 292 * @return M4ERR_PARAMETER: At least one of the function parameters is null 293 ****************************************************************************** 294 */ 295 296M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 297 M4OSA_FileReadPointer* pFileReadPtr, 298 M4VIFI_ImagePlane* pImagePlanes, 299 M4OSA_UInt32 width,M4OSA_UInt32 height) 300{ 301 M4OSA_Context pARGBIn; 302 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; 303 M4OSA_UInt32 frameSize_argb=(width * height * 4); 304 M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data. 305 M4OSA_UInt32 i = 0,j= 0; 306 M4OSA_ERR err=M4NO_ERROR; 307 308 309 M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, 310 M4VS, (M4OSA_Char*)"Image argb data"); 311 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :"); 312 if(pTmpData == M4OSA_NULL) { 313 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 314 Failed to allocate memory for Image clip"); 315 return M4ERR_ALLOC; 316 } 317 318 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d", 319 width ,height); 320 /* Get file size (mandatory for chunk decoding) */ 321 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); 322 if(err != M4NO_ERROR) 323 { 324 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 325 Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 326 M4OSA_free((M4OSA_MemAddr32)pTmpData); 327 pTmpData = M4OSA_NULL; 328 goto cleanup; 329 } 330 331 err = 
pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); 332 if(err != M4NO_ERROR) 333 { 334 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888\ 335 file %s, error: 0x%x\n",pFileIn, err); 336 pFileReadPtr->closeRead(pARGBIn); 337 M4OSA_free((M4OSA_MemAddr32)pTmpData); 338 pTmpData = M4OSA_NULL; 339 goto cleanup; 340 } 341 342 err = pFileReadPtr->closeRead(pARGBIn); 343 if(err != M4NO_ERROR) 344 { 345 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \ 346 file %s, error: 0x%x\n",pFileIn, err); 347 M4OSA_free((M4OSA_MemAddr32)pTmpData); 348 pTmpData = M4OSA_NULL; 349 goto cleanup; 350 } 351 352 rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS, 353 (M4OSA_Char*)"Image clip RGB888 data"); 354 if(rgbPlane1.pac_data == M4OSA_NULL) 355 { 356 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \ 357 Failed to allocate memory for Image clip"); 358 M4OSA_free((M4OSA_MemAddr32)pTmpData); 359 return M4ERR_ALLOC; 360 } 361 362 rgbPlane1.u_height = height; 363 rgbPlane1.u_width = width; 364 rgbPlane1.u_stride = width*3; 365 rgbPlane1.u_topleft = 0; 366 367 368 /** Remove the alpha channel */ 369 for (i=0, j = 0; i < frameSize_argb; i++) { 370 if ((i % 4) == 0) continue; 371 rgbPlane1.pac_data[j] = pTmpData[i]; 372 j++; 373 } 374 M4OSA_free((M4OSA_MemAddr32)pTmpData); 375 376 /* To Check if resizing is required with color conversion */ 377 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) 378 { 379 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :"); 380 frameSize = ( pImagePlanes->u_width * pImagePlanes->u_height * 3); 381 rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS, 382 (M4OSA_Char*)"Image clip RGB888 data"); 383 if(rgbPlane2.pac_data == M4OSA_NULL) 384 { 385 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 386 M4OSA_free((M4OSA_MemAddr32)pTmpData); 387 return M4ERR_ALLOC; 388 } 389 
rgbPlane2.u_height = pImagePlanes->u_height; 390 rgbPlane2.u_width = pImagePlanes->u_width; 391 rgbPlane2.u_stride = pImagePlanes->u_width*3; 392 rgbPlane2.u_topleft = 0; 393 394 /* Resizing RGB888 to RGB888 */ 395 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2); 396 if(err != M4NO_ERROR) 397 { 398 M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err); 399 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 400 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 401 return err; 402 } 403 /*Converting Resized RGB888 to YUV420 */ 404 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes); 405 if(err != M4NO_ERROR) 406 { 407 M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err); 408 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 409 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 410 return err; 411 } 412 M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data); 413 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 414 415 M4OSA_TRACE1_0("RGB to YUV done"); 416 417 418 } 419 else 420 { 421 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO Resizing :"); 422 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes); 423 if(err != M4NO_ERROR) 424 { 425 M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err); 426 } 427 M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); 428 429 M4OSA_TRACE1_0("RGB to YUV done"); 430 } 431cleanup: 432 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :"); 433 return err; 434} 435 436/** 437 ****************************************************************************** 438 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 439 * M4OSA_FileReadPointer* pFileReadPtr, 440 * M4VIFI_ImagePlane* pImagePlanes, 441 * M4OSA_UInt32 width, 442 * M4OSA_UInt32 height); 443 * @brief It Coverts a ARGB8888 image to YUV420 444 * @note 445 * @param pFileIn (IN) The Image input file 446 * @param pFileReadPtr 
(IN) Pointer on filesystem functions 447 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 448 * ARGB8888 image will be converted and resized to output 449 * YUV420 plane size 450 * @param width (IN) width of the ARGB8888 451 * @param height (IN) height of the ARGB8888 452 * @return M4NO_ERROR: No error 453 * @return M4ERR_ALLOC: memory error 454 * @return M4ERR_PARAMETER: At least one of the function parameters is null 455 ****************************************************************************** 456 */ 457 458M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 459 M4OSA_FileReadPointer* pFileReadPtr, 460 M4VIFI_ImagePlane** pImagePlanes, 461 M4OSA_UInt32 width,M4OSA_UInt32 height) 462{ 463 M4OSA_ERR err = M4NO_ERROR; 464 M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL; 465 466 yuvPlane = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), 467 M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV"); 468 if(yuvPlane == M4OSA_NULL) { 469 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 470 Failed to allocate memory for Image clip"); 471 return M4ERR_ALLOC; 472 } 473 yuvPlane[0].u_height = height; 474 yuvPlane[0].u_width = width; 475 yuvPlane[0].u_stride = width; 476 yuvPlane[0].u_topleft = 0; 477 yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(yuvPlane[0].u_height \ 478 * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data"); 479 480 yuvPlane[1].u_height = yuvPlane[0].u_height >>1; 481 yuvPlane[1].u_width = yuvPlane[0].u_width >> 1; 482 yuvPlane[1].u_stride = yuvPlane[1].u_width; 483 yuvPlane[1].u_topleft = 0; 484 yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \ 485 * yuvPlane[0].u_width); 486 487 yuvPlane[2].u_height = yuvPlane[0].u_height >>1; 488 yuvPlane[2].u_width = yuvPlane[0].u_width >> 1; 489 yuvPlane[2].u_stride = yuvPlane[2].u_width; 490 yuvPlane[2].u_topleft = 0; 491 yuvPlane[2].pac_data = 
(M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \ 492 * yuvPlane[1].u_width); 493 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr, 494 yuvPlane, width, height); 495 if(err != M4NO_ERROR) 496 { 497 M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err); 498 M4OSA_free((M4OSA_MemAddr32)yuvPlane); 499 return err; 500 } 501 502 *pImagePlanes = yuvPlane; 503 504 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving"); 505 return err; 506 507} 508 509/** 510 ****************************************************************************** 511 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt, 512 * M4VIFI_ImagePlane* pImagePlanes, 513 * M4OSA_UInt32* pPictureDuration); 514 * @brief It feeds the PTO3GPP with YUV420 pictures. 515 * @note This function is given to the PTO3GPP in the M4PTO3GPP_Params structure 516 * @param pContext (IN) The integrator own context 517 * @param pImagePlanes(IN/OUT) Pointer to an array of three valid image planes 518 * @param pPictureDuration(OUT) Duration of the returned picture 519 * 520 * @return M4NO_ERROR: No error 521 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 522 * @return M4ERR_PARAMETER: At least one of the function parameters is null 523 ****************************************************************************** 524 */ 525M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes, 526 M4OSA_Double* pPictureDuration) 527{ 528 M4OSA_ERR err = M4NO_ERROR; 529 M4OSA_UInt8 last_frame_flag = 0; 530 M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt); 531 532 /*Used for pan&zoom*/ 533 M4OSA_UInt8 tempPanzoomXa = 0; 534 M4OSA_UInt8 tempPanzoomXb = 0; 535 M4AIR_Params Params; 536 /**/ 537 538 /*Used for cropping and black borders*/ 539 M4OSA_Context pPictureContext = M4OSA_NULL; 540 M4OSA_FilePosition pictureSize = 0 ; 541 M4OSA_UInt8* pictureBuffer = 
M4OSA_NULL; 542 //M4EXIFC_Context pExifContext = M4OSA_NULL; 543 M4EXIFC_BasicTags pBasicTags; 544 M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0]; 545 M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1]; 546 M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2]; 547 /**/ 548 549 /** 550 * Check input parameters */ 551 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt), M4ERR_PARAMETER, 552 "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL"); 553 M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes), M4ERR_PARAMETER, 554 "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL"); 555 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER, 556 "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL"); 557 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering"); 558 /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/ 559 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0) 560 { 561 pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE; 562 } 563 564 /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/ 565 if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 566 { 567 /** 568 * Convert and resize input ARGB8888 file to YUV420 */ 569 /*To support ARGB8888 : */ 570 M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and heght %d %d", 571 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 572 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn, 573 pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width, 574 pC->m_pPto3GPPparams->height); 575 if(err != M4NO_ERROR) 576 { 577 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 578 return err; 579 } 580 } 581 /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/ 582 else 583 { 584 /** 585 * Computes ratios */ 586 if(pC->m_pDecodedPlane == M4OSA_NULL) 587 { 588 /** 589 * Convert input ARGB8888 file to YUV420 */ 590 
M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and heght %d %d", 591 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 592 err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr, 593 &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 594 if(err != M4NO_ERROR) 595 { 596 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 597 if(pC->m_pDecodedPlane != M4OSA_NULL) 598 { 599 /* YUV420 planar is returned but allocation is made only once 600 (contigous planes in memory) */ 601 if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL) 602 { 603 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane->pac_data); 604 } 605 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 606 pC->m_pDecodedPlane = M4OSA_NULL; 607 } 608 return err; 609 } 610 } 611 612 /*Initialize AIR Params*/ 613 Params.m_inputCoord.m_x = 0; 614 Params.m_inputCoord.m_y = 0; 615 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 616 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 617 Params.m_outputSize.m_width = pImagePlanes->u_width; 618 Params.m_outputSize.m_height = pImagePlanes->u_height; 619 Params.m_bOutputStripe = M4OSA_FALSE; 620 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 621 622 /*Initialize Exif params structure*/ 623 pBasicTags.orientation = M4COMMON_kOrientationUnknown; 624 625 /** 626 Pan&zoom params*/ 627 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom) 628 { 629 /*Save ratio values, they can be reused if the new ratios are 0*/ 630 tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa; 631 tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb; 632#if 0 633 /** 634 * Check size of output JPEG is compatible with pan & zoom parameters 635 First, check final (b) parameters */ 636 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 100 ) 637 { 638 M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings !!!\ 639 New final Zoom ratio is: %d", 
(100 - pC->m_pPto3GPPparams->PanZoomTopleftXb)); 640 /* We do not change the topleft parameter as it may correspond to a precise area 641 of the picture -> only the zoom ratio is modified */ 642 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXb; 643 } 644 645 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 100 ) 646 { 647 M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings \ 648 !!! New final Zoom ratio is: %d", 649 (100 - pC->m_pPto3GPPparams->PanZoomTopleftYb)); 650 /* We do not change the topleft parameter as it may correspond to a 651 precise area of the picture -> only the zoom ratio is modified */ 652 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYb; 653 } 654 655 /** 656 * Then, check initial (a) parameters */ 657 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 100 ) 658 { 659 M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! \ 660 New initial Zoom ratio is: %d",(100 - pC->m_pPto3GPPparams->PanZoomTopleftXa)); 661 /* We do not change the topleft parameter as it may correspond to a precise 662 area of the picture-> only the zoom ratio is modified */ 663 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXa; 664 } 665 666 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 100 ) 667 { 668 M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! 
New initial\ 669 Zoom ratio is: %d", (100 - pC->m_pPto3GPPparams->PanZoomTopleftYa)); 670 /* We do not change the topleft parameter as it may correspond to a precise 671 area of the picture-> only the zoom ratio is modified */ 672 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYa; 673 } 674#endif 675 /*Check that the ratio is not 0*/ 676 /*Check (a) parameters*/ 677 if(pC->m_pPto3GPPparams->PanZoomXa == 0) 678 { 679 M4OSA_UInt8 maxRatio = 0; 680 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= 681 pC->m_pPto3GPPparams->PanZoomTopleftYa) 682 { 683 /*The ratio is 0, that means the area of the picture defined with (a) 684 parameters is bigger than the image size*/ 685 if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 100) 686 { 687 /*The oversize is maxRatio*/ 688 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 100; 689 } 690 } 691 else 692 { 693 /*The ratio is 0, that means the area of the picture defined with (a) 694 parameters is bigger than the image size*/ 695 if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 100) 696 { 697 /*The oversize is maxRatio*/ 698 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 100; 699 } 700 } 701 /*Modify the (a) parameters:*/ 702 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio) 703 { 704 /*The (a) topleft parameters can be moved to keep the same area size*/ 705 pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio; 706 } 707 else 708 { 709 /*Move the (a) topleft parameter to 0 but the ratio will be also further 710 modified to match the image size*/ 711 pC->m_pPto3GPPparams->PanZoomTopleftXa = 0; 712 } 713 if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio) 714 { 715 /*The (a) topleft parameters can be moved to keep the same area size*/ 716 pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio; 717 } 718 else 719 { 720 /*Move the (a) topleft parameter to 0 but the ratio will be also further 721 modified to match the image size*/ 722 
pC->m_pPto3GPPparams->PanZoomTopleftYa = 0; 723 } 724 /*The new ratio is the original one*/ 725 pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa; 726 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 100) 727 { 728 /*Change the ratio if the area of the picture defined with (a) parameters is 729 bigger than the image size*/ 730 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXa; 731 } 732 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 100) 733 { 734 /*Change the ratio if the area of the picture defined with (a) parameters is 735 bigger than the image size*/ 736 pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYa; 737 } 738 } 739 /*Check (b) parameters*/ 740 if(pC->m_pPto3GPPparams->PanZoomXb == 0) 741 { 742 M4OSA_UInt8 maxRatio = 0; 743 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= 744 pC->m_pPto3GPPparams->PanZoomTopleftYb) 745 { 746 /*The ratio is 0, that means the area of the picture defined with (b) 747 parameters is bigger than the image size*/ 748 if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 100) 749 { 750 /*The oversize is maxRatio*/ 751 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 100; 752 } 753 } 754 else 755 { 756 /*The ratio is 0, that means the area of the picture defined with (b) 757 parameters is bigger than the image size*/ 758 if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 100) 759 { 760 /*The oversize is maxRatio*/ 761 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 100; 762 } 763 } 764 /*Modify the (b) parameters:*/ 765 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio) 766 { 767 /*The (b) topleft parameters can be moved to keep the same area size*/ 768 pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio; 769 } 770 else 771 { 772 /*Move the (b) topleft parameter to 0 but the ratio will be also further 773 modified to match the image size*/ 774 
pC->m_pPto3GPPparams->PanZoomTopleftXb = 0; 775 } 776 if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio) 777 { 778 /*The (b) topleft parameters can be moved to keep the same area size*/ 779 pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio; 780 } 781 else 782 { 783 /*Move the (b) topleft parameter to 0 but the ratio will be also further 784 modified to match the image size*/ 785 pC->m_pPto3GPPparams->PanZoomTopleftYb = 0; 786 } 787 /*The new ratio is the original one*/ 788 pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb; 789 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 100) 790 { 791 /*Change the ratio if the area of the picture defined with (b) parameters is 792 bigger than the image size*/ 793 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXb; 794 } 795 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 100) 796 { 797 /*Change the ratio if the area of the picture defined with (b) parameters is 798 bigger than the image size*/ 799 pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYb; 800 } 801 } 802 803 /** 804 * Computes AIR parameters */ 805/* Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 806 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 807 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \ 808 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 809 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 810 Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 811 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 812 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\ 813 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 814 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 815 816 Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 817 (pC->m_pPto3GPPparams->PanZoomXa + 818 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 819 pC->m_ImageCounter) 
/ (M4OSA_Double)pC->m_NbImage)) / 100; 820 821 Params.m_inputSize.m_height = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 822 (pC->m_pPto3GPPparams->PanZoomXa + 823 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 824 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 825 */ 826 Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 827 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 828 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\ 829 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 830 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100) + 0.5); 831 Params.m_inputCoord.m_y = 832 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 833 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 834 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\ 835 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 836 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100) + 0.5); 837 838 Params.m_inputSize.m_width = 839 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 840 (pC->m_pPto3GPPparams->PanZoomXa + 841 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\ 842 - pC->m_pPto3GPPparams->PanZoomXa) * 843 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100) + 0.5); 844 845 Params.m_inputSize.m_height = 846 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 847 (pC->m_pPto3GPPparams->PanZoomXa + 848 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \ 849 - pC->m_pPto3GPPparams->PanZoomXa) * 850 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100) + 0.5); 851 852 853 if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\ 854 > pC->m_pDecodedPlane->u_width) 855 { 856 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \ 857 - Params.m_inputCoord.m_x; 858 } 859 860 if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\ 861 > pC->m_pDecodedPlane->u_height) 862 { 863 Params.m_inputSize.m_height = 
pC->m_pDecodedPlane->u_height\ 864 - Params.m_inputCoord.m_y; 865 } 866 867 868 869 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 870 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 871 } 872 873 874 875 /** 876 Picture rendering: Black borders*/ 877 878 if(pC->m_mediaRendering == M4xVSS_kBlackBorders) 879 { 880 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[0].pac_data, 881 (pImagePlanes[0].u_height*pImagePlanes[0].u_stride),Y_PLANE_BORDER_VALUE); 882 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[1].pac_data, 883 (pImagePlanes[1].u_height*pImagePlanes[1].u_stride),U_PLANE_BORDER_VALUE); 884 M4OSA_memset((M4OSA_MemAddr8)pImagePlanes[2].pac_data, 885 (pImagePlanes[2].u_height*pImagePlanes[2].u_stride),V_PLANE_BORDER_VALUE); 886 887 /** 888 First without pan&zoom*/ 889 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 890 { 891 switch(pBasicTags.orientation) 892 { 893 default: 894 case M4COMMON_kOrientationUnknown: 895 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 896 case M4COMMON_kOrientationTopLeft: 897 case M4COMMON_kOrientationTopRight: 898 case M4COMMON_kOrientationBottomRight: 899 case M4COMMON_kOrientationBottomLeft: 900 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 901 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 902 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 903 { 904 /*it is height so black borders will be on the top and on the bottom side*/ 905 Params.m_outputSize.m_width = pImagePlanes->u_width; 906 Params.m_outputSize.m_height = 907 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \ 908 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 909 /*number of lines at the top*/ 910 pImagePlanes[0].u_topleft = 911 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 912 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 913 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 914 pImagePlanes[1].u_topleft = 915 
(M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 916 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride; 917 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 918 pImagePlanes[2].u_topleft = 919 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 920 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride; 921 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 922 } 923 else 924 { 925 /*it is width so black borders will be on the left and right side*/ 926 Params.m_outputSize.m_height = pImagePlanes->u_height; 927 Params.m_outputSize.m_width = 928 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 929 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 930 931 pImagePlanes[0].u_topleft = 932 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 933 -Params.m_outputSize.m_width)>>1)); 934 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 935 pImagePlanes[1].u_topleft = 936 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 937 -(Params.m_outputSize.m_width>>1)))>>1); 938 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 939 pImagePlanes[2].u_topleft = 940 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 941 -(Params.m_outputSize.m_width>>1)))>>1); 942 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 943 } 944 break; 945 case M4COMMON_kOrientationLeftTop: 946 case M4COMMON_kOrientationLeftBottom: 947 case M4COMMON_kOrientationRightTop: 948 case M4COMMON_kOrientationRightBottom: 949 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 950 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 951 //Params.m_inputSize.m_height > Params.m_inputSize.m_width) 952 { 953 /*it is height so black borders will be on the top and on 954 the bottom side*/ 955 Params.m_outputSize.m_height = pImagePlanes->u_width; 956 Params.m_outputSize.m_width = 957 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 958 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height); 959 /*number of lines at the 
top*/ 960 pImagePlanes[0].u_topleft = 961 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 962 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 963 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 964 pImagePlanes[1].u_topleft = 965 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 966 -(Params.m_outputSize.m_width>>1)))>>1)\ 967 *pImagePlanes[1].u_stride)+1; 968 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 969 pImagePlanes[2].u_topleft = 970 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 971 -(Params.m_outputSize.m_width>>1)))>>1)\ 972 *pImagePlanes[2].u_stride)+1; 973 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 974 } 975 else 976 { 977 /*it is width so black borders will be on the left and right side*/ 978 Params.m_outputSize.m_width = pImagePlanes->u_height; 979 Params.m_outputSize.m_height = 980 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 981 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width); 982 983 pImagePlanes[0].u_topleft = 984 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 985 -Params.m_outputSize.m_height))>>1))+1; 986 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 987 pImagePlanes[1].u_topleft = 988 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 989 -(Params.m_outputSize.m_height>>1)))>>1))+1; 990 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 991 pImagePlanes[2].u_topleft = 992 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 993 -(Params.m_outputSize.m_height>>1)))>>1))+1; 994 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 995 } 996 break; 997 } 998 } 999 1000 /** 1001 Secondly with pan&zoom*/ 1002 else 1003 { 1004 switch(pBasicTags.orientation) 1005 { 1006 default: 1007 case M4COMMON_kOrientationUnknown: 1008 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 1009 case M4COMMON_kOrientationTopLeft: 1010 case M4COMMON_kOrientationTopRight: 1011 case M4COMMON_kOrientationBottomRight: 1012 case M4COMMON_kOrientationBottomLeft: 
1013 /*NO ROTATION*/ 1014 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 1015 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 1016 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 1017 { 1018 /*Black borders will be on the top and bottom of the output video*/ 1019 /*Maximum output height if the input image aspect ratio is kept and if 1020 the output width is the screen width*/ 1021 M4OSA_UInt32 tempOutputSizeHeight = 1022 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 1023 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 1024 M4OSA_UInt32 tempInputSizeHeightMax = 0; 1025 M4OSA_UInt32 tempFinalInputHeight = 0; 1026 /*The output width is the screen width*/ 1027 Params.m_outputSize.m_width = pImagePlanes->u_width; 1028 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 1029 1030 /*Maximum input height according to the maximum output height 1031 (proportional to the maximum output height)*/ 1032 tempInputSizeHeightMax = (pImagePlanes->u_height\ 1033 *Params.m_inputSize.m_height)/tempOutputSizeHeight; 1034 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 1035 1036 /*Check if the maximum possible input height is contained into the 1037 input image height*/ 1038 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height) 1039 { 1040 /*The maximum possible input height is contained in the input 1041 image height, 1042 that means no black borders, the input pan zoom area will be extended 1043 so that the input AIR height will be the maximum possible*/ 1044 if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 1045 <= Params.m_inputCoord.m_y 1046 && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 1047 <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\ 1048 + Params.m_inputSize.m_height)) 1049 { 1050 /*The input pan zoom area can be extended symmetrically on the 1051 top and bottom side*/ 1052 Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \ 1053 - 
Params.m_inputSize.m_height)>>1); 1054 } 1055 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 1056 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 1057 { 1058 /*There is not enough place above the input pan zoom area to 1059 extend it symmetrically, 1060 so extend it to the maximum on the top*/ 1061 Params.m_inputCoord.m_y = 0; 1062 } 1063 else 1064 { 1065 /*There is not enough place below the input pan zoom area to 1066 extend it symmetrically, 1067 so extend it to the maximum on the bottom*/ 1068 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \ 1069 - tempInputSizeHeightMax; 1070 } 1071 /*The input height of the AIR is the maximum possible height*/ 1072 Params.m_inputSize.m_height = tempInputSizeHeightMax; 1073 } 1074 else 1075 { 1076 /*The maximum possible input height is greater than the input 1077 image height, 1078 that means black borders are necessary to keep aspect ratio 1079 The input height of the AIR is all the input image height*/ 1080 Params.m_outputSize.m_height = 1081 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\ 1082 /Params.m_inputSize.m_height; 1083 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1084 Params.m_inputCoord.m_y = 0; 1085 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1086 pImagePlanes[0].u_topleft = 1087 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1088 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 1089 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 1090 pImagePlanes[1].u_topleft = 1091 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1092 -(Params.m_outputSize.m_height>>1)))>>1)\ 1093 *pImagePlanes[1].u_stride); 1094 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 1095 pImagePlanes[2].u_topleft = 1096 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1097 -(Params.m_outputSize.m_height>>1)))>>1)\ 1098 *pImagePlanes[2].u_stride); 1099 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 1100 } 
1101 } 1102 else 1103 { 1104 /*Black borders will be on the left and right side of the output video*/ 1105 /*Maximum output width if the input image aspect ratio is kept and if the 1106 output height is the screen height*/ 1107 M4OSA_UInt32 tempOutputSizeWidth = 1108 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 1109 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 1110 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1111 M4OSA_UInt32 tempFinalInputWidth = 0; 1112 /*The output height is the screen height*/ 1113 Params.m_outputSize.m_height = pImagePlanes->u_height; 1114 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1115 1116 /*Maximum input width according to the maximum output width 1117 (proportional to the maximum output width)*/ 1118 tempInputSizeWidthMax = 1119 (pImagePlanes->u_width*Params.m_inputSize.m_width)\ 1120 /tempOutputSizeWidth; 1121 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1122 1123 /*Check if the maximum possible input width is contained into the input 1124 image width*/ 1125 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width) 1126 { 1127 /*The maximum possible input width is contained in the input 1128 image width, 1129 that means no black borders, the input pan zoom area will be extended 1130 so that the input AIR width will be the maximum possible*/ 1131 if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \ 1132 <= Params.m_inputCoord.m_x 1133 && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\ 1134 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1135 + Params.m_inputSize.m_width)) 1136 { 1137 /*The input pan zoom area can be extended symmetrically on the 1138 right and left side*/ 1139 Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\ 1140 - Params.m_inputSize.m_width)>>1); 1141 } 1142 else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ 1143 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1144 { 1145 /*There is not enough place above the input pan zoom area to 
1146 extend it symmetrically, 1147 so extend it to the maximum on the left*/ 1148 Params.m_inputCoord.m_x = 0; 1149 } 1150 else 1151 { 1152 /*There is not enough place below the input pan zoom area 1153 to extend it symmetrically, 1154 so extend it to the maximum on the right*/ 1155 Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \ 1156 - tempInputSizeWidthMax; 1157 } 1158 /*The input width of the AIR is the maximum possible width*/ 1159 Params.m_inputSize.m_width = tempInputSizeWidthMax; 1160 } 1161 else 1162 { 1163 /*The maximum possible input width is greater than the input 1164 image width, 1165 that means black borders are necessary to keep aspect ratio 1166 The input width of the AIR is all the input image width*/ 1167 Params.m_outputSize.m_width =\ 1168 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\ 1169 /Params.m_inputSize.m_width; 1170 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1171 Params.m_inputCoord.m_x = 0; 1172 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1173 pImagePlanes[0].u_topleft = 1174 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1175 -Params.m_outputSize.m_width)>>1)); 1176 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 1177 pImagePlanes[1].u_topleft = 1178 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1179 -(Params.m_outputSize.m_width>>1)))>>1); 1180 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 1181 pImagePlanes[2].u_topleft = 1182 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1183 -(Params.m_outputSize.m_width>>1)))>>1); 1184 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 1185 } 1186 } 1187 break; 1188 case M4COMMON_kOrientationLeftTop: 1189 case M4COMMON_kOrientationLeftBottom: 1190 case M4COMMON_kOrientationRightTop: 1191 case M4COMMON_kOrientationRightBottom: 1192 /*ROTATION*/ 1193 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1194 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 1195 
//Params.m_inputSize.m_height > Params.m_inputSize.m_width) 1196 { 1197 /*Black borders will be on the left and right side of the output video*/ 1198 /*Maximum output height if the input image aspect ratio is kept and if 1199 the output height is the screen width*/ 1200 M4OSA_UInt32 tempOutputSizeHeight = 1201 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1202 /pC->m_pDecodedPlane->u_height); 1203 M4OSA_UInt32 tempInputSizeHeightMax = 0; 1204 M4OSA_UInt32 tempFinalInputHeight = 0; 1205 /*The output width is the screen height*/ 1206 Params.m_outputSize.m_height = pImagePlanes->u_width; 1207 Params.m_outputSize.m_width= pImagePlanes->u_height; 1208 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 1209 1210 /*Maximum input height according to the maximum output height 1211 (proportional to the maximum output height)*/ 1212 tempInputSizeHeightMax = 1213 (pImagePlanes->u_height*Params.m_inputSize.m_width)\ 1214 /tempOutputSizeHeight; 1215 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 1216 1217 /*Check if the maximum possible input height is contained into the 1218 input image width (rotation included)*/ 1219 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width) 1220 { 1221 /*The maximum possible input height is contained in the input 1222 image width (rotation included), 1223 that means no black borders, the input pan zoom area will be extended 1224 so that the input AIR width will be the maximum possible*/ 1225 if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \ 1226 <= Params.m_inputCoord.m_x 1227 && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\ 1228 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1229 + Params.m_inputSize.m_width)) 1230 { 1231 /*The input pan zoom area can be extended symmetrically on the 1232 right and left side*/ 1233 Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \ 1234 - Params.m_inputSize.m_width)>>1); 1235 } 1236 else if(Params.m_inputCoord.m_x < 
pC->m_pDecodedPlane->u_width\ 1237 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1238 { 1239 /*There is not enough place on the left of the input pan 1240 zoom area to extend it symmetrically, 1241 so extend it to the maximum on the left*/ 1242 Params.m_inputCoord.m_x = 0; 1243 } 1244 else 1245 { 1246 /*There is not enough place on the right of the input pan zoom 1247 area to extend it symmetrically, 1248 so extend it to the maximum on the right*/ 1249 Params.m_inputCoord.m_x = 1250 pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax; 1251 } 1252 /*The input width of the AIR is the maximum possible width*/ 1253 Params.m_inputSize.m_width = tempInputSizeHeightMax; 1254 } 1255 else 1256 { 1257 /*The maximum possible input height is greater than the input 1258 image width (rotation included), 1259 that means black borders are necessary to keep aspect ratio 1260 The input width of the AIR is all the input image width*/ 1261 Params.m_outputSize.m_width = 1262 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\ 1263 /Params.m_inputSize.m_width; 1264 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1265 Params.m_inputCoord.m_x = 0; 1266 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1267 pImagePlanes[0].u_topleft = 1268 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1269 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 1270 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 1271 pImagePlanes[1].u_topleft = 1272 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1273 -(Params.m_outputSize.m_width>>1)))>>1)\ 1274 *pImagePlanes[1].u_stride)+1; 1275 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 1276 pImagePlanes[2].u_topleft = 1277 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1278 -(Params.m_outputSize.m_width>>1)))>>1)\ 1279 *pImagePlanes[2].u_stride)+1; 1280 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 1281 } 1282 } 1283 else 1284 { 1285 /*Black borders will be on the 
top and bottom of the output video*/ 1286 /*Maximum output width if the input image aspect ratio is kept and if 1287 the output width is the screen height*/ 1288 M4OSA_UInt32 tempOutputSizeWidth = 1289 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\ 1290 /pC->m_pDecodedPlane->u_width); 1291 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1292 M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0; 1293 /*The output height is the screen width*/ 1294 Params.m_outputSize.m_width = pImagePlanes->u_height; 1295 Params.m_outputSize.m_height= pImagePlanes->u_width; 1296 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1297 1298 /*Maximum input width according to the maximum output width 1299 (proportional to the maximum output width)*/ 1300 tempInputSizeWidthMax = 1301 (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth; 1302 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1303 1304 /*Check if the maximum possible input width is contained into the input 1305 image height (rotation included)*/ 1306 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height) 1307 { 1308 /*The maximum possible input width is contained in the input 1309 image height (rotation included), 1310 that means no black borders, the input pan zoom area will be extended 1311 so that the input AIR height will be the maximum possible*/ 1312 if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \ 1313 <= Params.m_inputCoord.m_y 1314 && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\ 1315 <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \ 1316 + Params.m_inputSize.m_height)) 1317 { 1318 /*The input pan zoom area can be extended symmetrically on 1319 the right and left side*/ 1320 Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \ 1321 - Params.m_inputSize.m_height)>>1); 1322 } 1323 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 1324 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 1325 { 1326 
/*There is not enough place on the top of the input pan zoom 1327 area to extend it symmetrically, 1328 so extend it to the maximum on the top*/ 1329 Params.m_inputCoord.m_y = 0; 1330 } 1331 else 1332 { 1333 /*There is not enough place on the bottom of the input pan zoom 1334 area to extend it symmetrically, 1335 so extend it to the maximum on the bottom*/ 1336 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\ 1337 - tempInputSizeWidthMax; 1338 } 1339 /*The input height of the AIR is the maximum possible height*/ 1340 Params.m_inputSize.m_height = tempInputSizeWidthMax; 1341 } 1342 else 1343 { 1344 /*The maximum possible input width is greater than the input\ 1345 image height (rotation included), 1346 that means black borders are necessary to keep aspect ratio 1347 The input height of the AIR is all the input image height*/ 1348 Params.m_outputSize.m_height = 1349 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\ 1350 /Params.m_inputSize.m_height; 1351 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1352 Params.m_inputCoord.m_y = 0; 1353 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1354 pImagePlanes[0].u_topleft = 1355 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1356 -Params.m_outputSize.m_height))>>1))+1; 1357 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 1358 pImagePlanes[1].u_topleft = 1359 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1360 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1361 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 1362 pImagePlanes[2].u_topleft = 1363 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1364 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1365 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 1366 } 1367 } 1368 break; 1369 } 1370 } 1371 1372 /*Width and height have to be even*/ 1373 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1374 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1375 
Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1376 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1377 pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1; 1378 pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1; 1379 pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1; 1380 pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1; 1381 pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1; 1382 pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1; 1383 1384 /*Check that values are coherent*/ 1385 if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) 1386 { 1387 Params.m_inputSize.m_width = Params.m_outputSize.m_width; 1388 } 1389 else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) 1390 { 1391 Params.m_inputSize.m_height = Params.m_outputSize.m_height; 1392 } 1393 } 1394 1395 /** 1396 Picture rendering: Resizing and Cropping*/ 1397 if(pC->m_mediaRendering != M4xVSS_kBlackBorders) 1398 { 1399 switch(pBasicTags.orientation) 1400 { 1401 default: 1402 case M4COMMON_kOrientationUnknown: 1403 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 1404 case M4COMMON_kOrientationTopLeft: 1405 case M4COMMON_kOrientationTopRight: 1406 case M4COMMON_kOrientationBottomRight: 1407 case M4COMMON_kOrientationBottomLeft: 1408 Params.m_outputSize.m_height = pImagePlanes->u_height; 1409 Params.m_outputSize.m_width = pImagePlanes->u_width; 1410 break; 1411 case M4COMMON_kOrientationLeftTop: 1412 case M4COMMON_kOrientationLeftBottom: 1413 case M4COMMON_kOrientationRightTop: 1414 case M4COMMON_kOrientationRightBottom: 1415 Params.m_outputSize.m_height = pImagePlanes->u_width; 1416 Params.m_outputSize.m_width = pImagePlanes->u_height; 1417 break; 1418 } 1419 } 1420 1421 /** 1422 Picture rendering: Cropping*/ 1423 if(pC->m_mediaRendering == M4xVSS_kCropping) 1424 { 1425 if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\ 1426 
/Params.m_outputSize.m_width<Params.m_inputSize.m_height) 1427 { 1428 M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height; 1429 /*height will be cropped*/ 1430 Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height \ 1431 * Params.m_inputSize.m_width) /Params.m_outputSize.m_width); 1432 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1433 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1434 { 1435 Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\ 1436 ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1); 1437 } 1438 else 1439 { 1440 Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\ 1441 ((tempHeight - Params.m_inputSize.m_height))>>1); 1442 } 1443 } 1444 else 1445 { 1446 M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width; 1447 /*width will be cropped*/ 1448 Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width \ 1449 * Params.m_inputSize.m_height) /Params.m_outputSize.m_height); 1450 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1451 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1452 { 1453 Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\ 1454 ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1); 1455 } 1456 else 1457 { 1458 Params.m_inputCoord.m_x += (M4OSA_Int32)\ 1459 (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1); 1460 } 1461 } 1462 } 1463 1464 1465 1466 /** 1467 * Call AIR functions */ 1468 if(M4OSA_NULL == pC->m_air_context) 1469 { 1470 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); 1471 if(err != M4NO_ERROR) 1472 { 1473 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1474 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1475 pC->m_pDecodedPlane = M4OSA_NULL; 1476 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1477 Error when initializing AIR: 0x%x", err); 1478 return err; 1479 } 1480 } 1481 1482 err = M4AIR_configure(pC->m_air_context, &Params); 1483 if(err != M4NO_ERROR) 1484 { 1485 
M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1486 Error when configuring AIR: 0x%x", err); 1487 M4AIR_cleanUp(pC->m_air_context); 1488 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1489 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1490 pC->m_pDecodedPlane = M4OSA_NULL; 1491 return err; 1492 } 1493 1494 err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes); 1495 if(err != M4NO_ERROR) 1496 { 1497 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err); 1498 M4AIR_cleanUp(pC->m_air_context); 1499 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1500 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1501 pC->m_pDecodedPlane = M4OSA_NULL; 1502 return err; 1503 } 1504 pImagePlanes[0] = pImagePlanes1; 1505 pImagePlanes[1] = pImagePlanes2; 1506 pImagePlanes[2] = pImagePlanes3; 1507 } 1508 1509 1510 /** 1511 * Increment the image counter */ 1512 pC->m_ImageCounter++; 1513 1514 /** 1515 * Check end of sequence */ 1516 last_frame_flag = (pC->m_ImageCounter >= pC->m_NbImage); 1517 1518 /** 1519 * Keep the picture duration */ 1520 *pPictureDuration = pC->m_timeDuration; 1521 1522 if (1 == last_frame_flag) 1523 { 1524 if(M4OSA_NULL != pC->m_air_context) 1525 { 1526 err = M4AIR_cleanUp(pC->m_air_context); 1527 if(err != M4NO_ERROR) 1528 { 1529 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err); 1530 return err; 1531 } 1532 } 1533 if(M4OSA_NULL != pC->m_pDecodedPlane) 1534 { 1535 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data); 1536 M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane); 1537 pC->m_pDecodedPlane = M4OSA_NULL; 1538 } 1539 return M4PTO3GPP_WAR_LAST_PICTURE; 1540 } 1541 1542 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving "); 1543 return M4NO_ERROR; 1544} 1545 1546/** 1547 ****************************************************************************** 1548 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1549 * @brief This 
function initializes Pto3GPP with the given parameters 1550 * @note The "Pictures to 3GPP" parameters are given by the internal xVSS 1551 * context. This context contains a pointer on the current element 1552 * of the chained list of Pto3GPP parameters. 1553 * @param pContext (IN) The integrator own context 1554 * 1555 * @return M4NO_ERROR: No error 1556 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 1557 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1558 ****************************************************************************** 1559 */ 1560M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1561{ 1562 /************************************************************************/ 1563 /* Definitions to generate dummy AMR file used to add AMR silence in files generated 1564 by Pto3GPP */ 1565 #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE 13 1566 /* This constant is defined in M4VSS3GPP_InternalConfig.h */ 1567 extern const M4OSA_UInt8\ 1568 M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE]; 1569 1570 /* AMR silent frame used to compute dummy AMR silence file */ 1571 #define M4VSS3GPP_AMR_HEADER_SIZE 6 1572 const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE] = 1573 { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a }; 1574 /************************************************************************/ 1575 1576 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1577 M4OSA_ERR err; 1578 M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL; 1579 M4PTO3GPP_Params Params; 1580 M4xVSS_PictureCallbackCtxt* pCallBackCtxt; 1581 M4OSA_Bool cmpResult=M4OSA_FALSE; 1582 M4OSA_Context pDummyAMRFile; 1583 M4OSA_Char out_amr[64]; 1584 /*UTF conversion support*/ 1585 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1586 M4OSA_UInt32 i; 1587 1588 /** 1589 * Create a M4PTO3GPP instance */ 1590 err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr, 1591 
xVSS_context->pFileWritePtr); 1592 if (err != M4NO_ERROR) 1593 { 1594 M4OSA_TRACE1_1("M4PTO3GPP_Init returned %ld\n",err); 1595 return err; 1596 } 1597 1598 /* replay recorded external encoder registrations on the PTO3GPP */ 1599 for (i=0; i<M4VE_kEncoderType_NB; i++) 1600 { 1601 if (xVSS_context->registeredExternalEncs[i].registered) 1602 { 1603 err = M4PTO3GPP_RegisterExternalVideoEncoder(pM4PTO3GPP_Ctxt, i, 1604 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 1605 xVSS_context->registeredExternalEncs[i].pUserData); 1606 if (M4NO_ERROR != err) 1607 { 1608 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 1609 M4PTO3GPP_registerExternalVideoEncoder() returns 0x%x!", err); 1610 M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt); 1611 return err; 1612 } 1613 } 1614 } 1615 1616 pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_malloc(sizeof(M4xVSS_PictureCallbackCtxt), 1617 M4VS,(M4OSA_Char *) "Pto3gpp callback struct"); 1618 if(pCallBackCtxt == M4OSA_NULL) 1619 { 1620 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp"); 1621 return M4ERR_ALLOC; 1622 } 1623 1624 Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize; 1625 Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat; 1626 1627 /** 1628 * Generate "dummy" amr file containing silence in temporary folder */ 1629 M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, 64); 1630 M4OSA_chrNCat(out_amr, (M4OSA_Char *)"dummy.amr\0", 10); 1631 1632 /** 1633 * UTF conversion: convert the temporary path into the customer format*/ 1634 pDecodedPath = out_amr; 1635 1636 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1637 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1638 { 1639 M4OSA_UInt32 length = 0; 1640 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr, 1641 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1642 if(err != M4NO_ERROR) 1643 { 1644 
M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\ 1645 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1646 return err; 1647 } 1648 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1649 } 1650 1651 /** 1652 * End of the conversion, now use the converted path*/ 1653 1654 err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite); 1655 1656 /*Commented because of the use of the UTF conversion see above*/ 1657/* err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite); 1658 */ 1659 if(err != M4NO_ERROR) 1660 { 1661 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\ 1662 error: 0x%x\n",out_amr, err); 1663 return err; 1664 } 1665 1666 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1667 (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE); 1668 if(err != M4NO_ERROR) 1669 { 1670 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\ 1671 error: 0x%x\n",out_amr, err); 1672 return err; 1673 } 1674 1675 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1676 (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE); 1677 if(err != M4NO_ERROR) 1678 { 1679 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1680 Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err); 1681 return err; 1682 } 1683 1684 err = xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile); 1685 if(err != M4NO_ERROR) 1686 { 1687 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1688 Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err); 1689 return err; 1690 } 1691 1692 /** 1693 * Fill parameters for Pto3GPP with the parameters contained in the current element of the 1694 * Pto3GPP parameters chained list and with default parameters */ 1695/*+ New Encoder bitrates */ 1696 if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) { 
1697 Params.OutputVideoBitrate = M4VIDEOEDITING_kVARIABLE_KBPS; 1698 } 1699 else { 1700 Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate; 1701 } 1702 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d", 1703 Params.OutputVideoBitrate); 1704/*- New Encoder bitrates */ 1705 Params.OutputFileMaxSize = M4PTO3GPP_kUNLIMITED; 1706 Params.pPictureCallbackFct = M4xVSS_PictureCallbackFct; 1707 Params.pPictureCallbackCtxt = pCallBackCtxt; 1708 /*FB: change to use the converted path (UTF conversion) see the conversion above*/ 1709 /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */ 1710 Params.pInputAudioTrackFile = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr; 1711 Params.AudioPaddingMode = M4PTO3GPP_kAudioPaddingMode_Loop; 1712 Params.AudioFileFormat = M4VIDEOEDITING_kFileType_AMR; 1713 Params.pOutput3gppFile = xVSS_context->pPTo3GPPcurrentParams->pFileOut; 1714 Params.pTemporaryFile = xVSS_context->pPTo3GPPcurrentParams->pFileTemp; 1715 /*+PR No: blrnxpsw#223*/ 1716 /*Increasing frequency of Frame, calculating Nos of Frame = duration /FPS */ 1717 /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/ 1718 /*If case check for PanZoom removed */ 1719 Params.NbVideoFrames = (M4OSA_UInt32) 1720 (xVSS_context->pPTo3GPPcurrentParams->duration \ 1721 / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */ 1722 pCallBackCtxt->m_timeDuration = xVSS_context->pPTo3GPPcurrentParams->framerate; 1723 /*-PR No: blrnxpsw#223*/ 1724 pCallBackCtxt->m_ImageCounter = 0; 1725 pCallBackCtxt->m_FileIn = xVSS_context->pPTo3GPPcurrentParams->pFileIn; 1726 pCallBackCtxt->m_NbImage = Params.NbVideoFrames; 1727 pCallBackCtxt->m_pFileReadPtr = xVSS_context->pFileReadPtr; 1728 pCallBackCtxt->m_pDecodedPlane = M4OSA_NULL; 1729 pCallBackCtxt->m_pPto3GPPparams = xVSS_context->pPTo3GPPcurrentParams; 1730 pCallBackCtxt->m_air_context = M4OSA_NULL; 1731 pCallBackCtxt->m_mediaRendering = 
xVSS_context->pPTo3GPPcurrentParams->MediaRendering; 1732 1733 /** 1734 * Set the input and output files */ 1735 err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params); 1736 if (err != M4NO_ERROR) 1737 { 1738 M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err); 1739 if(pCallBackCtxt != M4OSA_NULL) 1740 { 1741 M4OSA_free((M4OSA_MemAddr32)pCallBackCtxt); 1742 pCallBackCtxt = M4OSA_NULL; 1743 } 1744 M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt); 1745 return err; 1746 } 1747 1748 /** 1749 * Save context to be able to call Pto3GPP step function in M4xVSS_step function */ 1750 xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt; 1751 xVSS_context->pCallBackCtxt = pCallBackCtxt; 1752 1753 return M4NO_ERROR; 1754} 1755 1756/** 1757 ****************************************************************************** 1758 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext) 1759 * @brief This function cleans up Pto3GPP 1760 * @note 1761 * @param pContext (IN) The integrator own context 1762 * 1763 * @return M4NO_ERROR: No error 1764 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1765 ****************************************************************************** 1766 */ 1767M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext) 1768{ 1769 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1770 M4OSA_ERR err; 1771 M4OSA_Char out_amr[64]; 1772 /*UTF conversion support*/ 1773 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1774 1775 /** 1776 * Free the PTO3GPP callback context */ 1777 if(M4OSA_NULL != xVSS_context->pCallBackCtxt) 1778 { 1779 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCallBackCtxt); 1780 xVSS_context->pCallBackCtxt = M4OSA_NULL; 1781 } 1782 1783 /** 1784 * Finalize the output file */ 1785 err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt); 1786 if (err != M4NO_ERROR) 1787 { 1788 M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err); 1789 M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt); 1790 return err; 1791 } 1792 1793 
/** 1794 * Free this M4PTO3GPP instance */ 1795 err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt); 1796 if (err != M4NO_ERROR) 1797 { 1798 M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err); 1799 return err; 1800 } 1801 1802 /** 1803 * Remove dummy.amr file */ 1804 M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, 64); 1805 M4OSA_chrNCat(out_amr, (M4OSA_Char *)"dummy.amr\0", 10); 1806 1807 /** 1808 * UTF conversion: convert the temporary path into the customer format*/ 1809 pDecodedPath = out_amr; 1810 1811 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1812 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1813 { 1814 M4OSA_UInt32 length = 0; 1815 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr, 1816 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1817 if(err != M4NO_ERROR) 1818 { 1819 M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\ 1820 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1821 return err; 1822 } 1823 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1824 } 1825 /** 1826 * End of the conversion, now use the decoded path*/ 1827 M4OSA_fileExtraDelete(pDecodedPath); 1828 1829 /*Commented because of the use of the UTF conversion*/ 1830/* M4OSA_fileExtraDelete(out_amr); 1831 */ 1832 1833 xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL; 1834 xVSS_context->pCallBackCtxt = M4OSA_NULL; 1835 1836 return M4NO_ERROR; 1837} 1838 1839/** 1840 ****************************************************************************** 1841 * prototype M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx) 1842 * @brief This function converts an RGB565 plane to YUV420 planar 1843 * @note It is used only for framing effect 1844 * It allocates output YUV planes 1845 * @param framingCtx (IN) The framing struct containing input RGB565 plane 1846 * 1847 * @return M4NO_ERROR: No error 1848 * @return 
M4ERR_PARAMETER: At least one of the function parameters is null 1849 * @return M4ERR_ALLOC: Allocation error (no more memory) 1850 ****************************************************************************** 1851 */ 1852M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx) 1853{ 1854 M4OSA_ERR err; 1855 1856 /** 1857 * Allocate output YUV planes */ 1858 framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), 1859 M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV"); 1860 if(framingCtx->FramingYuv == M4OSA_NULL) 1861 { 1862 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV"); 1863 return M4ERR_ALLOC; 1864 } 1865 framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width; 1866 framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height; 1867 framingCtx->FramingYuv[0].u_topleft = 0; 1868 framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width; 1869 framingCtx->FramingYuv[0].pac_data = 1870 (M4VIFI_UInt8*)M4OSA_malloc((framingCtx->FramingYuv[0].u_width\ 1871 *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\ 1872 "Alloc for the Convertion output YUV");; 1873 if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) 1874 { 1875 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV"); 1876 return M4ERR_ALLOC; 1877 } 1878 framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1; 1879 framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1; 1880 framingCtx->FramingYuv[1].u_topleft = 0; 1881 framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1; 1882 framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \ 1883 + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height; 1884 framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1; 1885 framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1; 1886 
framingCtx->FramingYuv[2].u_topleft = 0; 1887 framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1; 1888 framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \ 1889 + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height; 1890 1891 /** 1892 * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing 1893 effect */ 1894 err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); 1895 if(err != M4NO_ERROR) 1896 { 1897 M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\ 1898 error when converting from RGB to YUV: 0x%x\n", err); 1899 } 1900 1901 framingCtx->duration = 0; 1902 framingCtx->previousClipTime = -1; 1903 framingCtx->previewOffsetClipTime = -1; 1904 1905 /** 1906 * Only one element in the chained list (no animated image with RGB buffer...) */ 1907 framingCtx->pCurrent = framingCtx; 1908 framingCtx->pNext = framingCtx; 1909 1910 return M4NO_ERROR; 1911} 1912 1913M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size) 1914{ 1915 M4OSA_UInt32 i; 1916 M4OSA_UInt8* plane = planeIn; 1917 M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); 1918 M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; 1919 1920 for(i=0; i<(size>>1); i++) 1921 { 1922 *plane++ = transparent1; 1923 *plane++ = transparent2; 1924 } 1925 1926 return M4NO_ERROR; 1927} 1928 1929 1930/** 1931 ****************************************************************************** 1932 * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toYUV420_FrammingEffect(M4OSA_Context pContext, 1933 * M4VSS3GPP_EffectSettings* pEffect, 1934 * M4xVSS_FramingStruct* framingCtx, 1935 M4VIDEOEDITING_VideoFrameSize OutputVideoResolution) 1936 * 1937 * @brief This function converts ARGB8888 input file to YUV420 whenused for framming effect 1938 * @note The input ARGB8888 file path is contained in the pEffect structure 1939 * If the ARGB8888 must be 
resized to fit output video size, this function 1940 * will do it. 1941 * @param pContext (IN) The integrator own context 1942 * @param pEffect (IN) The effect structure containing all informations on 1943 * the file to decode, resizing ... 1944 * @param framingCtx (IN/OUT) Structure in which the output RGB will be stored 1945 * 1946 * @return M4NO_ERROR: No error 1947 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1948 * @return M4ERR_ALLOC: Allocation error (no more memory) 1949 ****************************************************************************** 1950 */ 1951 1952 1953M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext, 1954 M4VSS3GPP_EffectSettings* pEffect, 1955 M4xVSS_FramingStruct* framingCtx, 1956 M4VIDEOEDITING_VideoFrameSize\ 1957 OutputVideoResolution) 1958{ 1959 M4OSA_ERR err; 1960 M4OSA_Context pARGBIn; 1961 M4OSA_UInt32 file_size; 1962 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1963 M4OSA_UInt32 width, height, width_out, height_out; 1964 M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath; 1965 M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8); 1966 M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR; 1967 /*UTF conversion support*/ 1968 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1969 M4OSA_UInt32 i = 0,j = 0; 1970 M4VIFI_ImagePlane rgbPlane; 1971 M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4); 1972 M4OSA_UInt32 frameSize = (framingCtx->width * framingCtx->height * 3); //Size of RGB888 data 1973 M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, M4VS, (M4OSA_Char*)\ 1974 "Image argb data"); 1975 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering "); 1976 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ", 1977 framingCtx->width,framingCtx->height); 1978 if(pTmpData == M4OSA_NULL) { 1979 M4OSA_TRACE1_0("Failed to allocate memory 
for Image clip"); 1980 return M4ERR_ALLOC; 1981 } 1982 /** 1983 * UTF conversion: convert the file path into the customer format*/ 1984 pDecodedPath = pFile; 1985 1986 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1987 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1988 { 1989 M4OSA_UInt32 length = 0; 1990 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile, 1991 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1992 if(err != M4NO_ERROR) 1993 { 1994 M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\ 1995 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1996 return err; 1997 } 1998 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1999 } 2000 2001 /** 2002 * End of the conversion, now use the decoded path*/ 2003 2004 /* Open input ARGB8888 file and store it into memory */ 2005 err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead); 2006 2007 if(err != M4NO_ERROR) 2008 { 2009 M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err); 2010 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2011 pTmpData = M4OSA_NULL; 2012 return err; 2013 } 2014 2015 err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); 2016 if(err != M4NO_ERROR) 2017 { 2018 xVSS_context->pFileReadPtr->closeRead(pARGBIn); 2019 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2020 pTmpData = M4OSA_NULL; 2021 } 2022 2023 2024 err = xVSS_context->pFileReadPtr->closeRead(pARGBIn); 2025 if(err != M4NO_ERROR) 2026 { 2027 M4OSA_TRACE1_2("Can't close input png file %s, error: 0x%x\n",pFile, err); 2028 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2029 pTmpData = M4OSA_NULL; 2030 return err; 2031 } 2032 2033 /* rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS,\ 2034 (M4OSA_Char*)"Image clip RGB888 data"); */ 2035 /* temp fix for crashing happening in filter : allocation 2memory for 2 more width */ 2036 
rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(((frameSize)+ (2 * framingCtx->width)), 2037 M4VS, (M4OSA_Char*)"Image clip RGB888 data"); 2038 if(rgbPlane.pac_data == M4OSA_NULL) 2039 { 2040 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 2041 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2042 return M4ERR_ALLOC; 2043 } 2044 2045 rgbPlane.u_height = (( framingCtx->height+1)>>1)<<1;; 2046 rgbPlane.u_width = (( framingCtx->width+1)>>1)<<1;; 2047 rgbPlane.u_stride = rgbPlane.u_width*3; 2048 rgbPlane.u_topleft = 0; 2049 2050 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2051 Remove the alpha channel "); 2052 /** Remove the alpha channel */ 2053 for (i=0, j = 0; i < frameSize_argb; i++) { 2054 if ((i % 4) == 0) continue; 2055 rgbPlane.pac_data[j] = pTmpData[i]; 2056 j++; 2057 } 2058 2059 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2060 /** 2061 * Check if output sizes are odd */ 2062 if(rgbPlane.u_height % 2 != 0) 2063 { 2064 2065 M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data; 2066 M4OSA_UInt32 i; 2067 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2068 output height is odd "); 2069 output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*3; 2070 for(i=0;i<rgbPlane.u_width;i++) 2071 { 2072 *output_pac_data++ = transparent1; 2073 *output_pac_data++ = transparent2; 2074 } 2075 2076 /** 2077 * We just add a white line to the PNG that will be transparent */ 2078 rgbPlane.u_height++; 2079 } 2080 if(rgbPlane.u_width % 2 != 0) 2081 { 2082 2083 /** 2084 * We add a new column of white (=transparent), but we need to parse all RGB lines ... 
*/ 2085 M4OSA_UInt32 i; 2086 M4VIFI_UInt8* newRGBpac_data; 2087 M4VIFI_UInt8* output_pac_data, *input_pac_data; 2088 2089 rgbPlane.u_width++; 2090 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \ 2091 output width is odd "); 2092 /** 2093 * We need to allocate a new RGB output buffer in which all decoded data 2094 + white line will be copied */ 2095 newRGBpac_data = (M4VIFI_UInt8*)M4OSA_malloc(rgbPlane.u_height*rgbPlane.u_width*3\ 2096 *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB"); 2097 if(newRGBpac_data == M4OSA_NULL) 2098 { 2099 M4OSA_TRACE1_0("Allocation error in \ 2100 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2101 /** 2102 * Destroy SPS instance */ 2103 //M4SPS_destroy(pSPSContext); 2104 return M4ERR_ALLOC; 2105 } 2106 2107 output_pac_data= newRGBpac_data; 2108 input_pac_data = rgbPlane.pac_data; 2109 2110 for(i=0;i<rgbPlane.u_height;i++) 2111 { 2112 M4OSA_memcpy((M4OSA_MemAddr8)output_pac_data, (M4OSA_MemAddr8)input_pac_data, 2113 (rgbPlane.u_width-1)*3); 2114 output_pac_data += ((rgbPlane.u_width-1)*3); 2115 /* Put the pixel to transparency color */ 2116 *output_pac_data++ = transparent1; 2117 *output_pac_data++ = transparent2; 2118 input_pac_data += ((rgbPlane.u_width-1)*3); 2119 } 2120 2121 rgbPlane.pac_data = newRGBpac_data; 2122 } 2123 2124 /** 2125 * Initialize chained list parameters */ 2126 framingCtx->duration = 0; 2127 framingCtx->previousClipTime = -1; 2128 framingCtx->previewOffsetClipTime = -1; 2129 2130 /** 2131 * Only one element in the chained list (no animated image ...) 
*/ 2132 framingCtx->pCurrent = framingCtx; 2133 framingCtx->pNext = framingCtx; 2134 2135 /** 2136 * Get output width/height */ 2137 switch(OutputVideoResolution) 2138 //switch(xVSS_context->pSettings->xVSS.outputVideoSize) 2139 { 2140 case M4VIDEOEDITING_kSQCIF: 2141 width_out = 128; 2142 height_out = 96; 2143 break; 2144 case M4VIDEOEDITING_kQQVGA: 2145 width_out = 160; 2146 height_out = 120; 2147 break; 2148 case M4VIDEOEDITING_kQCIF: 2149 width_out = 176; 2150 height_out = 144; 2151 break; 2152 case M4VIDEOEDITING_kQVGA: 2153 width_out = 320; 2154 height_out = 240; 2155 break; 2156 case M4VIDEOEDITING_kCIF: 2157 width_out = 352; 2158 height_out = 288; 2159 break; 2160 case M4VIDEOEDITING_kVGA: 2161 width_out = 640; 2162 height_out = 480; 2163 break; 2164 case M4VIDEOEDITING_kWVGA: 2165 width_out = 800; 2166 height_out = 480; 2167 break; 2168 case M4VIDEOEDITING_kNTSC: 2169 width_out = 720; 2170 height_out = 480; 2171 break; 2172 case M4VIDEOEDITING_k640_360: 2173 width_out = 640; 2174 height_out = 360; 2175 break; 2176 case M4VIDEOEDITING_k854_480: 2177 // StageFright encoders require %16 resolution 2178 width_out = M4ENCODER_854_480_Width; 2179 height_out = 480; 2180 break; 2181 case M4VIDEOEDITING_kHD1280: 2182 width_out = 1280; 2183 height_out = 720; 2184 break; 2185 case M4VIDEOEDITING_kHD1080: 2186 // StageFright encoders require %16 resolution 2187 width_out = M4ENCODER_HD1080_Width; 2188 height_out = 720; 2189 break; 2190 case M4VIDEOEDITING_kHD960: 2191 width_out = 960; 2192 height_out = 720; 2193 break; 2194 2195 /** 2196 * If output video size is not given, we take QCIF size, 2197 * should not happen, because already done in M4xVSS_sendCommand */ 2198 default: 2199 width_out = 176; 2200 height_out = 144; 2201 break; 2202 } 2203 2204 2205 /** 2206 * Allocate output planes structures */ 2207 framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_malloc(sizeof(M4VIFI_ImagePlane), M4VS, 2208 (M4OSA_Char *)"Framing Output plane RGB"); 2209 
if(framingCtx->FramingRgb == M4OSA_NULL) 2210 { 2211 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2212 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2213 pTmpData = M4OSA_NULL;NULL; 2214 return M4ERR_ALLOC; 2215 } 2216 /** 2217 * Resize RGB if needed */ 2218 if((pEffect->xVSS.bResize) && 2219 (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out)) 2220 { 2221 width = width_out; 2222 height = height_out; 2223 2224 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \ 2225 New Width and height %d %d ",width,height); 2226 2227 framingCtx->FramingRgb->u_height = height_out; 2228 framingCtx->FramingRgb->u_width = width_out; 2229 framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*3; 2230 framingCtx->FramingRgb->u_topleft = 0; 2231 2232 framingCtx->FramingRgb->pac_data = 2233 (M4VIFI_UInt8*)M4OSA_malloc(framingCtx->FramingRgb->u_height*framingCtx->\ 2234 FramingRgb->u_width*3*sizeof(M4VIFI_UInt8), M4VS, 2235 (M4OSA_Char *)"Framing Output pac_data RGB"); 2236 if(framingCtx->FramingRgb->pac_data == M4OSA_NULL) 2237 { 2238 M4OSA_TRACE1_0("Allocation error in \ 2239 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2240 M4OSA_free((M4OSA_MemAddr32)pTmpData); 2241 pTmpData = M4OSA_NULL;NULL; 2242 return M4ERR_ALLOC; 2243 } 2244 2245 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Resizing Needed "); 2246 M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2247 rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width); 2248 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb); 2249 if(err != M4NO_ERROR) 2250 { 2251 M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\ 2252 when resizing RGB plane: 0x%x\n", err); 2253 return err; 2254 } 2255 2256 if(rgbPlane.pac_data != M4OSA_NULL) 2257 { 2258 M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data); 2259 rgbPlane.pac_data = 
M4OSA_NULL; 2260 2261 } 2262 2263 } 2264 else 2265 { 2266 2267 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2268 Resizing Not Needed "); 2269 width = framingCtx->width; 2270 height = framingCtx->height; 2271 framingCtx->FramingRgb->u_height = height; 2272 framingCtx->FramingRgb->u_width = width; 2273 framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*3; 2274 framingCtx->FramingRgb->u_topleft = 0; 2275 framingCtx->FramingRgb->pac_data = rgbPlane.pac_data; 2276 } 2277 2278 2279 if(pEffect->xVSS.bResize) 2280 { 2281 /** 2282 * Force topleft to 0 for pure framing effect */ 2283 framingCtx->topleft_x = 0; 2284 framingCtx->topleft_y = 0; 2285 } 2286 2287 2288 2289 /** 2290 * Convert RGB output to YUV 420 to be able to merge it with output video in framing 2291 effect */ 2292 framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS, 2293 (M4OSA_Char *)"Framing Output plane YUV"); 2294 if(framingCtx->FramingYuv == M4OSA_NULL) 2295 { 2296 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2297 return M4ERR_ALLOC; 2298 } 2299 framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1; 2300 framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1; 2301 framingCtx->FramingYuv[0].u_topleft = 0; 2302 framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1; 2303 framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc 2304 ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, 2305 (M4OSA_Char *)"Alloc for the output YUV");; 2306 if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL) 2307 { 2308 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect"); 2309 return M4ERR_ALLOC; 2310 } 2311 framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1; 2312 framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1; 2313 framingCtx->FramingYuv[1].u_topleft = 0; 2314 framingCtx->FramingYuv[1].u_stride = 
(((width+1)>>1)<<1)>>1; 2315 2316 framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_malloc\ 2317 (((framingCtx->FramingYuv[0].u_width)/2*(framingCtx->FramingYuv[0].u_height)/2), M4VS, 2318 (M4OSA_Char *)"Alloc for the output YUV");; 2319 2320 framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1; 2321 framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1; 2322 framingCtx->FramingYuv[2].u_topleft = 0; 2323 framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1; 2324 2325 framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_malloc 2326 (((framingCtx->FramingYuv[0].u_width)/2*(framingCtx->FramingYuv[0].u_height)/2), M4VS, 2327 (M4OSA_Char *)"Alloc for the output YUV");; 2328 2329 2330 2331 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\ 2332 convert RGB to YUV "); 2333 2334 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv); 2335 if(err != M4NO_ERROR) 2336 { 2337 M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err); 2338 } 2339 2340 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Leaving "); 2341 return M4NO_ERROR; 2342} 2343 2344/** 2345 ****************************************************************************** 2346 * prototype M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext) 2347 * 2348 * @brief This function prepares VSS for editing 2349 * @note It also set special xVSS effect as external effects for the VSS 2350 * @param pContext (IN) The integrator own context 2351 * 2352 * @return M4NO_ERROR: No error 2353 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2354 * @return M4ERR_ALLOC: Allocation error (no more memory) 2355 ****************************************************************************** 2356 */ 2357M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext) 2358{ 2359 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2360 M4VSS3GPP_EditContext pVssCtxt; 
2361 M4OSA_UInt32 i,j; 2362 M4OSA_ERR err; 2363 2364 /** 2365 * Create a VSS 3GPP edition instance */ 2366 err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 2367 if (err != M4NO_ERROR) 2368 { 2369 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n", 2370 err); 2371 M4VSS3GPP_editCleanUp(pVssCtxt); 2372 return err; 2373 } 2374 2375#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS 2376 /* replay recorded external decoder registrations on the VSS3GPP */ 2377 for (i=0; i<M4VD_kVideoType_NB; i++) 2378 { 2379 if (xVSS_context->registeredExternalDecs[i].registered) 2380 { 2381 err = M4VSS3GPP_editRegisterExternalVideoDecoder(pVssCtxt, i, 2382 xVSS_context->registeredExternalDecs[i].pDecoderInterface, 2383 xVSS_context->registeredExternalDecs[i].pUserData); 2384 if (M4NO_ERROR != err) 2385 { 2386 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: \ 2387 M4VSS3GPP_editRegisterExternalVideoDecoder() returns 0x%x!", err); 2388 M4VSS3GPP_editCleanUp(pVssCtxt); 2389 return err; 2390 } 2391 } 2392 } 2393#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */ 2394 2395 /* replay recorded external encoder registrations on the VSS3GPP */ 2396 for (i=0; i<M4VE_kEncoderType_NB; i++) 2397 { 2398 if (xVSS_context->registeredExternalEncs[i].registered) 2399 { 2400 err = M4VSS3GPP_editRegisterExternalVideoEncoder(pVssCtxt, i, 2401 xVSS_context->registeredExternalEncs[i].pEncoderInterface, 2402 xVSS_context->registeredExternalEncs[i].pUserData); 2403 if (M4NO_ERROR != err) 2404 { 2405 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 2406 M4VSS3GPP_editRegisterExternalVideoEncoder() returns 0x%x!", err); 2407 M4VSS3GPP_editCleanUp(pVssCtxt); 2408 return err; 2409 } 2410 } 2411 } 2412 2413 /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */ 2414 if(xVSS_context->targetedBitrate != 0) 2415 { 2416 M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt; 2417 2418 
pVSSContext->bIsMMS = M4OSA_TRUE; 2419 pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate; 2420 pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate; 2421 } 2422 2423 /*Warning: since the adding of the UTF conversion, pSettings has been changed in the next 2424 part in pCurrentEditSettings (there is a specific current editing structure for the saving, 2425 as for the preview)*/ 2426 2427 /** 2428 * Set the external video effect functions, for saving mode (to be moved to 2429 M4xVSS_saveStart() ?)*/ 2430 for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++) 2431 { 2432 for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++) 2433 { 2434 if (M4xVSS_kVideoEffectType_BlackAndWhite == 2435 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2436 { 2437 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2438 M4VSS3GPP_externalVideoEffectColor; 2439 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2440 // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite; 2441 /*commented FB*/ 2442 /** 2443 * We do not need to set the color context, it is already set 2444 during sendCommand function */ 2445 } 2446 if (M4xVSS_kVideoEffectType_Pink == 2447 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2448 { 2449 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2450 M4VSS3GPP_externalVideoEffectColor; 2451 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2452 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't 2453 // use any function context */ 2454 /*commented FB*/ 2455 /** 2456 * We do not need to set the color context, 2457 it is already set during sendCommand function */ 2458 } 2459 if (M4xVSS_kVideoEffectType_Green == 2460 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2461 { 2462 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2463 M4VSS3GPP_externalVideoEffectColor; 2464 
//xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2465 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green; 2466 /**< we don't use any function context */ 2467 /*commented FB*/ 2468 /** 2469 * We do not need to set the color context, it is already set during 2470 sendCommand function */ 2471 } 2472 if (M4xVSS_kVideoEffectType_Sepia == 2473 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2474 { 2475 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2476 M4VSS3GPP_externalVideoEffectColor; 2477 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2478 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia; 2479 /**< we don't use any function context */ 2480 /*commented FB*/ 2481 /** 2482 * We do not need to set the color context, it is already set during 2483 sendCommand function */ 2484 } 2485 if (M4xVSS_kVideoEffectType_Fifties == 2486 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2487 { 2488 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2489 M4VSS3GPP_externalVideoEffectFifties; 2490 /** 2491 * We do not need to set the framing context, it is already set during 2492 sendCommand function */ 2493 } 2494 if (M4xVSS_kVideoEffectType_Negative == 2495 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2496 { 2497 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2498 M4VSS3GPP_externalVideoEffectColor; 2499 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2500 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative; 2501 /**< we don't use any function context */ 2502 /*commented FB*/ 2503 /** 2504 * We do not need to set the color context, it is already set during 2505 sendCommand function */ 2506 } 2507 if (M4xVSS_kVideoEffectType_Framing == 2508 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2509 { 2510 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2511 M4VSS3GPP_externalVideoEffectFraming; 2512 /** 2513 * We do not 
need to set the framing context, it is already set during 2514 sendCommand function */ 2515 } 2516 if (M4xVSS_kVideoEffectType_ZoomIn == 2517 xVSS_context->pSettings->Effects[j].VideoEffectType) 2518 { 2519 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2520 M4VSS3GPP_externalVideoEffectZoom; 2521 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2522 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any 2523 function context */ 2524 } 2525 if (M4xVSS_kVideoEffectType_ZoomOut == 2526 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2527 { 2528 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2529 M4VSS3GPP_externalVideoEffectZoom; 2530 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2531 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any 2532 function context */ 2533 } 2534 if (M4xVSS_kVideoEffectType_ColorRGB16 == 2535 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2536 { 2537 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2538 M4VSS3GPP_externalVideoEffectColor; 2539 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2540 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16; 2541 /**< we don't use any function context */ 2542 /** 2543 * We do not need to set the color context, it is already set during 2544 sendCommand function */ 2545 } 2546 if (M4xVSS_kVideoEffectType_Gradient == 2547 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2548 { 2549 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2550 M4VSS3GPP_externalVideoEffectColor; 2551 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2552 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16; 2553 /**< we don't use any function context */ 2554 /** 2555 * We do not need to set the color context, it is already set during 2556 sendCommand function */ 2557 } 2558 2559 } 2560 } 2561 2562 /** 2563 * 
Open the VSS 3GPP */ 2564 err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings); 2565 if (err != M4NO_ERROR) 2566 { 2567 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 2568 M4VSS3GPP_editOpen returned 0x%x\n",err); 2569 M4VSS3GPP_editCleanUp(pVssCtxt); 2570 return err; 2571 } 2572 2573 /** 2574 * Save VSS context to be able to close / free VSS later */ 2575 xVSS_context->pCurrentEditContext = pVssCtxt; 2576 2577 return M4NO_ERROR; 2578} 2579 2580/** 2581 ****************************************************************************** 2582 * prototype M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext) 2583 * 2584 * @brief This function cleans up VSS 2585 * @note 2586 * @param pContext (IN) The integrator own context 2587 * 2588 * @return M4NO_ERROR: No error 2589 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2590 ****************************************************************************** 2591 */ 2592M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext) 2593{ 2594 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2595 M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext; 2596 M4OSA_ERR err; 2597 2598 if(xVSS_context->pCurrentEditContext != M4OSA_NULL) 2599 { 2600 /** 2601 * Close the VSS 3GPP */ 2602 err = M4VSS3GPP_editClose(pVssCtxt); 2603 if (err != M4NO_ERROR) 2604 { 2605 M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\ 2606 M4VSS3GPP_editClose returned 0x%x\n",err); 2607 M4VSS3GPP_editCleanUp(pVssCtxt); 2608 return err; 2609 } 2610 2611 /** 2612 * Free this VSS3GPP edition instance */ 2613 err = M4VSS3GPP_editCleanUp(pVssCtxt); 2614 if (err != M4NO_ERROR) 2615 { 2616 M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \ 2617 M4VSS3GPP_editCleanUp returned 0x%x\n",err); 2618 return err; 2619 } 2620 } 2621 2622 return M4NO_ERROR; 2623} 2624 2625/** 2626 ****************************************************************************** 2627 * prototype M4OSA_ERR 
M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
 *
 * @brief    This function prepares VSS for audio mixing
 * @note    It takes its parameters from the BGM settings in the xVSS internal context
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 ******************************************************************************
 */
/***
 * FB: the function has been modified since the structure used for the saving is now the
 * pCurrentEditSettings and not the pSettings
 * This change has been added for the UTF support
 * All the "xVSS_context->pSettings" has been replaced by "xVSS_context->pCurrentEditSettings"
 ***/
M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
    M4OSA_ERR err;
    M4VIDEOEDITING_ClipProperties fileProperties;

    /**
    * Allocate audio mixing settings structure and fill it with BGM parameters.
    * Ownership: the structure is stored in xVSS_context->pAudioMixSettings below and
    * freed by M4xVSS_internalCloseAudioMixedFile */
    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_malloc
        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
    if(pAudioMixSettings == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
        return M4ERR_ALLOC;
    }

    /* For a 3GPP BGM track, probe its audio properties; on failure, fall through with a
       deliberately mismatching stream type so that the BGM is re-encoded */
    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
         M4VIDEOEDITING_kFileType_3GPP)
    {
        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
                 &fileProperties);
        if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
                 impossible to retrieve audio BGM properties ->\
                     reencoding audio background music", err);
            fileProperties.AudioStreamType =
                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
                     /* To force BGM encoding */
        }
    }

    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
    pAudioMixSettings->AddedAudioFileType =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
    pAudioMixSettings->pAddedAudioTrackFile =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
    pAudioMixSettings->uiAddVolume =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;

    /* NOTE(review): despite the FB comment above, the following reads still go through
       xVSS_context->pSettings rather than pCurrentEditSettings - presumably both point to
       the same output/BGM parameters at this stage; confirm before unifying */
    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
    pAudioMixSettings->uiSamplingFrequency =
         xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;

    /* Ducking / volume parameters; volumes are expressed as 0..100 in the BGM settings and
       converted to 0..1 levels here */
    pAudioMixSettings->b_DuckingNeedeed =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
    pAudioMixSettings->fBTVolLevel =
         (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
    pAudioMixSettings->InDucking_threshold =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
    pAudioMixSettings->InDucking_lowVolume =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
    pAudioMixSettings->fPTVolLevel =
         (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;

    /* Output is forced mono or stereo according to the application setting */
    if(xVSS_context->pSettings->xVSS.bAudioMono)
    {
        pAudioMixSettings->outputNBChannels = 1;
    }
    else
    {
        pAudioMixSettings->outputNBChannels = 2;
    }

    /**
    * Fill audio mix settings with BGM parameters */
    pAudioMixSettings->uiBeginLoop =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
    pAudioMixSettings->uiEndLoop =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
    pAudioMixSettings->uiAddCts =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;

    /**
    * Output file of the audio mixer will be final file (audio mixing is the last step) */
    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;

    /**
    * Input file of the audio mixer is a temporary file containing all audio/video editions */
    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;

    /**
    * Save audio mixing settings pointer to be able to free it in
    M4xVSS_internalCloseAudioMixedFile function */
    xVSS_context->pAudioMixSettings = pAudioMixSettings;

    /**
    * Create a VSS 3GPP audio mixing instance */
    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);

    /**
    * Save audio mixing context to be able to call audio mixing step function in
    M4xVSS_step function. Note: stored even on init failure (checked just below) */
    xVSS_context->pAudioMixContext = pAudioMixingCtxt;

    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
        return err;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
 *
 * @brief    This function cleans up VSS for audio mixing
 * @note
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
2768 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2769 ****************************************************************************** 2770 */ 2771M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext) 2772{ 2773 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2774 M4OSA_ERR err; 2775 2776 /** 2777 * Free this VSS3GPP audio mixing instance */ 2778 if(xVSS_context->pAudioMixContext != M4OSA_NULL) 2779 { 2780 err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext); 2781 if (err != M4NO_ERROR) 2782 { 2783 M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\ 2784 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err); 2785 return err; 2786 } 2787 } 2788 2789 /** 2790 * Free VSS audio mixing settings */ 2791 if(xVSS_context->pAudioMixSettings != M4OSA_NULL) 2792 { 2793 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pAudioMixSettings); 2794 xVSS_context->pAudioMixSettings = M4OSA_NULL; 2795 } 2796 2797 return M4NO_ERROR; 2798} 2799 2800/** 2801 ****************************************************************************** 2802 * prototype M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext) 2803 * 2804 * @brief This function cleans up preview edition structure used to generate 2805 * preview.3gp file given to the VPS 2806 * @note It also free the preview structure given to the VPS 2807 * @param pContext (IN) The integrator own context 2808 * 2809 * @return M4NO_ERROR: No error 2810 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2811 ****************************************************************************** 2812 */ 2813M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext) 2814{ 2815 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2816 M4OSA_UInt8 i; 2817 2818 /** 2819 * Free clip/transition settings */ 2820 for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++) 2821 { 2822 M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]); 2823 
2824 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i])); 2825 xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL; 2826 2827 /** 2828 * Because there is 1 less transition than clip number */ 2829 if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1) 2830 { 2831 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList[i])); 2832 xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL; 2833 } 2834 } 2835 2836 /** 2837 * Free clip/transition list */ 2838 if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL) 2839 { 2840 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList)); 2841 xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL; 2842 } 2843 if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL) 2844 { 2845 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList)); 2846 xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL; 2847 } 2848 2849 /** 2850 * Free output preview file path */ 2851 if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL) 2852 { 2853 M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile); 2854 xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL; 2855 } 2856 2857 /** 2858 * Free temporary preview file path */ 2859 if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL) 2860 { 2861 M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pTemporaryFile); 2862 M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile); 2863 xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL; 2864 } 2865 2866 /** 2867 * Free "local" BGM settings */ 2868 if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL) 2869 { 2870 if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL) 2871 { 2872 M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile); 2873 
xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL; 2874 } 2875 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack); 2876 xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL; 2877 } 2878 2879 /** 2880 * Free current edit settings structure */ 2881 if(xVSS_context->pCurrentEditSettings != M4OSA_NULL) 2882 { 2883 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings); 2884 xVSS_context->pCurrentEditSettings = M4OSA_NULL; 2885 } 2886 2887 /** 2888 * Free preview effects given to application */ 2889 if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects) 2890 { 2891 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pPreviewSettings->Effects); 2892 xVSS_context->pPreviewSettings->Effects = M4OSA_NULL; 2893 xVSS_context->pPreviewSettings->nbEffects = 0; 2894 } 2895 2896 return M4NO_ERROR; 2897} 2898 2899 2900/** 2901 ****************************************************************************** 2902 * prototype M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext) 2903 * 2904 * @brief This function cleans up saving edition structure used to generate 2905 * output.3gp file given to the VPS 2906 * @note 2907 * @param pContext (IN) The integrator own context 2908 * 2909 * @return M4NO_ERROR: No error 2910 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2911 ****************************************************************************** 2912 */ 2913M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext) 2914{ 2915 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2916 M4OSA_UInt8 i; 2917 2918 if(xVSS_context->pCurrentEditSettings != M4OSA_NULL) 2919 { 2920 /** 2921 * Free clip/transition settings */ 2922 for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++) 2923 { 2924 M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]); 2925 2926 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i])); 2927 
            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;

            /**
            * Because there is 1 less transition than clip number */
            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
            {
                M4OSA_free((M4OSA_MemAddr32)\
                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }

        /**
        * Free clip/transition list */
        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList));
            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
        }
        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList));
            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
        }

        /* Free the effects array attached to the saving settings */
        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
        {
            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->Effects));
            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->nbEffects = 0;
        }

        /**
        * Free output saving file path. When a BGM track exists the intermediate
        output file is also deleted from the file system (audio mixing produced the
        real final file) */
        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
            {
                M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pOutputFile);
                M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile);
            }
            if(xVSS_context->pOutputFile != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)xVSS_context->pOutputFile);
                xVSS_context->pOutputFile = M4OSA_NULL;
            }
            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
        }

        /**
        * Free temporary saving file path; the file itself is removed first */
        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
        {
            M4OSA_fileExtraDelete(xVSS_context->pCurrentEditSettings->pTemporaryFile);
            M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
        }

        /**
        * Free "local" BGM settings */
        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
            {
                M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
            }
            M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
        }

        /**
        * Free current edit settings structure */
        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings);
        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4OSA_Context pContext)
 *
 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
 * @note
 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
{
    M4OSA_UInt8 i,j;

    /**
    * For each clip ... */
    for(i=0; i<pSettings->uiClipNumber; i++)
    {
        /**
        * ... free clip settings */
        if(pSettings->pClipList[i] != M4OSA_NULL)
        {
            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);

            M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList[i]));
            pSettings->pClipList[i] = M4OSA_NULL;
        }

        /**
        * ... free transition settings */
        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
        {
            if(pSettings->pTransitionList[i] != M4OSA_NULL)
            {
                switch (pSettings->pTransitionList[i]->VideoTransitionType)
                {
                    case M4xVSS_kVideoTransitionType_AlphaMagic:

                        /**
                        * In case of Alpha Magic transition,
                        some extra parameters need to be freed */
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
                             != M4OSA_NULL)
                        {
                            /* Free the decoded alpha plane and its pixel buffer, then the
                               context structure itself */
                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
                                    pPlane->pac_data));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;

                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
                                pSettings->pTransitionList[i]->\
                                    pExtVideoTransitionFctCtxt)->pPlane));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;

                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->\
                                pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;

                            /* Later Alpha Magic transitions that use the same alpha file
                               share this context: scan them and clear their pointer so the
                               shared context is not freed a second time */
                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
                            {
                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
                                {
                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
                                     M4xVSS_kVideoTransitionType_AlphaMagic)
                                    {
                                        M4OSA_UInt32 pCmpResult=0;
                                        M4OSA_chrCompare(pSettings->pTransitionList[i]->\
                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
                                                pAlphaFilePath,
                                                    pSettings->pTransitionList[j]->\
                                                        xVSS.transitionSpecific.\
                                                            pAlphaMagicSettings->pAlphaFilePath,
                                                                (M4OSA_Int32 *)&pCmpResult);
                                        if(pCmpResult == 0)
                                        {
                                            /* Free extra internal alpha magic structure and put
                                            it to NULL to avoid refreeing it */
                                            M4OSA_free((M4OSA_MemAddr32)(pSettings->\
                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
                                            pSettings->pTransitionList[j]->\
                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
                                        }
                                    }
                                }
                            }
                        }

                        /* Free the application-provided alpha magic settings (path + struct) */
                        if(pSettings->pTransitionList[i]->\
                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
                        {
                            if(pSettings->pTransitionList[i]->\
                                xVSS.transitionSpecific.pAlphaMagicSettings->\
                                    pAlphaFilePath != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)pSettings->\
                                    pTransitionList[i]->\
                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
                                            pAlphaFilePath);
                                pSettings->pTransitionList[i]->\
                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
                                        pAlphaFilePath = M4OSA_NULL;
                            }
                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->\
                                xVSS.transitionSpecific.pAlphaMagicSettings);
                            pSettings->pTransitionList[i]->\
                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;

                        }

                        break;


                    case M4xVSS_kVideoTransitionType_SlideTransition:
                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
                            xVSS.transitionSpecific.pSlideTransitionSettings)
                        {
                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->\
                                xVSS.transitionSpecific.pSlideTransitionSettings);
                            pSettings->pTransitionList[i]->\
                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
                        }
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->\
                                pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
                        }
                        break;
                    default:
                        break;

                }
                /**
                * Free transition settings structure */
                M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]));
                pSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }
    }

    /**
    * Free clip list */
    if(pSettings->pClipList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList));
        pSettings->pClipList = M4OSA_NULL;
    }

    /**
    * Free transition list */
    if(pSettings->pTransitionList != M4OSA_NULL)
    {
        M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList));
        pSettings->pTransitionList = M4OSA_NULL;
    }

    /**
    * RC: Free effects list */
    if(pSettings->Effects != M4OSA_NULL)
    {
        for(i=0; i<pSettings->nbEffects; i++)
        {
            /**
            * For each clip, free framing structure if needed */
            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
            {
#ifdef DECODE_GIF_ON_SAVING
                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
#else
                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
                M4xVSS_FramingStruct* framingCtx_save;
                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
#endif

#ifdef DECODE_GIF_ON_SAVING
                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non existant
                 pointer */
                {
                    /* RGB plane is only owned by the context when the application did not
                       supply its own framing buffer */
                    if(framingCtx->aFramingCtx != M4OSA_NULL)
                    {
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
                                    FramingRgb->pac_data);
                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingRgb);
                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
                                FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
                                FramingYuv[1].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
                                FramingYuv[2].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingYuv);
                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
                        }
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx);
                        framingCtx->aFramingCtx = M4OSA_NULL;
                    }
                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
                    {
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
                                    FramingRgb->pac_data);
                                framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
                                    FramingRgb);
                                framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
                        {
                            /* NOTE(review): only FramingYuv[0].pac_data is freed here,
                               whereas the aFramingCtx branch above frees planes 0..2 -
                               presumably the three planes share one buffer in this case;
                               confirm against the allocation site */
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
                                FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->FramingYuv);
                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
                        }
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last);
                        framingCtx->aFramingCtx_last = M4OSA_NULL;
                    }
                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
                    {
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->pEffectFilePath);
                        framingCtx->pEffectFilePath = M4OSA_NULL;
                    }
                    /*In case there are still allocated*/
                    if(framingCtx->pSPSContext != M4OSA_NULL)
                    {
                    //    M4SPS_destroy(framingCtx->pSPSContext);
                        framingCtx->pSPSContext = M4OSA_NULL;
#if 0
                        if(framingCtx->inputStream.data_buffer  != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->inputStream.data_buffer);
                            framingCtx->inputStream.data_buffer = M4OSA_NULL;
                        }
#endif
                    }
                    /*Alpha blending structure*/
                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
                    {
                        M4OSA_free((M4OSA_MemAddr32)framingCtx->alphaBlendingStruct);
                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
                    }

                    M4OSA_free((M4OSA_MemAddr32)framingCtx);
                    framingCtx = M4OSA_NULL;
                }
#else
                /* Without GIF decoding the framing contexts form a circular list: walk it
                   starting from the head and stop when the walk comes back around */
                do
                {
                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non
                     existant pointer */
                    {
                        if(pSettings->Effects[i].xVSS.pFramingBuffer == M4OSA_NULL)
                        {
                            if(framingCtx->FramingRgb != M4OSA_NULL)
                            {
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
                                framingCtx->FramingRgb->pac_data = M4OSA_NULL;
                                M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
                                framingCtx->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->FramingYuv != M4OSA_NULL)
                        {
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[0].pac_data);
                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
                            framingCtx->FramingYuv = M4OSA_NULL;
                        }
                        framingCtx_save = framingCtx->pNext;
                        M4OSA_free((M4OSA_MemAddr32)framingCtx);
                        framingCtx = M4OSA_NULL;
                        framingCtx = framingCtx_save;
                    }
                    else
                    {
                        /*FB: bug fix P4ME00003002*/
                        break;
                    }
                } while(framingCtx_first != framingCtx);
#endif
            }
            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
            {
                /* Free Fifties context */
                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(FiftiesCtx != M4OSA_NULL)
                {
                    M4OSA_free((M4OSA_MemAddr32)FiftiesCtx);
                    FiftiesCtx = M4OSA_NULL;
                }

            }
            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
            {
                /* Free Color context */
                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(ColorCtx != M4OSA_NULL)
                {
                    M4OSA_free((M4OSA_MemAddr32)ColorCtx);
                    ColorCtx = M4OSA_NULL;
                }
            }

            /* Free simple fields */
            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingFilePath);
                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingBuffer);
                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
            {
                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pTextBuffer);
                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
            }
        }
        M4OSA_free((M4OSA_MemAddr32)pSettings->Effects);
        pSettings->Effects = M4OSA_NULL;
    }

    return M4NO_ERROR;
}

M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
//    M4OSA_UInt8 i,j;

    /* Free "local" BGM settings */
    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3371 { 3372 if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL) 3373 { 3374 M4OSA_free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile); 3375 xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL; 3376 } 3377 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->xVSS.pBGMtrack); 3378 xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL; 3379 } 3380#if 0 3381 /* Parse transitions to free internal "alpha magic" settings structure */ 3382 /** 3383 * In case there is twice or more the same Alpha Magic effect, the effect context 3384 * may be freed twice or more. 3385 * So, we parse all remaining transition settings to know if the context can be 3386 * "re-freed", and if yes, we put its context to NULL to avoid freeing it again */ 3387 for(i=0; i<xVSS_context->pSettings->uiClipNumber-1; i++) 3388 { 3389 if(xVSS_context->pSettings->pTransitionList[i] != M4OSA_NULL) 3390 { 3391 switch (xVSS_context->pSettings->pTransitionList[i]->VideoTransitionType) 3392 { 3393 case M4xVSS_kVideoTransitionType_AlphaMagic: 3394 /** 3395 * In case of Alpha Magic transition, some extra parameters need to be freed */ 3396 if(xVSS_context->pSettings->pTransitionList[i]->\ 3397 pExtVideoTransitionFctCtxt != M4OSA_NULL) 3398 { 3399 M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\ 3400 xVSS_context->pSettings->pTransitionList[i]->\ 3401 pExtVideoTransitionFctCtxt)->pPlane->pac_data)); 3402 ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\ 3403 pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\ 3404 pPlane->pac_data = M4OSA_NULL; 3405 3406 M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\ 3407 xVSS_context->pSettings->pTransitionList[i]->\ 3408 pExtVideoTransitionFctCtxt)->pPlane)); 3409 ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\ 3410 pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\ 3411 pPlane = M4OSA_NULL; 3412 3413 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3414 
pTransitionList[i]->pExtVideoTransitionFctCtxt)); 3415 xVSS_context->pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt 3416 = M4OSA_NULL; 3417 3418 for(j=i+1;j<xVSS_context->pSettings->uiClipNumber-1;j++) 3419 { 3420 if(xVSS_context->pSettings->pTransitionList[j] != M4OSA_NULL) 3421 { 3422 if(xVSS_context->pSettings->pTransitionList[j]->\ 3423 VideoTransitionType == M4xVSS_kVideoTransitionType_AlphaMagic) 3424 { 3425 M4OSA_UInt32 pCmpResult=0; 3426 M4OSA_chrCompare(xVSS_context->pSettings->pTransitionList[i]->\ 3427 xVSS.transitionSpecific.pAlphaMagicSettings->\ 3428 pAlphaFilePath, 3429 xVSS_context->pSettings->pTransitionList[j]->\ 3430 xVSS.transitionSpecific.pAlphaMagicSettings->\ 3431 pAlphaFilePath, &pCmpResult); 3432 if(pCmpResult == 0) 3433 { 3434 /* Free extra internal alpha magic structure and put it 3435 to NULL to avoid refreeing it */ 3436 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3437 pTransitionList[j]->pExtVideoTransitionFctCtxt)); 3438 xVSS_context->pSettings->pTransitionList[j]->\ 3439 pExtVideoTransitionFctCtxt = M4OSA_NULL; 3440 } 3441 } 3442 } 3443 } 3444 } 3445 break; 3446 3447 case M4xVSS_kVideoTransitionType_SlideTransition: 3448 if(xVSS_context->pSettings->pTransitionList[i]->\ 3449 pExtVideoTransitionFctCtxt != M4OSA_NULL) 3450 { 3451 M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\ 3452 pTransitionList[i]->pExtVideoTransitionFctCtxt)); 3453 xVSS_context->pSettings->pTransitionList[i]->\ 3454 pExtVideoTransitionFctCtxt = M4OSA_NULL; 3455 } 3456 break; 3457 } 3458 } 3459 } 3460#endif 3461 3462 M4xVSS_freeSettings(xVSS_context->pSettings); 3463 3464 if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL) 3465 { 3466 M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList; 3467 M4xVSS_Pto3GPP_params* pParams_sauv; 3468 3469 while(pParams != M4OSA_NULL) 3470 { 3471 if(pParams->pFileIn != M4OSA_NULL) 3472 { 3473 M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn); 3474 pParams->pFileIn = M4OSA_NULL; 3475 } 3476 
if(pParams->pFileOut != M4OSA_NULL) 3477 { 3478 /* Delete temporary file */ 3479 M4OSA_fileExtraDelete(pParams->pFileOut); 3480 M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut); 3481 pParams->pFileOut = M4OSA_NULL; 3482 } 3483 if(pParams->pFileTemp != M4OSA_NULL) 3484 { 3485 /* Delete temporary file */ 3486#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE 3487 M4OSA_fileExtraDelete(pParams->pFileTemp); 3488 M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp); 3489#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ 3490 pParams->pFileTemp = M4OSA_NULL; 3491 } 3492 pParams_sauv = pParams; 3493 pParams = pParams->pNext; 3494 M4OSA_free((M4OSA_MemAddr32)pParams_sauv); 3495 pParams_sauv = M4OSA_NULL; 3496 } 3497 } 3498 3499 if(xVSS_context->pMCSparamsList != M4OSA_NULL) 3500 { 3501 M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList; 3502 M4xVSS_MCS_params* pParams_sauv; 3503 3504 while(pParams != M4OSA_NULL) 3505 { 3506 if(pParams->pFileIn != M4OSA_NULL) 3507 { 3508 M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn); 3509 pParams->pFileIn = M4OSA_NULL; 3510 } 3511 if(pParams->pFileOut != M4OSA_NULL) 3512 { 3513 /* Delete temporary file */ 3514 M4OSA_fileExtraDelete(pParams->pFileOut); 3515 M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut); 3516 pParams->pFileOut = M4OSA_NULL; 3517 } 3518 if(pParams->pFileTemp != M4OSA_NULL) 3519 { 3520 /* Delete temporary file */ 3521#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE 3522 M4OSA_fileExtraDelete(pParams->pFileTemp); 3523 M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp); 3524#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ 3525 pParams->pFileTemp = M4OSA_NULL; 3526 } 3527 pParams_sauv = pParams; 3528 pParams = pParams->pNext; 3529 M4OSA_free((M4OSA_MemAddr32)pParams_sauv); 3530 pParams_sauv = M4OSA_NULL; 3531 } 3532 } 3533 3534 if(xVSS_context->pcmPreviewFile != M4OSA_NULL) 3535 { 3536 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pcmPreviewFile); 3537 xVSS_context->pcmPreviewFile = M4OSA_NULL; 3538 } 3539 if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL 3540 && 
xVSS_context->pOutputFile != M4OSA_NULL) 3541 { 3542 M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->pOutputFile); 3543 xVSS_context->pSettings->pOutputFile = M4OSA_NULL; 3544 xVSS_context->pOutputFile = M4OSA_NULL; 3545 } 3546 3547 /* Reinit all context variables */ 3548 xVSS_context->previousClipNumber = 0; 3549 xVSS_context->editingStep = M4xVSS_kMicroStateEditing; 3550 xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP; 3551 xVSS_context->pPTo3GPPparamsList = M4OSA_NULL; 3552 xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL; 3553 xVSS_context->pMCSparamsList = M4OSA_NULL; 3554 xVSS_context->pMCScurrentParams = M4OSA_NULL; 3555 xVSS_context->tempFileIndex = 0; 3556 xVSS_context->targetedTimescale = 0; 3557 3558 return M4NO_ERROR; 3559} 3560 3561/** 3562 ****************************************************************************** 3563 * prototype M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, 3564 * M4OSA_Char* pFile, 3565 * M4VIDEOEDITING_ClipProperties *pFileProperties) 3566 * 3567 * @brief This function retrieve properties of an input 3GP file using MCS 3568 * @note 3569 * @param pContext (IN) The integrator own context 3570 * @param pFile (IN) 3GP file to analyse 3571 * @param pFileProperties (IN/OUT) Pointer on a structure that will contain 3572 * the 3GP file properties 3573 * 3574 * @return M4NO_ERROR: No error 3575 * @return M4ERR_PARAMETER: At least one of the function parameters is null 3576 ****************************************************************************** 3577 */ 3578M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile, 3579 M4VIDEOEDITING_ClipProperties *pFileProperties) 3580{ 3581 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 3582 M4OSA_ERR err; 3583 M4MCS_Context mcs_context; 3584 3585 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 3586 if(err != M4NO_ERROR) 3587 { 3588 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in 
M4MCS_init: 0x%x", err);
        return err;
    }

    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
        M4OSA_NULL, M4OSA_NULL);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
        /* MCS instance must be aborted on every error path after init */
        M4MCS_abort(mcs_context);
        return err;
    }

    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
    if(err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
        M4MCS_abort(mcs_context);
        return err;
    }

    /* Normal close: abort releases the MCS instance */
    err = M4MCS_abort(mcs_context);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
        return err;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
 *                                  M4OSA_UInt32* pTargetedTimeScale)
 *
 * @brief    This function retrieve targeted time scale
 * @note     Scans every clip: for MPEG-4 video clips the timescale of the clip
 *           with the longest (cut) duration wins; an ARGB8888 (picture) clip
 *           forces 30 immediately; the result is clamped to a minimum of 30.
 * @param    pContext            (IN) The integrator own context
 * @param    pSettings           (IN) Edit settings holding the clip list
 * @param    pTargetedTimeScale  (OUT) Targeted time scale
 *
 * @return    M4NO_ERROR:    No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
                                              M4VSS3GPP_EditSettings* pSettings,
                                              M4OSA_UInt32* pTargetedTimeScale)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;
    M4OSA_UInt32 totalDuration = 0;
    M4OSA_UInt8 i = 0;
    /* tempDuration tracks the longest clip seen so far; tempTimeScale is the
     * video timescale of that clip. */
    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;

    for(i=0;i<pSettings->uiClipNumber;i++)
    {
        /*search timescale only in mpeg4 case*/
        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4)
        {
            M4VIDEOEDITING_ClipProperties fileProperties;

            /*UTF conversion support*/
            M4OSA_Char* pDecodedPath = M4OSA_NULL;

            /**
             * UTF conversion: convert into the customer format, before being used*/
            pDecodedPath = pSettings->pClipList[i]->pFile;

            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
            {
                M4OSA_UInt32 length = 0;
                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
                    (M4OSA_Void*) pSettings->pClipList[i]->pFile,
                    (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                    &length);
                if(err != M4NO_ERROR)
                {
                    M4OSA_TRACE1_1("M4xVSS_Init:\
                        M4xVSS_internalConvertToUTF8 returns err: 0x%x",err);
                    return err;
                }
                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
            }

            /*End of the conversion: use the decoded path*/
            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);

            /*get input file properties*/
            /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
                pClipList[i]->pFile, &fileProperties);*/
            if(M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
                    M4xVSS_internalGetProperties returned: 0x%x", err);
                return err;
            }
            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
            {
                /* Effective duration is end-begin when an end cut is set,
                 * otherwise whole-clip duration minus the begin cut. */
                if(pSettings->pClipList[i]->uiEndCutTime > 0)
                {
                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
                        - pSettings->pClipList[i]->uiBeginCutTime))
                    {
                        tempTimeScale = fileProperties.uiVideoTimeScale;
                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
                            - pSettings->pClipList[i]->uiBeginCutTime);
                    }
                }
                else
                {
                    if(tempDuration < (fileProperties.uiClipDuration\
                        - pSettings->pClipList[i]->uiBeginCutTime))
                    {
                        tempTimeScale = fileProperties.uiVideoTimeScale;
                        tempDuration = (fileProperties.uiClipDuration\
                            - pSettings->pClipList[i]->uiBeginCutTime);
                    }
                }
            }
        }
        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
        {
            /*the timescale is 30 for PTO3GP*/
            *pTargetedTimeScale = 30;
            return M4NO_ERROR;

        }
    }

    if(tempTimeScale >= 30)/*Define a minimum time scale, otherwise if the timescale is not
        enough, there will be an infinite loop in the shell encoder*/
    {
        *pTargetedTimeScale = tempTimeScale;
    }
    else
    {
        *pTargetedTimeScale = 30;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
 *                                  M4VIFI_ImagePlane *PlaneIn,
 *                                  M4VIFI_ImagePlane *PlaneOut,
 *                                  M4VSS3GPP_ExternalProgress *pProgress,
 *                                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function apply a color effect on an input YUV420 planar frame
 * @note
 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
 * @param    PlaneIn         (IN) Input YUV420 planar
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return    M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
                                             M4VIFI_ImagePlane *PlaneIn,
                                             M4VIFI_ImagePlane *PlaneOut,
                                             M4VSS3GPP_ExternalProgress *pProgress,
                                             M4OSA_UInt32 uiEffectKind)
{
    M4VIFI_Int32 plane_number;
    M4VIFI_UInt32 i,j;
    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;

    /* Process the 3 planes (0 = Y / luma, 1 = U, 2 = V), one row at a time.
     * NOTE(review): M4OSA_memset appears to take (addr, count, value), i.e.
     * the last two arguments are swapped versus ISO C memset -- consistent
     * with every call site below; confirm against M4OSA_Memory.h. */
    for (plane_number = 0; plane_number < 3; plane_number++)
    {
        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
        {
            /**
             * Chrominance */
            if(plane_number==1 || plane_number==2)
            {
                //switch ((M4OSA_UInt32)pFunctionContext)
                // commented because a structure for the effects context exist
                switch (ColorContext->colorEffectType)
                {
                    /* Neutral chroma (128/128) gives a grey-scale image */
                    case M4xVSS_kVideoEffectType_BlackAndWhite:
                        M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                            PlaneIn[plane_number].u_width, 128);
                        break;
                    case M4xVSS_kVideoEffectType_Pink:
                        M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                            PlaneIn[plane_number].u_width, 255);
                        break;
                    case M4xVSS_kVideoEffectType_Green:
                        M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                            PlaneIn[plane_number].u_width, 0);
                        break;
                    case M4xVSS_kVideoEffectType_Sepia:
                        /* Fixed sepia chroma pair: U=117, V=139 */
                        if(plane_number==1)
                        {
                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                PlaneIn[plane_number].u_width, 117);
                        }
                        else
                        {
                            M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                PlaneIn[plane_number].u_width, 139);
                        }
                        break;
                    case M4xVSS_kVideoEffectType_Negative:
                        /* Chroma is kept; only luma is inverted (below) */
                        M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest,
                            (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width);
                        break;

                    case M4xVSS_kVideoEffectType_ColorRGB16:
                        {
                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

                            /*first get the r, g, b*/
                            b = (ColorContext->rgb16ColorData &  0x001f);
                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;

                            /*keep y, but replace u and v*/
                            if(plane_number==1)
                            {
                                /*then convert to u*/
                                u = U16(r, g, b);
                                M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                    PlaneIn[plane_number].u_width, (M4OSA_UInt8)u);
                            }
                            if(plane_number==2)
                            {
                                /*then convert to v*/
                                v = V16(r, g, b);
                                M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                    PlaneIn[plane_number].u_width, (M4OSA_UInt8)v);
                            }
                        }
                        break;
                    case M4xVSS_kVideoEffectType_Gradient:
                        {
                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

                            /*first get the r, g, b*/
                            b = (ColorContext->rgb16ColorData &  0x001f);
                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;

                            /*for color gradation: fade each component linearly
                              with the row index i (full color at top, none at
                              bottom)*/
                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));

                            /*keep y, but replace u and v*/
                            if(plane_number==1)
                            {
                                /*then convert to u*/
                                u = U16(r, g, b);
                                M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                    PlaneIn[plane_number].u_width, (M4OSA_UInt8)u);
                            }
                            if(plane_number==2)
                            {
                                /*then convert to v*/
                                v = V16(r, g, b);
                                M4OSA_memset((M4OSA_MemAddr8)p_buf_dest,
                                    PlaneIn[plane_number].u_width, (M4OSA_UInt8)v);
                            }
                        }
                        break;
                    default:
                        break;
                }
            }
            /**
             * Luminance */
            else
            {
                //switch ((M4OSA_UInt32)pFunctionContext)
                // commented because a structure for the effects context exist
                switch (ColorContext->colorEffectType)
                {
                    /* Negative inverts luma; every other effect keeps it */
                    case M4xVSS_kVideoEffectType_Negative:
                        for(j=0;j<PlaneOut[plane_number].u_width;j++)
                        {
                            p_buf_dest[j] = 255 - p_buf_src[j];
                        }
                        break;
                    default:
                        M4OSA_memcpy((M4OSA_MemAddr8)p_buf_dest,
                            (M4OSA_MemAddr8)p_buf_src ,PlaneOut[plane_number].u_width);
                        break;
                }
            }
            /* Advance to the next row of each plane */
            p_buf_src += PlaneIn[plane_number].u_stride;
            p_buf_dest += PlaneOut[plane_number].u_stride;
        }
    }

    return M4VIFI_OK;
}

/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
 *                                  M4VIFI_ImagePlane *PlaneIn,
 *                                  M4VIFI_ImagePlane *PlaneOut,
 *                                  M4VSS3GPP_ExternalProgress *pProgress,
 *                                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function add a fixed or animated image on an input YUV420 planar frame
 * @note
 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
 * @param    PlaneIn         (IN) Input YUV420 planar
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return    M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
                                                M4VIFI_ImagePlane PlaneIn[3],
                                                M4VIFI_ImagePlane *PlaneOut,
                                                M4VSS3GPP_ExternalProgress *pProgress,
                                                M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x,y;

    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;

    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;

    /* topleft[0] = x offset, topleft[1] = y offset of the framing picture
     * inside the input frame */
    M4VIFI_UInt32 topleft[2];

    /* RGB565 transparency key split into its two bytes (see TRANSPARENT_COLOR) */
    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

#ifndef DECODE_GIF_ON_SAVING
    Framing = (M4xVSS_FramingStruct *)userData;
    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/

    /*FB*/
#ifdef DECODE_GIF_ON_SAVING
    M4OSA_ERR err;
    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
#if 0
    if(Framing == M4OSA_NULL)
    {
        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
        err = M4xVSS_internalDecodeGIF(userData);
        if(M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
                Error in M4xVSS_internalDecodeGIF: 0x%x", err);
            return err;
        }
        Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
        /* Initializes first GIF time */
        ((M4xVSS_FramingContext*)userData)->current_gif_time = pProgress->uiOutputTime;
    }
#endif
    currentFraming = (M4xVSS_FramingStruct *)Framing;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/
    /*end FB*/

    /**
     * Initialize input / output plane pointers */
    p_in_Y += PlaneIn[0].u_topleft;
    p_in_U += PlaneIn[1].u_topleft;
    p_in_V += PlaneIn[2].u_topleft;

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    /**
     * Depending on time, initialize Framing frame to use */
    if(Framing->previousClipTime == -1)
    {
        Framing->previousClipTime = pProgress->uiOutputTime;
    }

    /**
     * If the current clip time has reach the duration of one frame of the framing picture
     * we need to step to next framing picture */
#if 0
    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)
    {
        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration)\
            < pProgress->uiOutputTime)
        {
#ifdef DECODE_GIF_ON_SAVING
            ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
            err = M4xVSS_internalDecodeGIF(userData);
            if(M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
                    Error in M4xVSS_internalDecodeGIF: 0x%x", err);
                return err;
            }
            if(currentFraming->duration != 0)
            {
                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;
            }
            else
            {
                ((M4xVSS_FramingContext*)userData)->current_gif_time \
                    += pProgress->uiOutputTime - Framing->previousClipTime;
            }
            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
            currentFraming = (M4xVSS_FramingStruct *)Framing;
            FramingRGB = Framing->FramingRgb->pac_data;
#else
            Framing->pCurrent = currentFraming->pNext;
            currentFraming = Framing->pCurrent;
#endif /*DECODE_GIF_ON_SAVING*/
        }
    }
#endif

    Framing->previousClipTime = pProgress->uiOutputTime;
    FramingRGB = currentFraming->FramingRgb->pac_data;
    topleft[0] = currentFraming->topleft_x;
    topleft[1] = currentFraming->topleft_y;

    /* x iterates over rows (height), y over columns (width) */
    for( x=0 ;x < PlaneIn[0].u_height ; x++)
    {
        for( y=0 ;y < PlaneIn[0].u_width ; y++)
        {
            /**
             * To handle framing with input size != output size
             * Framing is applyed if coordinates matches between framing/topleft and input plane */
            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
                y >= topleft[0] &&
                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
                x >= topleft[1])
            {
                /*Alpha blending support*/
                /* alphaBlending in [0,1]: ramps up during fade-in, holds at
                 * m_middle/100, ramps down during fade-out (progress is 0-1000) */
                M4OSA_Float alphaBlending = 1;
                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
                    (M4xVSS_internalEffectsAlphaBlending*)\
                        ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;

                if(alphaBlendingStruct != M4OSA_NULL)
                {
                    if(pProgress->uiProgress >= 0 && pProgress->uiProgress \
                        < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
                    {
                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
                            - alphaBlendingStruct->m_start)\
                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
                        alphaBlending += alphaBlendingStruct->m_start;
                        alphaBlending /= 100;
                    }
                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
                        m_fadeInTime*10) && pProgress->uiProgress < 1000\
                            - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        alphaBlending = (M4OSA_Float)\
                            ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
                    }
                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
                        (alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
                                /(alphaBlendingStruct->m_fadeOutTime*10);
                        alphaBlending += alphaBlendingStruct->m_end;
                        alphaBlending /= 100;
                    }
                }
                /**/

                /* Transparent framing pixel (RGB565 key): pass input through */
                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
                {
                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
                }
                else
                {
                    /* Blend framing over input: out = frame*a + in*(1-a),
                     * chroma indexed at half resolution (YUV420) */
                    *( p_out0+y+x*PlaneOut[0].u_stride)=
                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
                                *alphaBlending;
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
                                *alphaBlending;
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
                }
                /* At end of a row, skip the part of the framing row that falls
                 * outside the input plane (2 bytes per RGB565 pixel) */
                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
                    y == PlaneIn[0].u_width-1)
                {
                    FramingRGB = FramingRGB + 2 \
                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
                            - PlaneIn[0].u_width + 1);
                }
                else
                {
                    FramingRGB = FramingRGB + 2;
                }
            }
            /**
             * Just copy input plane to output plane */
            else
            {
                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
            }
        }
    }

#ifdef DECODE_GIF_ON_SAVING
#if 0
    if(pProgress->bIsLast == M4OSA_TRUE
        && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)
    {
        M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);
    }
#endif
#endif /*DECODE_GIF_ON_SAVING*/

    return M4VIFI_OK;
}


/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
 *                                  M4VIFI_ImagePlane *PlaneIn,
 *                                  M4VIFI_ImagePlane *PlaneOut,
 *                                  M4VSS3GPP_ExternalProgress *pProgress,
 *                                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function make a video look as if it was taken in the fifties
 * @note
 * @param    pUserData       (IN) Context
 * @param    pPlaneIn        (IN) Input YUV420 planar
 * @param    pPlaneOut       (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return    M4VIFI_OK:            No error
 * @return    M4ERR_PARAMETER:      pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
                                                M4VIFI_ImagePlane *pPlaneIn,
                                                M4VIFI_ImagePlane *pPlaneOut,
                                                M4VSS3GPP_ExternalProgress *pProgress,
                                                M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x, y, xShift;
    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
    M4VIFI_UInt8 *pOutY, *pInYbegin;
    M4VIFI_UInt8 *pInCr,* pOutCr;
    M4VIFI_Int32 plane_number;

    /* Internal context*/
    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;

/* Check the inputs (debug only) */ 4184 M4OSA_DEBUG_IF2((pFiftiesData == M4OSA_NULL),M4ERR_PARAMETER, 4185 "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4186 M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER, 4187 "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4188 M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER, 4189 "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 4190 4191 /* Initialize input / output plane pointers */ 4192 pInY += pPlaneIn[0].u_topleft; 4193 pOutY = pPlaneOut[0].pac_data; 4194 pInYbegin = pInY; 4195 4196 /* Initialize the random */ 4197 if(p_FiftiesData->previousClipTime < 0) 4198 { 4199 M4OSA_randInit(); 4200 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 4201 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 4202 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 4203 } 4204 4205 /* Choose random values if we have reached the duration of a partial effect */ 4206 else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\ 4207 > p_FiftiesData->fiftiesEffectDuration) 4208 { 4209 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 4210 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 4211 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 4212 } 4213 4214 /* Put in Sepia the chrominance */ 4215 for (plane_number = 1; plane_number < 3; plane_number++) 4216 { 4217 pInCr = pPlaneIn[plane_number].pac_data + pPlaneIn[plane_number].u_topleft; 4218 pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft; 4219 4220 for (x = 0; x < pPlaneOut[plane_number].u_height; x++) 4221 { 4222 if (1 == plane_number) 4223 M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 4224 117); /* U value */ 4225 else 4226 
M4OSA_memset((M4OSA_MemAddr8)pOutCr, pPlaneIn[plane_number].u_width, 4227 139); /* V value */ 4228 4229 pInCr += pPlaneIn[plane_number].u_stride; 4230 pOutCr += pPlaneOut[plane_number].u_stride; 4231 } 4232 } 4233 4234 /* Compute the new pixels values */ 4235 for( x = 0 ; x < pPlaneIn[0].u_height ; x++) 4236 { 4237 M4VIFI_UInt8 *p_outYtmp, *p_inYtmp; 4238 4239 /* Compute the xShift (random value) */ 4240 if (0 == (p_FiftiesData->shiftRandomValue % 5 )) 4241 xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1); 4242 else 4243 xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \ 4244 % (pPlaneIn[0].u_height - 1); 4245 4246 /* Initialize the pointers */ 4247 p_outYtmp = pOutY + 1; /* yShift of 1 pixel */ 4248 p_inYtmp = pInYbegin + (xShift * pPlaneIn[0].u_stride); /* Apply the xShift */ 4249 4250 for( y = 0 ; y < pPlaneIn[0].u_width ; y++) 4251 { 4252 /* Set Y value */ 4253 if (xShift > (pPlaneIn[0].u_height - 4)) 4254 *p_outYtmp = 40; /* Add some horizontal black lines between the 4255 two parts of the image */ 4256 else if ( y == p_FiftiesData->stripeRandomValue) 4257 *p_outYtmp = 90; /* Add a random vertical line for the bulk */ 4258 else 4259 *p_outYtmp = *p_inYtmp; 4260 4261 4262 /* Go to the next pixel */ 4263 p_outYtmp++; 4264 p_inYtmp++; 4265 4266 /* Restart at the beginning of the line for the last pixel*/ 4267 if (y == (pPlaneIn[0].u_width - 2)) 4268 p_outYtmp = pOutY; 4269 } 4270 4271 /* Go to the next line */ 4272 pOutY += pPlaneOut[0].u_stride; 4273 } 4274 4275 return M4VIFI_OK; 4276} 4277 4278/** 4279 ****************************************************************************** 4280 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( ) 4281 * @brief Zoom in/out video effect functions. 4282 * @note The external video function is used only if VideoEffectType is set to 4283 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut. 
4284 * 4285 * @param pFunctionContext (IN) The function context, previously set by the integrator 4286 * @param pInputPlanes (IN) Input YUV420 image: pointer to an array of three valid 4287 * image planes (Y, U and V) 4288 * @param pOutputPlanes (IN/OUT) Output (filtered) YUV420 image: pointer to an array of 4289 * three valid image planes (Y, U and V) 4290 * @param pProgress (IN) Set of information about the video transition progress. 4291 * @return M4NO_ERROR: No error 4292 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL (debug only) 4293 ****************************************************************************** 4294 */ 4295 4296M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( 4297 M4OSA_Void *pFunctionContext, 4298 M4VIFI_ImagePlane *pInputPlanes, 4299 M4VIFI_ImagePlane *pOutputPlanes, 4300 M4VSS3GPP_ExternalProgress *pProgress, 4301 M4OSA_UInt32 uiEffectKind 4302) 4303{ 4304 M4OSA_UInt32 boxWidth; 4305 M4OSA_UInt32 boxHeight; 4306 M4OSA_UInt32 boxPosX; 4307 M4OSA_UInt32 boxPosY; 4308 M4OSA_UInt32 ratio = 0; 4309 /* * 1.189207 between ratio */ 4310 /* zoom between x1 and x16 */ 4311 M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\ 4312 6889,8192,9742,11585,13777,16384}; 4313 M4OSA_UInt32 ik; 4314 4315 M4VIFI_ImagePlane boxPlane[3]; 4316 4317 if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext) 4318 { 4319 //ratio = 16 - (15 * pProgress->uiProgress)/1000; 4320 ratio = 16 - pProgress->uiProgress / 66 ; 4321 } 4322 else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext) 4323 { 4324 //ratio = 1 + (15 * pProgress->uiProgress)/1000; 4325 ratio = 1 + pProgress->uiProgress / 66 ; 4326 } 4327 4328 for(ik=0;ik<3;ik++){ 4329 4330 boxPlane[ik].u_stride = pInputPlanes[ik].u_stride; 4331 boxPlane[ik].pac_data = pInputPlanes[ik].pac_data; 4332 4333 boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio]; 4334 boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio]; 4335 
boxPlane[ik].u_height = (boxHeight)&(~1); 4336 boxPlane[ik].u_width = (boxWidth)&(~1); 4337 4338 boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1); 4339 boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1); 4340 boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX; 4341 } 4342 4343 M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes); 4344 4345 /** 4346 * Return */ 4347 return(M4NO_ERROR); 4348} 4349 4350/** 4351 ****************************************************************************** 4352 * prototype M4xVSS_AlphaMagic( M4OSA_Void *userData, 4353 * M4VIFI_ImagePlane PlaneIn1[3], 4354 * M4VIFI_ImagePlane PlaneIn2[3], 4355 * M4VIFI_ImagePlane *PlaneOut, 4356 * M4VSS3GPP_ExternalProgress *pProgress, 4357 * M4OSA_UInt32 uiTransitionKind) 4358 * 4359 * @brief This function apply a color effect on an input YUV420 planar frame 4360 * @note 4361 * @param userData (IN) Contains a pointer on a settings structure 4362 * @param PlaneIn1 (IN) Input YUV420 planar from video 1 4363 * @param PlaneIn2 (IN) Input YUV420 planar from video 2 4364 * @param PlaneOut (IN/OUT) Output YUV420 planar 4365 * @param pProgress (IN/OUT) Progress indication (0-100) 4366 * @param uiTransitionKind(IN) Unused 4367 * 4368 * @return M4VIFI_OK: No error 4369 ****************************************************************************** 4370 */ 4371M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4372 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4373 M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind) 4374{ 4375 4376 M4OSA_ERR err; 4377 4378 M4xVSS_internal_AlphaMagicSettings* alphaContext; 4379 M4VIFI_Int32 alphaProgressLevel; 4380 4381 M4VIFI_ImagePlane* planeswap; 4382 M4VIFI_UInt32 x,y; 4383 4384 M4VIFI_UInt8 *p_out0; 4385 M4VIFI_UInt8 *p_out1; 4386 M4VIFI_UInt8 *p_out2; 4387 M4VIFI_UInt8 *alphaMask; 4388 /* "Old image" */ 4389 
M4VIFI_UInt8 *p_in1_Y; 4390 M4VIFI_UInt8 *p_in1_U; 4391 M4VIFI_UInt8 *p_in1_V; 4392 /* "New image" */ 4393 M4VIFI_UInt8 *p_in2_Y; 4394 M4VIFI_UInt8 *p_in2_U; 4395 M4VIFI_UInt8 *p_in2_V; 4396 4397 err = M4NO_ERROR; 4398 4399 alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData; 4400 4401 alphaProgressLevel = (pProgress->uiProgress * 255)/1000; 4402 4403 if( alphaContext->isreverse != M4OSA_FALSE) 4404 { 4405 alphaProgressLevel = 255 - alphaProgressLevel; 4406 planeswap = PlaneIn1; 4407 PlaneIn1 = PlaneIn2; 4408 PlaneIn2 = planeswap; 4409 } 4410 4411 p_out0 = PlaneOut[0].pac_data; 4412 p_out1 = PlaneOut[1].pac_data; 4413 p_out2 = PlaneOut[2].pac_data; 4414 4415 alphaMask = alphaContext->pPlane->pac_data; 4416 4417 /* "Old image" */ 4418 p_in1_Y = PlaneIn1[0].pac_data; 4419 p_in1_U = PlaneIn1[1].pac_data; 4420 p_in1_V = PlaneIn1[2].pac_data; 4421 /* "New image" */ 4422 p_in2_Y = PlaneIn2[0].pac_data; 4423 p_in2_U = PlaneIn2[1].pac_data; 4424 p_in2_V = PlaneIn2[2].pac_data; 4425 4426 /** 4427 * For each column ... */ 4428 for( y=0; y<PlaneOut->u_height; y++ ) 4429 { 4430 /** 4431 * ... 
and each row of the alpha mask */ 4432 for( x=0; x<PlaneOut->u_width; x++ ) 4433 { 4434 /** 4435 * If the value of the current pixel of the alpha mask is > to the current time 4436 * ( current time is normalized on [0-255] ) */ 4437 if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] ) 4438 { 4439 /* We keep "old image" in output plane */ 4440 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride); 4441 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4442 *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride); 4443 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4444 *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride); 4445 } 4446 else 4447 { 4448 /* We take "new image" in output plane */ 4449 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride); 4450 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)= 4451 *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride); 4452 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)= 4453 *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride); 4454 } 4455 } 4456 } 4457 4458 return(err); 4459} 4460 4461/** 4462 ****************************************************************************** 4463 * prototype M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, 4464 * M4VIFI_ImagePlane PlaneIn1[3], 4465 * M4VIFI_ImagePlane PlaneIn2[3], 4466 * M4VIFI_ImagePlane *PlaneOut, 4467 * M4VSS3GPP_ExternalProgress *pProgress, 4468 * M4OSA_UInt32 uiTransitionKind) 4469 * 4470 * @brief This function apply a color effect on an input YUV420 planar frame 4471 * @note 4472 * @param userData (IN) Contains a pointer on a settings structure 4473 * @param PlaneIn1 (IN) Input YUV420 planar from video 1 4474 * @param PlaneIn2 (IN) Input YUV420 planar from video 2 4475 * @param PlaneOut (IN/OUT) Output YUV420 planar 4476 * @param pProgress (IN/OUT) Progress indication (0-100) 4477 * @param uiTransitionKind(IN) Unused 4478 * 4479 * @return M4VIFI_OK: No error 4480 ****************************************************************************** 4481 */ 4482M4OSA_ERR 
/**
 ******************************************************************************
 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
 *                                         M4VIFI_ImagePlane PlaneIn1[3],
 *                                         M4VIFI_ImagePlane PlaneIn2[3],
 *                                         M4VIFI_ImagePlane *PlaneOut,
 *                                         M4VSS3GPP_ExternalProgress *pProgress,
 *                                         M4OSA_UInt32 uiTransitionKind)
 *
 * @brief   Alpha-magic transition with blending: like M4xVSS_AlphaMagic, but
 *          pixels whose mask value lies within +/- blendingthreshold of the
 *          current progress are linearly blended between the two frames.
 * @param   userData          (IN) Pointer on an M4xVSS_internal_AlphaMagicSettings structure
 * @param   PlaneIn1          (IN) Input YUV420 planar from video 1 ("old image")
 * @param   PlaneIn2          (IN) Input YUV420 planar from video 2 ("new image")
 * @param   PlaneOut          (IN/OUT) Output YUV420 planar
 * @param   pProgress         (IN/OUT) Progress indication (0-1000)
 * @param   uiTransitionKind  (IN) Unused
 *
 * @return  M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
                                     M4VSS3GPP_ExternalProgress *pProgress,
                                     M4OSA_UInt32 uiTransitionKind)
{
    M4OSA_ERR err;

    M4xVSS_internal_AlphaMagicSettings* alphaContext;
    M4VIFI_Int32 alphaProgressLevel;
    M4VIFI_Int32 alphaBlendLevelMin;
    M4VIFI_Int32 alphaBlendLevelMax;
    M4VIFI_Int32 alphaBlendRange;

    M4VIFI_ImagePlane* planeswap;
    M4VIFI_UInt32 x,y;
    M4VIFI_Int32 alphaMaskValue;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;
    M4VIFI_UInt8 *alphaMask;
    /* "Old image" */
    M4VIFI_UInt8 *p_in1_Y;
    M4VIFI_UInt8 *p_in1_U;
    M4VIFI_UInt8 *p_in1_V;
    /* "New image" */
    M4VIFI_UInt8 *p_in2_Y;
    M4VIFI_UInt8 *p_in2_U;
    M4VIFI_UInt8 *p_in2_V;


    err = M4NO_ERROR;

    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;

    /* Normalize the transition progress (0..1000) onto the mask range [0..255] */
    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;

    if( alphaContext->isreverse != M4OSA_FALSE)
    {
        /* Reverse transition: mirror the progress and swap the two sources */
        alphaProgressLevel = 255 - alphaProgressLevel;
        planeswap = PlaneIn1;
        PlaneIn1 = PlaneIn2;
        PlaneIn2 = planeswap;
    }

    /* Blending window [min..max] around the current progress level.
     * Note: when blendingthreshold is 0 the window is empty, so the division
     * by alphaBlendRange (== 0) in the blend branch is never reached. */
    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;

    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;

    alphaBlendRange = (alphaContext->blendingthreshold)*2;

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    alphaMask = alphaContext->pPlane->pac_data;

    /* "Old image" */
    p_in1_Y = PlaneIn1[0].pac_data;
    p_in1_U = PlaneIn1[1].pac_data;
    p_in1_V = PlaneIn1[2].pac_data;
    /* "New image" */
    p_in2_Y = PlaneIn2[0].pac_data;
    p_in2_U = PlaneIn2[1].pac_data;
    p_in2_V = PlaneIn2[2].pac_data;

    /* apply Alpha Magic on each pixel */
    for( y=0; y<PlaneOut->u_height; y++ )
    {
        for( x=0; x<PlaneOut->u_width; x++ )
        {
            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
            if( alphaBlendLevelMax < alphaMaskValue )
            {
                /* We keep "old image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
            }
            else if( (alphaBlendLevelMin < alphaMaskValue)&&
                    (alphaMaskValue <= alphaBlendLevelMax ) )
            {
                /* We blend "old and new image" in output plane: linear weight
                 * proportional to the mask value's position inside the window */
                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
                        +(alphaBlendLevelMax-alphaMaskValue)\
                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );

                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
                        *PlaneIn1[1].u_stride))
                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );

                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
                                *PlaneIn2[2].u_stride)) )/alphaBlendRange );

            }
            else
            {
                /* We take "new image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
            }
        }
    }

    return(err);
}
/* Address of sample (x, y) inside 'plane', honouring the plane's top-left
 * offset and stride.  'plane' is an M4VIFI_ImagePlane value (not a pointer);
 * x and y are expressed in samples of that plane's own resolution. */
#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
     * (plane).u_stride + (x) )

/* Copies the visible window of 'source' into 'dest', row by row.
 * NOTE(review): dest's width/height are used for both planes, so this assumes
 * the two windows have identical dimensions - confirm with callers. */
static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
{
    M4OSA_UInt32 height, width, sourceStride, destStride, y;
    M4OSA_MemAddr8 sourceWalk, destWalk;

    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
    recomputed from memory. */
    height = dest->u_height;
    width = dest->u_width;

    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
    sourceStride = source->u_stride;

    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
    destStride = dest->u_stride;

    for (y=0; y<height; y++)
    {
        M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width);
        destWalk += destStride;
        sourceWalk += sourceStride;
    }
}

/* Vertical slide: the output frame is the bottom part of 'topPlane' stacked
 * above the top part of 'bottomPlane'.  shiftUV is the slide amount expressed
 * in chroma rows (the luma shift is 2*shiftUV, YUV420 sampling). */
static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
                                                M4VIFI_ImagePlane* bottomPlane,
                                                M4VIFI_ImagePlane *PlaneOut,
                                                M4OSA_UInt32 shiftUV)
{
    M4OSA_UInt32 i;

    /* Do three loops, one for each plane type, in order to avoid having too many buffers
    "hot" at the same time (better for cache). */
    for (i=0; i<3; i++)
    {
        M4OSA_UInt32 topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
        M4OSA_MemAddr8 sourceWalk, destWalk;

        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
        recomputed from memory. */
        if (0 == i) /* Y plane */
        {
            /* luma has twice the chroma row count in YUV420 */
            bottomPartHeight = 2*shiftUV;
        }
        else /* U and V planes */
        {
            bottomPartHeight = shiftUV;
        }
        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
        width = PlaneOut[i].u_width;

        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
        sourceStride = topPlane[i].u_stride;

        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
        destStride = PlaneOut[i].u_stride;

        /* First the part from the top source clip frame. */
        for (y=0; y<topPartHeight; y++)
        {
            M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width);
            destWalk += destStride;
            sourceWalk += sourceStride;
        }

        /* and now change the vars to copy the part from the bottom source clip frame. */
        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
        sourceStride = bottomPlane[i].u_stride;

        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */

        for (y=0; y<bottomPartHeight; y++)
        {
            M4OSA_memcpy((M4OSA_MemAddr8)destWalk, (M4OSA_MemAddr8)sourceWalk, width);
            destWalk += destStride;
            sourceWalk += sourceStride;
        }
    }
    return M4NO_ERROR;
}
*/ 4693 4694 if (0 == shiftUV) /* output left frame */ 4695 { 4696 for (i = 0; i<3; i++) /* for each YUV plane */ 4697 { 4698 M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i])); 4699 } 4700 4701 return M4NO_ERROR; 4702 } 4703 4704 if (PlaneOut[1].u_width == shiftUV) /* output right frame */ 4705 { 4706 for (i = 0; i<3; i++) /* for each YUV plane */ 4707 { 4708 M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i])); 4709 } 4710 4711 return M4NO_ERROR; 4712 } 4713 4714 4715 /* Do three loops, one for each plane type, in order to avoid having too many buffers 4716 "hot" at the same time (better for cache). */ 4717 for (i=0; i<3; i++) 4718 { 4719 M4OSA_UInt32 height, leftPartWidth, rightPartWidth; 4720 M4OSA_UInt32 leftStride, rightStride, destStride; 4721 M4OSA_MemAddr8 leftWalk, rightWalk, destWalkLeft, destWalkRight; 4722 4723 /* cache the vars used in the loop so as to avoid them being repeatedly fetched 4724 and recomputed from memory. */ 4725 height = PlaneOut[i].u_height; 4726 4727 if (0 == i) /* Y plane */ 4728 { 4729 rightPartWidth = 2*shiftUV; 4730 } 4731 else /* U and V planes */ 4732 { 4733 rightPartWidth = shiftUV; 4734 } 4735 leftPartWidth = PlaneOut[i].u_width - rightPartWidth; 4736 4737 leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0); 4738 leftStride = leftPlane[i].u_stride; 4739 4740 rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0); 4741 rightStride = rightPlane[i].u_stride; 4742 4743 destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0); 4744 destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0); 4745 destStride = PlaneOut[i].u_stride; 4746 4747 for (y=0; y<height; y++) 4748 { 4749 M4OSA_memcpy((M4OSA_MemAddr8)destWalkLeft, (M4OSA_MemAddr8)leftWalk, leftPartWidth); 4750 leftWalk += leftStride; 4751 4752 M4OSA_memcpy((M4OSA_MemAddr8)destWalkRight, (M4OSA_MemAddr8)rightWalk, rightPartWidth); 4753 rightWalk += rightStride; 4754 4755 destWalkLeft += destStride; 4756 
destWalkRight += destStride; 4757 } 4758 } 4759 4760 return M4NO_ERROR; 4761} 4762 4763 4764M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4765 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4766 M4VSS3GPP_ExternalProgress *pProgress, 4767 M4OSA_UInt32 uiTransitionKind) 4768{ 4769 M4xVSS_internal_SlideTransitionSettings* settings = 4770 (M4xVSS_internal_SlideTransitionSettings*)userData; 4771 M4OSA_UInt32 shiftUV; 4772 4773 M4OSA_TRACE1_0("inside M4xVSS_SlideTransition"); 4774 if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) 4775 || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) ) 4776 { 4777 /* horizontal slide */ 4778 shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000; 4779 M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d", 4780 shiftUV,pProgress->uiProgress ); 4781 if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction) 4782 { 4783 /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV 4784 (since it's a shift from the left frame) so that we start out on the right 4785 (i.e. not left) frame, it 4786 being from the previous clip. */ 4787 return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut, 4788 (PlaneOut[1]).u_width - shiftUV); 4789 } 4790 else /* Left out, right in*/ 4791 { 4792 return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); 4793 } 4794 } 4795 else 4796 { 4797 /* vertical slide */ 4798 shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000; 4799 M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV, 4800 pProgress->uiProgress ); 4801 if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction) 4802 { 4803 /* Put the previous clip frame top, the next clip frame bottom. 
*/ 4804 return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV); 4805 } 4806 else /* Bottom out, top in */ 4807 { 4808 return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut, 4809 (PlaneOut[1]).u_height - shiftUV); 4810 } 4811 } 4812 4813 /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc., 4814 at least in debug mode. */ 4815} 4816 4817 4818/** 4819 ****************************************************************************** 4820 * prototype M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext, 4821 * M4VIFI_ImagePlane *PlaneIn, 4822 * M4VIFI_ImagePlane *PlaneOut, 4823 * M4VSS3GPP_ExternalProgress *pProgress, 4824 * M4OSA_UInt32 uiEffectKind) 4825 * 4826 * @brief This function apply a fade to black and then a fade from black 4827 * @note 4828 * @param pFunctionContext(IN) Contains which color to apply (not very clean ...) 4829 * @param PlaneIn (IN) Input YUV420 planar 4830 * @param PlaneOut (IN/OUT) Output YUV420 planar 4831 * @param pProgress (IN/OUT) Progress indication (0-100) 4832 * @param uiEffectKind (IN) Unused 4833 * 4834 * @return M4VIFI_OK: No error 4835 ****************************************************************************** 4836 */ 4837M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4838 M4VIFI_ImagePlane PlaneIn2[3], 4839 M4VIFI_ImagePlane *PlaneOut, 4840 M4VSS3GPP_ExternalProgress *pProgress, 4841 M4OSA_UInt32 uiTransitionKind) 4842{ 4843 M4OSA_Int32 tmp = 0; 4844 M4OSA_ERR err = M4NO_ERROR; 4845 4846 4847 if((pProgress->uiProgress) < 500) 4848 { 4849 /** 4850 * Compute where we are in the effect (scale is 0->1024) */ 4851 tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 ); 4852 4853 /** 4854 * Apply the darkening effect */ 4855 err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1, 4856 (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL); 4857 if (M4NO_ERROR != err) 4858 { 4859 
/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
 *                                                     M4OSA_Void* pBufferIn,
 *                                                     M4OSA_Void* pBufferOut,
 *                                                     M4OSA_UInt32* convertedSize)
 *
 * @brief   This function convert from the customer format to UTF8, using the
 *          integrator-supplied conversion callback and the context's shared
 *          temporary buffer (grown on demand).
 * @note    If no conversion callback or temporary buffer is installed, the
 *          function is a no-op and returns M4NO_ERROR.
 * @param   pContext        (IN) The integrator own context
 * @param   pBufferIn       (IN) Buffer to convert
 * @param   pBufferOut      (OUT) Converted buffer
 * @param   convertedSize   (OUT) Size of the converted buffer
 *
 * @return  M4NO_ERROR:     No error
 * @return  M4ERR_ALLOC:    Temporary buffer re-allocation failed
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /* NOTE(review): pBufferOut is a by-value parameter, so this assignment
     * (and the one at the end of the function) is invisible to the caller;
     * callers appear to read the result from the context's
     * pTempOutConversionBuffer instead - confirm before relying on it. */
    pBufferOut = pBufferIn;
    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* In: capacity of the shared buffer; out: bytes actually produced */
        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;

        /* Zero the shared temporary buffer before converting into it */
        M4OSA_memset((M4OSA_MemAddr8)xVSS_context->UTFConversionContext.pTempOutConversionBuffer
            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize,0);

        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                (M4OSA_UInt32*)&ConvertedSize);
        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
        {
            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);

            /*free too small buffer*/
            M4OSA_free((M4OSA_MemAddr32)xVSS_context->\
                UTFConversionContext.pTempOutConversionBuffer);

            /*re-allocate the buffer*/
            xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
                (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
                    (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
            {
                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
                return M4ERR_ALLOC;
            }
            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;

            M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\
                UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\
                    UTFConversionContext.m_TempOutConversionSize,0);

            /* Retry the conversion with the grown buffer */
            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                    (M4OSA_UInt32*)&ConvertedSize);
            if(err != M4NO_ERROR)
            {
                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
                return err;
            }
        }
        else if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
            return err;
        }
        /*decoded path*/
        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
        (*convertedSize) = ConvertedSize;
    }
    return M4NO_ERROR;
}
/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext)
 *
 * @brief   This function convert from UTF8 to the customer format, using the
 *          integrator-supplied conversion callback and the context's shared
 *          temporary buffer (grown on demand).
 * @note    If no conversion callback or temporary buffer is installed, the
 *          function is a no-op and returns M4NO_ERROR.
 * @param   pContext        (IN) The integrator own context
 * @param   pBufferIn       (IN) Buffer to convert
 * @param   pBufferOut      (OUT) Converted buffer
 * @param   convertedSize   (OUT) Size of the converted buffer
 *
 * @return  M4NO_ERROR:     No error
 * @return  M4ERR_ALLOC:    Temporary buffer re-allocation failed
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
                                         M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /* NOTE(review): pBufferOut is a by-value parameter, so this assignment
     * (and the one at the end of the function) is invisible to the caller;
     * callers appear to read the result from the context's
     * pTempOutConversionBuffer instead - confirm before relying on it. */
    pBufferOut = pBufferIn;
    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* In: capacity of the shared buffer; out: bytes actually produced */
        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;

        /* Zero the shared temporary buffer before converting into it */
        M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\
            UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\
                UTFConversionContext.m_TempOutConversionSize,0);

        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
            ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
                UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
        {
            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);

            /*free too small buffer*/
            M4OSA_free((M4OSA_MemAddr32)xVSS_context->\
                UTFConversionContext.pTempOutConversionBuffer);

            /*re-allocate the buffer*/
            xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
                (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
                    (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
            {
                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
                return M4ERR_ALLOC;
            }
            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;

            M4OSA_memset((M4OSA_MemAddr8)xVSS_context->\
                UTFConversionContext.pTempOutConversionBuffer,(M4OSA_UInt32)xVSS_context->\
                    UTFConversionContext.m_TempOutConversionSize,0);

            /* Retry the conversion with the grown buffer */
            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                    (M4OSA_UInt32*)&ConvertedSize);
            if(err != M4NO_ERROR)
            {
                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
                return err;
            }
        }
        else if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
            return err;
        }
        /*decoded path*/
        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
        (*convertedSize) = ConvertedSize;
    }


    return M4NO_ERROR;
}