M4xVSS_internal.c revision 0a389ab70db304fb840e33f33781ecc0503eae3c
1/* 2 * Copyright (C) 2004-2011 NXP Software 3 * Copyright (C) 2011 The Android Open Source Project 4 * 5 * Licensed under the Apache License, Version 2.0 (the "License"); 6 * you may not use this file except in compliance with the License. 7 * You may obtain a copy of the License at 8 * 9 * http://www.apache.org/licenses/LICENSE-2.0 10 * 11 * Unless required by applicable law or agreed to in writing, software 12 * distributed under the License is distributed on an "AS IS" BASIS, 13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 * See the License for the specific language governing permissions and 15 * limitations under the License. 16 */ 17/** 18 ****************************************************************************** 19 * @file M4xVSS_internal.c 20 * @brief Internal functions of extended Video Studio Service (Video Studio 2.1) 21 * @note 22 ****************************************************************************** 23 */ 24#include "M4OSA_Debug.h" 25#include "M4OSA_CharStar.h" 26 27#include "NXPSW_CompilerSwitches.h" 28 29#include "M4VSS3GPP_API.h" 30#include "M4VSS3GPP_ErrorCodes.h" 31 32#include "M4xVSS_API.h" 33#include "M4xVSS_Internal.h" 34 35/*for rgb16 color effect*/ 36#include "M4VIFI_Defines.h" 37#include "M4VIFI_Clip.h" 38 39/** 40 * component includes */ 41#include "M4VFL_transition.h" /**< video effects */ 42 43/* Internal header file of VSS is included because of MMS use case */ 44#include "M4VSS3GPP_InternalTypes.h" 45 46/*Exif header files to add image rendering support (cropping, black borders)*/ 47#include "M4EXIFC_CommonAPI.h" 48// StageFright encoders require %16 resolution 49#include "M4ENCODER_common.h" 50 51#define TRANSPARENT_COLOR 0x7E0 52 53/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */ 54M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn, 55 M4VIFI_ImagePlane *pPlaneOut); 56 57 58/*special MCS function used only in 
VideoArtist and VideoStudio to open the media in the normal 59 mode. That way the media duration is accurate*/ 60extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn, 61 M4VIDEOEDITING_FileType InputFileType, 62 M4OSA_Void* pFileOut, M4OSA_Void* pTempFile); 63 64 65/** 66 ****************************************************************************** 67 * prototype M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 68 * @brief This function initializes MCS (3GP transcoder) with the given 69 * parameters 70 * @note The transcoding parameters are given by the internal xVSS context. 71 * This context contains a pointer on the current element of the 72 * chained list of MCS parameters. 73 * 74 * @param pContext (IN) Pointer on the xVSS edit context 75 * @return M4NO_ERROR: No error 76 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 77 * @return M4ERR_ALLOC: Memory allocation has failed 78 ****************************************************************************** 79 */ 80M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext) 81{ 82 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 83 M4OSA_ERR err; 84 M4MCS_Context mcs_context; 85 M4MCS_OutputParams Params; 86 M4MCS_EncodingParams Rates; 87 M4OSA_UInt32 i; 88 89 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 90 if(err != M4NO_ERROR) 91 { 92 M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err); 93 return err; 94 } 95 96 err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn, 97 xVSS_context->pMCScurrentParams->InputFileType, 98 xVSS_context->pMCScurrentParams->pFileOut, 99 xVSS_context->pMCScurrentParams->pFileTemp); 100 if (err != M4NO_ERROR) 101 { 102 M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err); 103 M4MCS_abort(mcs_context); 104 return err; 105 } 106 107 /** 108 * Fill MCS parameters with the parameters contained in the current element of the 109 MCS parameters chained list */ 
110 Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType; 111 Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat; 112 Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize; 113 Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate; 114 Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat; 115 Params.OutputAudioSamplingFrequency = 116 xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency; 117 Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono; 118 Params.pOutputPCMfile = M4OSA_NULL; 119 /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/ 120 switch(xVSS_context->pMCScurrentParams->MediaRendering) 121 { 122 case M4xVSS_kResizing: 123 Params.MediaRendering = M4MCS_kResizing; 124 break; 125 case M4xVSS_kCropping: 126 Params.MediaRendering = M4MCS_kCropping; 127 break; 128 case M4xVSS_kBlackBorders: 129 Params.MediaRendering = M4MCS_kBlackBorders; 130 break; 131 default: 132 break; 133 } 134 /**/ 135 // new params after integrating MCS 2.0 136 // Set the number of audio effects; 0 for now. 137 Params.nbEffects = 0; 138 139 // Set the audio effect; null for now. 140 Params.pEffects = NULL; 141 142 // Set the audio effect; null for now. 143 Params.bDiscardExif = M4OSA_FALSE; 144 145 // Set the audio effect; null for now. 
146 Params.bAdjustOrientation = M4OSA_FALSE; 147 // new params after integrating MCS 2.0 148 149 /** 150 * Set output parameters */ 151 err = M4MCS_setOutputParams(mcs_context, &Params); 152 if (err != M4NO_ERROR) 153 { 154 M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err); 155 M4MCS_abort(mcs_context); 156 return err; 157 } 158 159 Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate; 160 Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate; 161 Rates.BeginCutTime = 0; 162 Rates.EndCutTime = 0; 163 Rates.OutputFileSize = 0; 164 165 /*FB: transcoding per parts*/ 166 Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime; 167 Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime; 168 Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale; 169 170 err = M4MCS_setEncodingParams(mcs_context, &Rates); 171 if (err != M4NO_ERROR) 172 { 173 M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err); 174 M4MCS_abort(mcs_context); 175 return err; 176 } 177 178 err = M4MCS_checkParamsAndStart(mcs_context); 179 if (err != M4NO_ERROR) 180 { 181 M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err); 182 M4MCS_abort(mcs_context); 183 return err; 184 } 185 186 /** 187 * Save MCS context to be able to call MCS step function in M4xVSS_step function */ 188 xVSS_context->pMCS_Ctxt = mcs_context; 189 190 return M4NO_ERROR; 191} 192 193/** 194 ****************************************************************************** 195 * prototype M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 196 * @brief This function cleans up MCS (3GP transcoder) 197 * @note 198 * 199 * @param pContext (IN) Pointer on the xVSS edit context 200 * @return M4NO_ERROR: No error 201 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL 202 * @return M4ERR_ALLOC: Memory allocation has failed 203 
****************************************************************************** 204 */ 205M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext) 206{ 207 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 208 M4OSA_ERR err; 209 210 err = M4MCS_close(xVSS_context->pMCS_Ctxt); 211 if (err != M4NO_ERROR) 212 { 213 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err); 214 M4MCS_abort(xVSS_context->pMCS_Ctxt); 215 return err; 216 } 217 218 /** 219 * Free this MCS instance */ 220 err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt); 221 if (err != M4NO_ERROR) 222 { 223 M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err); 224 return err; 225 } 226 227 xVSS_context->pMCS_Ctxt = M4OSA_NULL; 228 229 return M4NO_ERROR; 230} 231 232/** 233 ****************************************************************************** 234 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 235 * M4OSA_FileReadPointer* pFileReadPtr, 236 * M4VIFI_ImagePlane* pImagePlanes, 237 * M4OSA_UInt32 width, 238 * M4OSA_UInt32 height); 239 * @brief It Coverts and resizes a ARGB8888 image to YUV420 240 * @note 241 * @param pFileIn (IN) The Image input file 242 * @param pFileReadPtr (IN) Pointer on filesystem functions 243 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 244 * ARGB8888 image will be converted and resized to output 245 * YUV420 plane size 246 *@param width (IN) width of the ARGB8888 247 *@param height (IN) height of the ARGB8888 248 * @return M4NO_ERROR: No error 249 * @return M4ERR_ALLOC: memory error 250 * @return M4ERR_PARAMETER: At least one of the function parameters is null 251 ****************************************************************************** 252 */ 253 254M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 255 M4OSA_FileReadPointer* pFileReadPtr, 256 M4VIFI_ImagePlane* pImagePlanes, 257 M4OSA_UInt32 
width,M4OSA_UInt32 height) 258{ 259 M4OSA_Context pARGBIn; 260 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; 261 M4OSA_UInt32 frameSize_argb=(width * height * 4); 262 M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data. 263 M4OSA_UInt32 i = 0,j= 0; 264 M4OSA_ERR err=M4NO_ERROR; 265 266 267 M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, 268 M4VS, (M4OSA_Char*)"Image argb data"); 269 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :"); 270 if(pTmpData == M4OSA_NULL) { 271 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 272 Failed to allocate memory for Image clip"); 273 return M4ERR_ALLOC; 274 } 275 276 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d", 277 width ,height); 278 /* Get file size (mandatory for chunk decoding) */ 279 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); 280 if(err != M4NO_ERROR) 281 { 282 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 283 Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 284 free(pTmpData); 285 pTmpData = M4OSA_NULL; 286 goto cleanup; 287 } 288 289 err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb); 290 if(err != M4NO_ERROR) 291 { 292 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888\ 293 file %s, error: 0x%x\n",pFileIn, err); 294 pFileReadPtr->closeRead(pARGBIn); 295 free(pTmpData); 296 pTmpData = M4OSA_NULL; 297 goto cleanup; 298 } 299 300 err = pFileReadPtr->closeRead(pARGBIn); 301 if(err != M4NO_ERROR) 302 { 303 M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \ 304 file %s, error: 0x%x\n",pFileIn, err); 305 free(pTmpData); 306 pTmpData = M4OSA_NULL; 307 goto cleanup; 308 } 309 310 rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, 311 (M4OSA_Char*)"Image clip RGB888 data"); 312 if(rgbPlane1.pac_data == M4OSA_NULL) 313 { 
314 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \ 315 Failed to allocate memory for Image clip"); 316 free(pTmpData); 317 return M4ERR_ALLOC; 318 } 319 320 rgbPlane1.u_height = height; 321 rgbPlane1.u_width = width; 322 rgbPlane1.u_stride = width*3; 323 rgbPlane1.u_topleft = 0; 324 325 326 /** Remove the alpha channel */ 327 for (i=0, j = 0; i < frameSize_argb; i++) { 328 if ((i % 4) == 0) continue; 329 rgbPlane1.pac_data[j] = pTmpData[i]; 330 j++; 331 } 332 free(pTmpData); 333 334 /* To Check if resizing is required with color conversion */ 335 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) 336 { 337 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :"); 338 frameSize = ( pImagePlanes->u_width * pImagePlanes->u_height * 3); 339 rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, 340 (M4OSA_Char*)"Image clip RGB888 data"); 341 if(rgbPlane2.pac_data == M4OSA_NULL) 342 { 343 M4OSA_TRACE1_0("Failed to allocate memory for Image clip"); 344 free(pTmpData); 345 return M4ERR_ALLOC; 346 } 347 rgbPlane2.u_height = pImagePlanes->u_height; 348 rgbPlane2.u_width = pImagePlanes->u_width; 349 rgbPlane2.u_stride = pImagePlanes->u_width*3; 350 rgbPlane2.u_topleft = 0; 351 352 /* Resizing RGB888 to RGB888 */ 353 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2); 354 if(err != M4NO_ERROR) 355 { 356 M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err); 357 free(rgbPlane2.pac_data); 358 free(rgbPlane1.pac_data); 359 return err; 360 } 361 /*Converting Resized RGB888 to YUV420 */ 362 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes); 363 if(err != M4NO_ERROR) 364 { 365 M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err); 366 free(rgbPlane2.pac_data); 367 free(rgbPlane1.pac_data); 368 return err; 369 } 370 free(rgbPlane2.pac_data); 371 free(rgbPlane1.pac_data); 372 373 M4OSA_TRACE1_0("RGB to YUV done"); 
374 375 376 } 377 else 378 { 379 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO Resizing :"); 380 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes); 381 if(err != M4NO_ERROR) 382 { 383 M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err); 384 } 385 free(rgbPlane1.pac_data); 386 387 M4OSA_TRACE1_0("RGB to YUV done"); 388 } 389cleanup: 390 M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :"); 391 return err; 392} 393 394/** 395 ****************************************************************************** 396 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 397 * M4OSA_FileReadPointer* pFileReadPtr, 398 * M4VIFI_ImagePlane* pImagePlanes, 399 * M4OSA_UInt32 width, 400 * M4OSA_UInt32 height); 401 * @brief It Coverts a ARGB8888 image to YUV420 402 * @note 403 * @param pFileIn (IN) The Image input file 404 * @param pFileReadPtr (IN) Pointer on filesystem functions 405 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user 406 * ARGB8888 image will be converted and resized to output 407 * YUV420 plane size 408 * @param width (IN) width of the ARGB8888 409 * @param height (IN) height of the ARGB8888 410 * @return M4NO_ERROR: No error 411 * @return M4ERR_ALLOC: memory error 412 * @return M4ERR_PARAMETER: At least one of the function parameters is null 413 ****************************************************************************** 414 */ 415 416M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn, 417 M4OSA_FileReadPointer* pFileReadPtr, 418 M4VIFI_ImagePlane** pImagePlanes, 419 M4OSA_UInt32 width,M4OSA_UInt32 height) 420{ 421 M4OSA_ERR err = M4NO_ERROR; 422 M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL; 423 424 yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), 425 M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV"); 426 if(yuvPlane == M4OSA_NULL) { 427 
M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\ 428 Failed to allocate memory for Image clip"); 429 return M4ERR_ALLOC; 430 } 431 yuvPlane[0].u_height = height; 432 yuvPlane[0].u_width = width; 433 yuvPlane[0].u_stride = width; 434 yuvPlane[0].u_topleft = 0; 435 yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \ 436 * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data"); 437 438 yuvPlane[1].u_height = yuvPlane[0].u_height >>1; 439 yuvPlane[1].u_width = yuvPlane[0].u_width >> 1; 440 yuvPlane[1].u_stride = yuvPlane[1].u_width; 441 yuvPlane[1].u_topleft = 0; 442 yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \ 443 * yuvPlane[0].u_width); 444 445 yuvPlane[2].u_height = yuvPlane[0].u_height >>1; 446 yuvPlane[2].u_width = yuvPlane[0].u_width >> 1; 447 yuvPlane[2].u_stride = yuvPlane[2].u_width; 448 yuvPlane[2].u_topleft = 0; 449 yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \ 450 * yuvPlane[1].u_width); 451 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr, 452 yuvPlane, width, height); 453 if(err != M4NO_ERROR) 454 { 455 M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err); 456 free(yuvPlane); 457 return err; 458 } 459 460 *pImagePlanes = yuvPlane; 461 462 M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving"); 463 return err; 464 465} 466 467/** 468 ****************************************************************************** 469 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt, 470 * M4VIFI_ImagePlane* pImagePlanes, 471 * M4OSA_UInt32* pPictureDuration); 472 * @brief It feeds the PTO3GPP with YUV420 pictures. 
473 * @note This function is given to the PTO3GPP in the M4PTO3GPP_Params structure 474 * @param pContext (IN) The integrator own context 475 * @param pImagePlanes(IN/OUT) Pointer to an array of three valid image planes 476 * @param pPictureDuration(OUT) Duration of the returned picture 477 * 478 * @return M4NO_ERROR: No error 479 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 480 * @return M4ERR_PARAMETER: At least one of the function parameters is null 481 ****************************************************************************** 482 */ 483M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes, 484 M4OSA_Double* pPictureDuration) 485{ 486 M4OSA_ERR err = M4NO_ERROR; 487 M4OSA_UInt8 last_frame_flag = 0; 488 M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt); 489 490 /*Used for pan&zoom*/ 491 M4OSA_UInt8 tempPanzoomXa = 0; 492 M4OSA_UInt8 tempPanzoomXb = 0; 493 M4AIR_Params Params; 494 /**/ 495 496 /*Used for cropping and black borders*/ 497 M4OSA_Context pPictureContext = M4OSA_NULL; 498 M4OSA_FilePosition pictureSize = 0 ; 499 M4OSA_UInt8* pictureBuffer = M4OSA_NULL; 500 //M4EXIFC_Context pExifContext = M4OSA_NULL; 501 M4EXIFC_BasicTags pBasicTags; 502 M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0]; 503 M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1]; 504 M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2]; 505 /**/ 506 507 /** 508 * Check input parameters */ 509 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt), M4ERR_PARAMETER, 510 "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL"); 511 M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes), M4ERR_PARAMETER, 512 "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL"); 513 M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER, 514 "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL"); 515 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering"); 516 /*PR P4ME00003181 In case the image number is 0, pan&zoom can 
not be used*/ 517 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0) 518 { 519 pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE; 520 } 521 522 /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/ 523 if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 524 { 525 /** 526 * Convert and resize input ARGB8888 file to YUV420 */ 527 /*To support ARGB8888 : */ 528 M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and heght %d %d", 529 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 530 err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn, 531 pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width, 532 pC->m_pPto3GPPparams->height); 533 if(err != M4NO_ERROR) 534 { 535 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 536 return err; 537 } 538 } 539 /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/ 540 else 541 { 542 /** 543 * Computes ratios */ 544 if(pC->m_pDecodedPlane == M4OSA_NULL) 545 { 546 /** 547 * Convert input ARGB8888 file to YUV420 */ 548 M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and heght %d %d", 549 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 550 err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr, 551 &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height); 552 if(err != M4NO_ERROR) 553 { 554 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err); 555 if(pC->m_pDecodedPlane != M4OSA_NULL) 556 { 557 /* YUV420 planar is returned but allocation is made only once 558 (contigous planes in memory) */ 559 if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL) 560 { 561 free(pC->m_pDecodedPlane->pac_data); 562 } 563 free(pC->m_pDecodedPlane); 564 pC->m_pDecodedPlane = M4OSA_NULL; 565 } 566 return err; 567 } 568 } 569 570 /*Initialize AIR Params*/ 571 Params.m_inputCoord.m_x = 0; 572 
Params.m_inputCoord.m_y = 0; 573 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 574 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 575 Params.m_outputSize.m_width = pImagePlanes->u_width; 576 Params.m_outputSize.m_height = pImagePlanes->u_height; 577 Params.m_bOutputStripe = M4OSA_FALSE; 578 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 579 580 /*Initialize Exif params structure*/ 581 pBasicTags.orientation = M4COMMON_kOrientationUnknown; 582 583 /** 584 Pan&zoom params*/ 585 if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom) 586 { 587 /*Save ratio values, they can be reused if the new ratios are 0*/ 588 tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa; 589 tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb; 590 /*Check that the ratio is not 0*/ 591 /*Check (a) parameters*/ 592 if(pC->m_pPto3GPPparams->PanZoomXa == 0) 593 { 594 M4OSA_UInt8 maxRatio = 0; 595 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= 596 pC->m_pPto3GPPparams->PanZoomTopleftYa) 597 { 598 /*The ratio is 0, that means the area of the picture defined with (a) 599 parameters is bigger than the image size*/ 600 if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000) 601 { 602 /*The oversize is maxRatio*/ 603 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000; 604 } 605 } 606 else 607 { 608 /*The ratio is 0, that means the area of the picture defined with (a) 609 parameters is bigger than the image size*/ 610 if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000) 611 { 612 /*The oversize is maxRatio*/ 613 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000; 614 } 615 } 616 /*Modify the (a) parameters:*/ 617 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio) 618 { 619 /*The (a) topleft parameters can be moved to keep the same area size*/ 620 pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio; 621 } 622 else 623 { 624 /*Move the (a) topleft parameter to 0 but the ratio will be 
also further 625 modified to match the image size*/ 626 pC->m_pPto3GPPparams->PanZoomTopleftXa = 0; 627 } 628 if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio) 629 { 630 /*The (a) topleft parameters can be moved to keep the same area size*/ 631 pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio; 632 } 633 else 634 { 635 /*Move the (a) topleft parameter to 0 but the ratio will be also further 636 modified to match the image size*/ 637 pC->m_pPto3GPPparams->PanZoomTopleftYa = 0; 638 } 639 /*The new ratio is the original one*/ 640 pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa; 641 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000) 642 { 643 /*Change the ratio if the area of the picture defined with (a) parameters is 644 bigger than the image size*/ 645 pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa; 646 } 647 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000) 648 { 649 /*Change the ratio if the area of the picture defined with (a) parameters is 650 bigger than the image size*/ 651 pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa; 652 } 653 } 654 /*Check (b) parameters*/ 655 if(pC->m_pPto3GPPparams->PanZoomXb == 0) 656 { 657 M4OSA_UInt8 maxRatio = 0; 658 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= 659 pC->m_pPto3GPPparams->PanZoomTopleftYb) 660 { 661 /*The ratio is 0, that means the area of the picture defined with (b) 662 parameters is bigger than the image size*/ 663 if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000) 664 { 665 /*The oversize is maxRatio*/ 666 maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000; 667 } 668 } 669 else 670 { 671 /*The ratio is 0, that means the area of the picture defined with (b) 672 parameters is bigger than the image size*/ 673 if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000) 674 { 675 /*The oversize is maxRatio*/ 676 maxRatio = 
pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000; 677 } 678 } 679 /*Modify the (b) parameters:*/ 680 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio) 681 { 682 /*The (b) topleft parameters can be moved to keep the same area size*/ 683 pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio; 684 } 685 else 686 { 687 /*Move the (b) topleft parameter to 0 but the ratio will be also further 688 modified to match the image size*/ 689 pC->m_pPto3GPPparams->PanZoomTopleftXb = 0; 690 } 691 if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio) 692 { 693 /*The (b) topleft parameters can be moved to keep the same area size*/ 694 pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio; 695 } 696 else 697 { 698 /*Move the (b) topleft parameter to 0 but the ratio will be also further 699 modified to match the image size*/ 700 pC->m_pPto3GPPparams->PanZoomTopleftYb = 0; 701 } 702 /*The new ratio is the original one*/ 703 pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb; 704 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000) 705 { 706 /*Change the ratio if the area of the picture defined with (b) parameters is 707 bigger than the image size*/ 708 pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb; 709 } 710 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000) 711 { 712 /*Change the ratio if the area of the picture defined with (b) parameters is 713 bigger than the image size*/ 714 pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb; 715 } 716 } 717 718 /** 719 * Computes AIR parameters */ 720/* Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 721 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 722 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \ 723 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 724 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 725 Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 
726 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 727 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\ 728 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 729 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 730 731 Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width * 732 (pC->m_pPto3GPPparams->PanZoomXa + 733 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 734 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 735 736 Params.m_inputSize.m_height = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height * 737 (pC->m_pPto3GPPparams->PanZoomXa + 738 (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) * 739 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100; 740 */ 741 // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter 742 // will be x-1 max for x no. of frames 743 Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 744 (pC->m_pPto3GPPparams->PanZoomTopleftXa + 745 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\ 746 - pC->m_pPto3GPPparams->PanZoomTopleftXa) * 747 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 748 Params.m_inputCoord.m_y = 749 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 750 (pC->m_pPto3GPPparams->PanZoomTopleftYa + 751 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\ 752 - pC->m_pPto3GPPparams->PanZoomTopleftYa) * 753 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 754 755 Params.m_inputSize.m_width = 756 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width * 757 (pC->m_pPto3GPPparams->PanZoomXa + 758 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\ 759 - pC->m_pPto3GPPparams->PanZoomXa) * 760 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 761 762 Params.m_inputSize.m_height = 763 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height * 764 
(pC->m_pPto3GPPparams->PanZoomXa + 765 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \ 766 - pC->m_pPto3GPPparams->PanZoomXa) * 767 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000)); 768 769 if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\ 770 > pC->m_pDecodedPlane->u_width) 771 { 772 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \ 773 - Params.m_inputCoord.m_x; 774 } 775 776 if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\ 777 > pC->m_pDecodedPlane->u_height) 778 { 779 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\ 780 - Params.m_inputCoord.m_y; 781 } 782 783 784 785 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 786 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 787 } 788 789 790 791 /** 792 Picture rendering: Black borders*/ 793 794 if(pC->m_mediaRendering == M4xVSS_kBlackBorders) 795 { 796 memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE, 797 (pImagePlanes[0].u_height*pImagePlanes[0].u_stride)); 798 memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE, 799 (pImagePlanes[1].u_height*pImagePlanes[1].u_stride)); 800 memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE, 801 (pImagePlanes[2].u_height*pImagePlanes[2].u_stride)); 802 803 /** 804 First without pan&zoom*/ 805 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 806 { 807 switch(pBasicTags.orientation) 808 { 809 default: 810 case M4COMMON_kOrientationUnknown: 811 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 812 case M4COMMON_kOrientationTopLeft: 813 case M4COMMON_kOrientationTopRight: 814 case M4COMMON_kOrientationBottomRight: 815 case M4COMMON_kOrientationBottomLeft: 816 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 817 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 818 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 819 { 820 /*it is height so black borders will be on the top and on the bottom side*/ 
821 Params.m_outputSize.m_width = pImagePlanes->u_width; 822 Params.m_outputSize.m_height = 823 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \ 824 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 825 /*number of lines at the top*/ 826 pImagePlanes[0].u_topleft = 827 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 828 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 829 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 830 pImagePlanes[1].u_topleft = 831 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 832 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride; 833 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 834 pImagePlanes[2].u_topleft = 835 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 836 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride; 837 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 838 } 839 else 840 { 841 /*it is width so black borders will be on the left and right side*/ 842 Params.m_outputSize.m_height = pImagePlanes->u_height; 843 Params.m_outputSize.m_width = 844 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 845 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 846 847 pImagePlanes[0].u_topleft = 848 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 849 -Params.m_outputSize.m_width)>>1)); 850 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 851 pImagePlanes[1].u_topleft = 852 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 853 -(Params.m_outputSize.m_width>>1)))>>1); 854 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 855 pImagePlanes[2].u_topleft = 856 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 857 -(Params.m_outputSize.m_width>>1)))>>1); 858 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 859 } 860 break; 861 case M4COMMON_kOrientationLeftTop: 862 case M4COMMON_kOrientationLeftBottom: 863 case M4COMMON_kOrientationRightTop: 864 case M4COMMON_kOrientationRightBottom: 865 
if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 866 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 867 //Params.m_inputSize.m_height > Params.m_inputSize.m_width) 868 { 869 /*it is height so black borders will be on the top and on 870 the bottom side*/ 871 Params.m_outputSize.m_height = pImagePlanes->u_width; 872 Params.m_outputSize.m_width = 873 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 874 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height); 875 /*number of lines at the top*/ 876 pImagePlanes[0].u_topleft = 877 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 878 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 879 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 880 pImagePlanes[1].u_topleft = 881 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 882 -(Params.m_outputSize.m_width>>1)))>>1)\ 883 *pImagePlanes[1].u_stride)+1; 884 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 885 pImagePlanes[2].u_topleft = 886 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 887 -(Params.m_outputSize.m_width>>1)))>>1)\ 888 *pImagePlanes[2].u_stride)+1; 889 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 890 } 891 else 892 { 893 /*it is width so black borders will be on the left and right side*/ 894 Params.m_outputSize.m_width = pImagePlanes->u_height; 895 Params.m_outputSize.m_height = 896 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 897 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width); 898 899 pImagePlanes[0].u_topleft = 900 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 901 -Params.m_outputSize.m_height))>>1))+1; 902 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 903 pImagePlanes[1].u_topleft = 904 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 905 -(Params.m_outputSize.m_height>>1)))>>1))+1; 906 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 907 pImagePlanes[2].u_topleft = 908 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 909 
-(Params.m_outputSize.m_height>>1)))>>1))+1; 910 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 911 } 912 break; 913 } 914 } 915 916 /** 917 Secondly with pan&zoom*/ 918 else 919 { 920 switch(pBasicTags.orientation) 921 { 922 default: 923 case M4COMMON_kOrientationUnknown: 924 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 925 case M4COMMON_kOrientationTopLeft: 926 case M4COMMON_kOrientationTopRight: 927 case M4COMMON_kOrientationBottomRight: 928 case M4COMMON_kOrientationBottomLeft: 929 /*NO ROTATION*/ 930 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\ 931 /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height) 932 //Params.m_inputSize.m_height < Params.m_inputSize.m_width) 933 { 934 /*Black borders will be on the top and bottom of the output video*/ 935 /*Maximum output height if the input image aspect ratio is kept and if 936 the output width is the screen width*/ 937 M4OSA_UInt32 tempOutputSizeHeight = 938 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\ 939 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width); 940 M4OSA_UInt32 tempInputSizeHeightMax = 0; 941 M4OSA_UInt32 tempFinalInputHeight = 0; 942 /*The output width is the screen width*/ 943 Params.m_outputSize.m_width = pImagePlanes->u_width; 944 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 945 946 /*Maximum input height according to the maximum output height 947 (proportional to the maximum output height)*/ 948 tempInputSizeHeightMax = (pImagePlanes->u_height\ 949 *Params.m_inputSize.m_height)/tempOutputSizeHeight; 950 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 951 952 /*Check if the maximum possible input height is contained into the 953 input image height*/ 954 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height) 955 { 956 /*The maximum possible input height is contained in the input 957 image height, 958 that means no black borders, the input pan zoom area will be extended 959 so that the input AIR height will be the 
maximum possible*/ 960 if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 961 <= Params.m_inputCoord.m_y 962 && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\ 963 <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\ 964 + Params.m_inputSize.m_height)) 965 { 966 /*The input pan zoom area can be extended symmetrically on the 967 top and bottom side*/ 968 Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \ 969 - Params.m_inputSize.m_height)>>1); 970 } 971 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 972 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 973 { 974 /*There is not enough place above the input pan zoom area to 975 extend it symmetrically, 976 so extend it to the maximum on the top*/ 977 Params.m_inputCoord.m_y = 0; 978 } 979 else 980 { 981 /*There is not enough place below the input pan zoom area to 982 extend it symmetrically, 983 so extend it to the maximum on the bottom*/ 984 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \ 985 - tempInputSizeHeightMax; 986 } 987 /*The input height of the AIR is the maximum possible height*/ 988 Params.m_inputSize.m_height = tempInputSizeHeightMax; 989 } 990 else 991 { 992 /*The maximum possible input height is greater than the input 993 image height, 994 that means black borders are necessary to keep aspect ratio 995 The input height of the AIR is all the input image height*/ 996 Params.m_outputSize.m_height = 997 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\ 998 /Params.m_inputSize.m_height; 999 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1000 Params.m_inputCoord.m_y = 0; 1001 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1002 pImagePlanes[0].u_topleft = 1003 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1004 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride; 1005 pImagePlanes[0].u_height = Params.m_outputSize.m_height; 1006 pImagePlanes[1].u_topleft = 1007 
((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1008 -(Params.m_outputSize.m_height>>1)))>>1)\ 1009 *pImagePlanes[1].u_stride); 1010 pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1; 1011 pImagePlanes[2].u_topleft = 1012 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1013 -(Params.m_outputSize.m_height>>1)))>>1)\ 1014 *pImagePlanes[2].u_stride); 1015 pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1; 1016 } 1017 } 1018 else 1019 { 1020 /*Black borders will be on the left and right side of the output video*/ 1021 /*Maximum output width if the input image aspect ratio is kept and if the 1022 output height is the screen height*/ 1023 M4OSA_UInt32 tempOutputSizeWidth = 1024 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \ 1025 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height); 1026 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1027 M4OSA_UInt32 tempFinalInputWidth = 0; 1028 /*The output height is the screen height*/ 1029 Params.m_outputSize.m_height = pImagePlanes->u_height; 1030 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1031 1032 /*Maximum input width according to the maximum output width 1033 (proportional to the maximum output width)*/ 1034 tempInputSizeWidthMax = 1035 (pImagePlanes->u_width*Params.m_inputSize.m_width)\ 1036 /tempOutputSizeWidth; 1037 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1038 1039 /*Check if the maximum possible input width is contained into the input 1040 image width*/ 1041 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width) 1042 { 1043 /*The maximum possible input width is contained in the input 1044 image width, 1045 that means no black borders, the input pan zoom area will be extended 1046 so that the input AIR width will be the maximum possible*/ 1047 if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \ 1048 <= Params.m_inputCoord.m_x 1049 && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\ 1050 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1051 + 
Params.m_inputSize.m_width)) 1052 { 1053 /*The input pan zoom area can be extended symmetrically on the 1054 right and left side*/ 1055 Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\ 1056 - Params.m_inputSize.m_width)>>1); 1057 } 1058 else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ 1059 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1060 { 1061 /*There is not enough place above the input pan zoom area to 1062 extend it symmetrically, 1063 so extend it to the maximum on the left*/ 1064 Params.m_inputCoord.m_x = 0; 1065 } 1066 else 1067 { 1068 /*There is not enough place below the input pan zoom area 1069 to extend it symmetrically, 1070 so extend it to the maximum on the right*/ 1071 Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \ 1072 - tempInputSizeWidthMax; 1073 } 1074 /*The input width of the AIR is the maximum possible width*/ 1075 Params.m_inputSize.m_width = tempInputSizeWidthMax; 1076 } 1077 else 1078 { 1079 /*The maximum possible input width is greater than the input 1080 image width, 1081 that means black borders are necessary to keep aspect ratio 1082 The input width of the AIR is all the input image width*/ 1083 Params.m_outputSize.m_width =\ 1084 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\ 1085 /Params.m_inputSize.m_width; 1086 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1087 Params.m_inputCoord.m_x = 0; 1088 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1089 pImagePlanes[0].u_topleft = 1090 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1091 -Params.m_outputSize.m_width)>>1)); 1092 pImagePlanes[0].u_width = Params.m_outputSize.m_width; 1093 pImagePlanes[1].u_topleft = 1094 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1095 -(Params.m_outputSize.m_width>>1)))>>1); 1096 pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1; 1097 pImagePlanes[2].u_topleft = 1098 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1099 -(Params.m_outputSize.m_width>>1)))>>1); 
1100 pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1; 1101 } 1102 } 1103 break; 1104 case M4COMMON_kOrientationLeftTop: 1105 case M4COMMON_kOrientationLeftBottom: 1106 case M4COMMON_kOrientationRightTop: 1107 case M4COMMON_kOrientationRightBottom: 1108 /*ROTATION*/ 1109 if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1110 /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height) 1111 //Params.m_inputSize.m_height > Params.m_inputSize.m_width) 1112 { 1113 /*Black borders will be on the left and right side of the output video*/ 1114 /*Maximum output height if the input image aspect ratio is kept and if 1115 the output height is the screen width*/ 1116 M4OSA_UInt32 tempOutputSizeHeight = 1117 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\ 1118 /pC->m_pDecodedPlane->u_height); 1119 M4OSA_UInt32 tempInputSizeHeightMax = 0; 1120 M4OSA_UInt32 tempFinalInputHeight = 0; 1121 /*The output width is the screen height*/ 1122 Params.m_outputSize.m_height = pImagePlanes->u_width; 1123 Params.m_outputSize.m_width= pImagePlanes->u_height; 1124 tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1; 1125 1126 /*Maximum input height according to the maximum output height 1127 (proportional to the maximum output height)*/ 1128 tempInputSizeHeightMax = 1129 (pImagePlanes->u_height*Params.m_inputSize.m_width)\ 1130 /tempOutputSizeHeight; 1131 tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1; 1132 1133 /*Check if the maximum possible input height is contained into the 1134 input image width (rotation included)*/ 1135 if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width) 1136 { 1137 /*The maximum possible input height is contained in the input 1138 image width (rotation included), 1139 that means no black borders, the input pan zoom area will be extended 1140 so that the input AIR width will be the maximum possible*/ 1141 if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \ 1142 <= Params.m_inputCoord.m_x 1143 
&& ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\ 1144 <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \ 1145 + Params.m_inputSize.m_width)) 1146 { 1147 /*The input pan zoom area can be extended symmetrically on the 1148 right and left side*/ 1149 Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \ 1150 - Params.m_inputSize.m_width)>>1); 1151 } 1152 else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\ 1153 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width)) 1154 { 1155 /*There is not enough place on the left of the input pan 1156 zoom area to extend it symmetrically, 1157 so extend it to the maximum on the left*/ 1158 Params.m_inputCoord.m_x = 0; 1159 } 1160 else 1161 { 1162 /*There is not enough place on the right of the input pan zoom 1163 area to extend it symmetrically, 1164 so extend it to the maximum on the right*/ 1165 Params.m_inputCoord.m_x = 1166 pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax; 1167 } 1168 /*The input width of the AIR is the maximum possible width*/ 1169 Params.m_inputSize.m_width = tempInputSizeHeightMax; 1170 } 1171 else 1172 { 1173 /*The maximum possible input height is greater than the input 1174 image width (rotation included), 1175 that means black borders are necessary to keep aspect ratio 1176 The input width of the AIR is all the input image width*/ 1177 Params.m_outputSize.m_width = 1178 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\ 1179 /Params.m_inputSize.m_width; 1180 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1181 Params.m_inputCoord.m_x = 0; 1182 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width; 1183 pImagePlanes[0].u_topleft = 1184 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\ 1185 -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1; 1186 pImagePlanes[0].u_height = Params.m_outputSize.m_width; 1187 pImagePlanes[1].u_topleft = 1188 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\ 1189 
-(Params.m_outputSize.m_width>>1)))>>1)\ 1190 *pImagePlanes[1].u_stride)+1; 1191 pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1; 1192 pImagePlanes[2].u_topleft = 1193 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\ 1194 -(Params.m_outputSize.m_width>>1)))>>1)\ 1195 *pImagePlanes[2].u_stride)+1; 1196 pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1; 1197 } 1198 } 1199 else 1200 { 1201 /*Black borders will be on the top and bottom of the output video*/ 1202 /*Maximum output width if the input image aspect ratio is kept and if 1203 the output width is the screen height*/ 1204 M4OSA_UInt32 tempOutputSizeWidth = 1205 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\ 1206 /pC->m_pDecodedPlane->u_width); 1207 M4OSA_UInt32 tempInputSizeWidthMax = 0; 1208 M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0; 1209 /*The output height is the screen width*/ 1210 Params.m_outputSize.m_width = pImagePlanes->u_height; 1211 Params.m_outputSize.m_height= pImagePlanes->u_width; 1212 tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1; 1213 1214 /*Maximum input width according to the maximum output width 1215 (proportional to the maximum output width)*/ 1216 tempInputSizeWidthMax = 1217 (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth; 1218 tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1; 1219 1220 /*Check if the maximum possible input width is contained into the input 1221 image height (rotation included)*/ 1222 if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height) 1223 { 1224 /*The maximum possible input width is contained in the input 1225 image height (rotation included), 1226 that means no black borders, the input pan zoom area will be extended 1227 so that the input AIR height will be the maximum possible*/ 1228 if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \ 1229 <= Params.m_inputCoord.m_y 1230 && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\ 1231 <= 
pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \ 1232 + Params.m_inputSize.m_height)) 1233 { 1234 /*The input pan zoom area can be extended symmetrically on 1235 the right and left side*/ 1236 Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \ 1237 - Params.m_inputSize.m_height)>>1); 1238 } 1239 else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\ 1240 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height)) 1241 { 1242 /*There is not enough place on the top of the input pan zoom 1243 area to extend it symmetrically, 1244 so extend it to the maximum on the top*/ 1245 Params.m_inputCoord.m_y = 0; 1246 } 1247 else 1248 { 1249 /*There is not enough place on the bottom of the input pan zoom 1250 area to extend it symmetrically, 1251 so extend it to the maximum on the bottom*/ 1252 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\ 1253 - tempInputSizeWidthMax; 1254 } 1255 /*The input height of the AIR is the maximum possible height*/ 1256 Params.m_inputSize.m_height = tempInputSizeWidthMax; 1257 } 1258 else 1259 { 1260 /*The maximum possible input width is greater than the input\ 1261 image height (rotation included), 1262 that means black borders are necessary to keep aspect ratio 1263 The input height of the AIR is all the input image height*/ 1264 Params.m_outputSize.m_height = 1265 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\ 1266 /Params.m_inputSize.m_height; 1267 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1268 Params.m_inputCoord.m_y = 0; 1269 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height; 1270 pImagePlanes[0].u_topleft = 1271 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\ 1272 -Params.m_outputSize.m_height))>>1))+1; 1273 pImagePlanes[0].u_width = Params.m_outputSize.m_height; 1274 pImagePlanes[1].u_topleft = 1275 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\ 1276 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1277 pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1; 1278 
pImagePlanes[2].u_topleft = 1279 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\ 1280 -(Params.m_outputSize.m_height>>1)))>>1))+1; 1281 pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1; 1282 } 1283 } 1284 break; 1285 } 1286 } 1287 1288 /*Width and height have to be even*/ 1289 Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1; 1290 Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1; 1291 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1292 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1293 pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1; 1294 pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1; 1295 pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1; 1296 pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1; 1297 pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1; 1298 pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1; 1299 1300 /*Check that values are coherent*/ 1301 if(Params.m_inputSize.m_height == Params.m_outputSize.m_height) 1302 { 1303 Params.m_inputSize.m_width = Params.m_outputSize.m_width; 1304 } 1305 else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width) 1306 { 1307 Params.m_inputSize.m_height = Params.m_outputSize.m_height; 1308 } 1309 } 1310 1311 /** 1312 Picture rendering: Resizing and Cropping*/ 1313 if(pC->m_mediaRendering != M4xVSS_kBlackBorders) 1314 { 1315 switch(pBasicTags.orientation) 1316 { 1317 default: 1318 case M4COMMON_kOrientationUnknown: 1319 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; 1320 case M4COMMON_kOrientationTopLeft: 1321 case M4COMMON_kOrientationTopRight: 1322 case M4COMMON_kOrientationBottomRight: 1323 case M4COMMON_kOrientationBottomLeft: 1324 Params.m_outputSize.m_height = pImagePlanes->u_height; 1325 Params.m_outputSize.m_width = pImagePlanes->u_width; 1326 break; 1327 case M4COMMON_kOrientationLeftTop: 1328 case M4COMMON_kOrientationLeftBottom: 1329 case 
M4COMMON_kOrientationRightTop: 1330 case M4COMMON_kOrientationRightBottom: 1331 Params.m_outputSize.m_height = pImagePlanes->u_width; 1332 Params.m_outputSize.m_width = pImagePlanes->u_height; 1333 break; 1334 } 1335 } 1336 1337 /** 1338 Picture rendering: Cropping*/ 1339 if(pC->m_mediaRendering == M4xVSS_kCropping) 1340 { 1341 if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\ 1342 /Params.m_outputSize.m_width<Params.m_inputSize.m_height) 1343 { 1344 M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height; 1345 /*height will be cropped*/ 1346 Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height \ 1347 * Params.m_inputSize.m_width) /Params.m_outputSize.m_width); 1348 Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1; 1349 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1350 { 1351 Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\ 1352 ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1); 1353 } 1354 else 1355 { 1356 Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\ 1357 ((tempHeight - Params.m_inputSize.m_height))>>1); 1358 } 1359 } 1360 else 1361 { 1362 M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width; 1363 /*width will be cropped*/ 1364 Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width \ 1365 * Params.m_inputSize.m_height) /Params.m_outputSize.m_height); 1366 Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1; 1367 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom) 1368 { 1369 Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\ 1370 ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1); 1371 } 1372 else 1373 { 1374 Params.m_inputCoord.m_x += (M4OSA_Int32)\ 1375 (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1); 1376 } 1377 } 1378 } 1379 1380 1381 1382 /** 1383 * Call AIR functions */ 1384 if(M4OSA_NULL == pC->m_air_context) 1385 { 1386 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); 1387 if(err != M4NO_ERROR) 1388 { 
1389 free(pC->m_pDecodedPlane[0].pac_data); 1390 free(pC->m_pDecodedPlane); 1391 pC->m_pDecodedPlane = M4OSA_NULL; 1392 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1393 Error when initializing AIR: 0x%x", err); 1394 return err; 1395 } 1396 } 1397 1398 err = M4AIR_configure(pC->m_air_context, &Params); 1399 if(err != M4NO_ERROR) 1400 { 1401 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\ 1402 Error when configuring AIR: 0x%x", err); 1403 M4AIR_cleanUp(pC->m_air_context); 1404 free(pC->m_pDecodedPlane[0].pac_data); 1405 free(pC->m_pDecodedPlane); 1406 pC->m_pDecodedPlane = M4OSA_NULL; 1407 return err; 1408 } 1409 1410 err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes); 1411 if(err != M4NO_ERROR) 1412 { 1413 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err); 1414 M4AIR_cleanUp(pC->m_air_context); 1415 free(pC->m_pDecodedPlane[0].pac_data); 1416 free(pC->m_pDecodedPlane); 1417 pC->m_pDecodedPlane = M4OSA_NULL; 1418 return err; 1419 } 1420 pImagePlanes[0] = pImagePlanes1; 1421 pImagePlanes[1] = pImagePlanes2; 1422 pImagePlanes[2] = pImagePlanes3; 1423 } 1424 1425 1426 /** 1427 * Increment the image counter */ 1428 pC->m_ImageCounter++; 1429 1430 /** 1431 * Check end of sequence */ 1432 last_frame_flag = (pC->m_ImageCounter >= pC->m_NbImage); 1433 1434 /** 1435 * Keep the picture duration */ 1436 *pPictureDuration = pC->m_timeDuration; 1437 1438 if (1 == last_frame_flag) 1439 { 1440 if(M4OSA_NULL != pC->m_air_context) 1441 { 1442 err = M4AIR_cleanUp(pC->m_air_context); 1443 if(err != M4NO_ERROR) 1444 { 1445 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err); 1446 return err; 1447 } 1448 } 1449 if(M4OSA_NULL != pC->m_pDecodedPlane) 1450 { 1451 free(pC->m_pDecodedPlane[0].pac_data); 1452 free(pC->m_pDecodedPlane); 1453 pC->m_pDecodedPlane = M4OSA_NULL; 1454 } 1455 return M4PTO3GPP_WAR_LAST_PICTURE; 1456 } 1457 1458 M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving "); 1459 return M4NO_ERROR; 
1460} 1461 1462/** 1463 ****************************************************************************** 1464 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1465 * @brief This function initializes Pto3GPP with the given parameters 1466 * @note The "Pictures to 3GPP" parameters are given by the internal xVSS 1467 * context. This context contains a pointer on the current element 1468 * of the chained list of Pto3GPP parameters. 1469 * @param pContext (IN) The integrator own context 1470 * 1471 * @return M4NO_ERROR: No error 1472 * @return M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one 1473 * @return M4ERR_PARAMETER: At least one of the function parameters is null 1474 ****************************************************************************** 1475 */ 1476M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext) 1477{ 1478 /************************************************************************/ 1479 /* Definitions to generate dummy AMR file used to add AMR silence in files generated 1480 by Pto3GPP */ 1481 #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE 13 1482 /* This constant is defined in M4VSS3GPP_InternalConfig.h */ 1483 extern const M4OSA_UInt8\ 1484 M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE]; 1485 1486 /* AMR silent frame used to compute dummy AMR silence file */ 1487 #define M4VSS3GPP_AMR_HEADER_SIZE 6 1488 const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE] = 1489 { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a }; 1490 /************************************************************************/ 1491 1492 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 1493 M4OSA_ERR err; 1494 M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL; 1495 M4PTO3GPP_Params Params; 1496 M4xVSS_PictureCallbackCtxt* pCallBackCtxt; 1497 M4OSA_Bool cmpResult=M4OSA_FALSE; 1498 M4OSA_Context pDummyAMRFile; 1499 M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN]; 1500 /*UTF conversion 
support*/ 1501 M4OSA_Char* pDecodedPath = M4OSA_NULL; 1502 M4OSA_UInt32 i; 1503 1504 /** 1505 * Create a M4PTO3GPP instance */ 1506 err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr, 1507 xVSS_context->pFileWritePtr); 1508 if (err != M4NO_ERROR) 1509 { 1510 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err); 1511 return err; 1512 } 1513 1514 pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt), 1515 M4VS,(M4OSA_Char *) "Pto3gpp callback struct"); 1516 if(pCallBackCtxt == M4OSA_NULL) 1517 { 1518 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp"); 1519 return M4ERR_ALLOC; 1520 } 1521 1522 Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize; 1523 Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat; 1524 1525 /** 1526 * Generate "dummy" amr file containing silence in temporary folder */ 1527 M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1); 1528 strncat((char *)out_amr, (const char *)"dummy.amr\0", 10); 1529 1530 /** 1531 * UTF conversion: convert the temporary path into the customer format*/ 1532 pDecodedPath = out_amr; 1533 1534 if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL 1535 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL) 1536 { 1537 M4OSA_UInt32 length = 0; 1538 err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr, 1539 (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length); 1540 if(err != M4NO_ERROR) 1541 { 1542 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\ 1543 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err); 1544 return err; 1545 } 1546 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer; 1547 } 1548 1549 /** 1550 * End of the conversion, now use the converted path*/ 1551 1552 err = 
xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite); 1553 1554 /*Commented because of the use of the UTF conversion see above*/ 1555/* err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite); 1556 */ 1557 if(err != M4NO_ERROR) 1558 { 1559 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\ 1560 error: 0x%x\n",out_amr, err); 1561 return err; 1562 } 1563 1564 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1565 (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE); 1566 if(err != M4NO_ERROR) 1567 { 1568 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\ 1569 error: 0x%x\n",out_amr, err); 1570 return err; 1571 } 1572 1573 err = xVSS_context->pFileWritePtr->writeData(pDummyAMRFile, 1574 (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE); 1575 if(err != M4NO_ERROR) 1576 { 1577 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1578 Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err); 1579 return err; 1580 } 1581 1582 err = xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile); 1583 if(err != M4NO_ERROR) 1584 { 1585 M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \ 1586 Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err); 1587 return err; 1588 } 1589 1590 /** 1591 * Fill parameters for Pto3GPP with the parameters contained in the current element of the 1592 * Pto3GPP parameters chained list and with default parameters */ 1593/*+ New Encoder bitrates */ 1594 if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) { 1595 Params.OutputVideoBitrate = M4VIDEOEDITING_kVARIABLE_KBPS; 1596 } 1597 else { 1598 Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate; 1599 } 1600 M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d", 1601 Params.OutputVideoBitrate); 1602/*- New Encoder bitrates */ 1603 
Params.OutputFileMaxSize = M4PTO3GPP_kUNLIMITED; 1604 Params.pPictureCallbackFct = M4xVSS_PictureCallbackFct; 1605 Params.pPictureCallbackCtxt = pCallBackCtxt; 1606 /*FB: change to use the converted path (UTF conversion) see the conversion above*/ 1607 /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */ 1608 Params.pInputAudioTrackFile = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr; 1609 Params.AudioPaddingMode = M4PTO3GPP_kAudioPaddingMode_Loop; 1610 Params.AudioFileFormat = M4VIDEOEDITING_kFileType_AMR; 1611 Params.pOutput3gppFile = xVSS_context->pPTo3GPPcurrentParams->pFileOut; 1612 Params.pTemporaryFile = xVSS_context->pPTo3GPPcurrentParams->pFileTemp; 1613 /*+PR No: blrnxpsw#223*/ 1614 /*Increasing frequency of Frame, calculating Nos of Frame = duration /FPS */ 1615 /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/ 1616 /*If case check for PanZoom removed */ 1617 Params.NbVideoFrames = (M4OSA_UInt32) 1618 (xVSS_context->pPTo3GPPcurrentParams->duration \ 1619 / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */ 1620 pCallBackCtxt->m_timeDuration = xVSS_context->pPTo3GPPcurrentParams->framerate; 1621 /*-PR No: blrnxpsw#223*/ 1622 pCallBackCtxt->m_ImageCounter = 0; 1623 pCallBackCtxt->m_FileIn = xVSS_context->pPTo3GPPcurrentParams->pFileIn; 1624 pCallBackCtxt->m_NbImage = Params.NbVideoFrames; 1625 pCallBackCtxt->m_pFileReadPtr = xVSS_context->pFileReadPtr; 1626 pCallBackCtxt->m_pDecodedPlane = M4OSA_NULL; 1627 pCallBackCtxt->m_pPto3GPPparams = xVSS_context->pPTo3GPPcurrentParams; 1628 pCallBackCtxt->m_air_context = M4OSA_NULL; 1629 pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering; 1630 1631 /** 1632 * Set the input and output files */ 1633 err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params); 1634 if (err != M4NO_ERROR) 1635 { 1636 M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err); 1637 if(pCallBackCtxt != M4OSA_NULL) 1638 { 1639 free(pCallBackCtxt); 1640 
            pCallBackCtxt = M4OSA_NULL;
        }
        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
        return err;
    }

    /**
    * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
    xVSS_context->pCallBackCtxt = pCallBackCtxt;

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
 * @brief    This function cleans up Pto3GPP
 * @note     Frees the picture callback context, finalizes and frees the
 *           M4PTO3GPP instance, and removes the temporary "dummy.amr" file
 *           that was used as silent audio input during the conversion.
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;
    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
    /*UTF conversion support*/
    M4OSA_Char* pDecodedPath = M4OSA_NULL;

    /**
    * Free the PTO3GPP callback context */
    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
    {
        free(xVSS_context->pCallBackCtxt);
        xVSS_context->pCallBackCtxt = M4OSA_NULL;
    }

    /**
    * Finalize the output file */
    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
        /* Best effort: still release the PTO3GPP instance before bailing out */
        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
        return err;
    }

    /**
    * Free this M4PTO3GPP instance */
    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
        return err;
    }

    /**
    * Remove dummy.amr file */
    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
    /* NOTE(review): strncat can overflow out_amr when pTempPath is close to
     * M4XVSS_MAX_PATH_LEN - 1 characters long — verify callers bound pTempPath */
    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);

    /**
    * UTF conversion: convert the temporary path into the customer format*/
    pDecodedPath = out_amr;

    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        M4OSA_UInt32 length = 0;
        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
            (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
        if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
                M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
            return err;
        }
        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
    }
    /**
    * End of the conversion, now use the decoded path*/
    remove((const char *)pDecodedPath);

    /*Commented because of the use of the UTF conversion*/
/*    remove(out_amr);
    */

    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
    xVSS_context->pCallBackCtxt = M4OSA_NULL;

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
 * @brief    This function converts an RGB565 plane to YUV420 planar
 * @note     It is used only for framing effect
 *           It allocates output YUV planes
 *           The three planes share one contiguous buffer: U and V point
 *           inside the allocation made for Y.
 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
{
    M4OSA_ERR err;

    /**
    * Allocate output YUV planes */
    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
        M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
    if(framingCtx->FramingYuv == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
        return M4ERR_ALLOC;
    }
    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
    framingCtx->FramingYuv[0].u_topleft = 0;
    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
    /* Single buffer holding Y + U + V: width*height*3/2 bytes (YUV420) */
    framingCtx->FramingYuv[0].pac_data =
        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
                "Alloc for the Convertion output YUV");;
    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
    {
        /* NOTE(review): framingCtx->FramingYuv allocated above is not freed on
         * this path — leaked unless the caller cleans it up; verify callers */
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
        return M4ERR_ALLOC;
    }
    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
    framingCtx->FramingYuv[1].u_topleft = 0;
    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
    /* U plane points just after the Y samples inside the shared buffer */
    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
    framingCtx->FramingYuv[2].u_topleft = 0;
    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
    /* V plane points just after the U samples */
    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;

    /**
    * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
     effect */
    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
    if(err != M4NO_ERROR)
    {
        /* Conversion failure is only traced; the (partially filled) planes are
         * still returned to the caller with the error code below */
        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
            error when converting from RGB to YUV: 0x%x\n", err);
    }

    framingCtx->duration = 0;
    framingCtx->previousClipTime = -1;
    framingCtx->previewOffsetClipTime = -1;

    /**
    * Only one element in the chained list (no animated image with RGB buffer...) */
    framingCtx->pCurrent = framingCtx;
    framingCtx->pNext = framingCtx;

    return M4NO_ERROR;
}

/**
 * Fill a 16-bit-per-pixel plane with the RGB565 transparency key color.
 * Each pixel is written as two bytes: high byte then low byte of
 * TRANSPARENT_COLOR (0x7E0, pure green).
 * @param planeIn  (IN/OUT) Destination buffer (treated as RGB565 pixels)
 * @param size     (IN)     Buffer size in bytes (size>>1 pixels are written)
 * @return M4NO_ERROR always
 */
M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
{
    M4OSA_UInt32 i;
    M4OSA_UInt8* plane = planeIn;
    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

    for(i=0; i<(size>>1); i++)
    {
        *plane++ = transparent1;
        *plane++ = transparent2;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toYUV420_FrammingEffect(M4OSA_Context pContext,
 *                                                M4VSS3GPP_EffectSettings* pEffect,
 *                                                M4xVSS_FramingStruct* framingCtx,
                                                  M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
 *
 * @brief    This function converts ARGB8888 input file  to YUV420 whenused for framming effect
 * @note     The input ARGB8888 file path is contained in the pEffect structure
 *           If the ARGB8888 must be resized to fit output video size, this function
 *           will do it.
 * @param    pContext    (IN) The integrator own context
 * @param    pEffect     (IN) The effect structure containing all informations on
 *                        the file to decode, resizing ...
 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 * @return    M4ERR_FILE_NOT_FOUND: File not found.
 ******************************************************************************
 */


M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
                                                               M4VSS3GPP_EffectSettings* pEffect,
                                                               M4xVSS_FramingStruct* framingCtx,
                                                               M4VIDEOEDITING_VideoFrameSize\
                                                               OutputVideoResolution)
{
    M4OSA_ERR err = M4NO_ERROR;
    M4OSA_Context pARGBIn;
    M4OSA_UInt32 file_size;          /* NOTE(review): unused local */
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_UInt32 width, height, width_out, height_out;
    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
    /* High / low byte of the RGB565 transparency key (TRANSPARENT_COLOR) */
    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
    /*UTF conversion support*/
    M4OSA_Char* pDecodedPath = M4OSA_NULL;
    M4OSA_UInt32 i = 0,j = 0;
    M4VIFI_ImagePlane rgbPlane;
    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
    M4OSA_UInt32 frameSize;
    M4OSA_UInt32 tempAlphaPercent = 0;
    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
    M4OSA_UInt16 *ptr = M4OSA_NULL;
    M4OSA_UInt32 z = 0;

    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");

    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
        framingCtx->width,framingCtx->height);

    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
        "Image argb data");
    if(pTmpData == M4OSA_NULL) {
        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
        return M4ERR_ALLOC;
    }
    /**
    * UTF conversion: convert the file path into the customer format*/
    pDecodedPath = pFile;

    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        M4OSA_UInt32 length = 0;
        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
            (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
        if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\
                M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
            free(pTmpData);
            pTmpData = M4OSA_NULL;
            return err;
        }
        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
    }

    /**
    * End of the conversion, now use the decoded path*/

    /* Open input ARGB8888 file and store it into memory */
    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);

    if(err != M4NO_ERROR)
    {
        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
        free(pTmpData);
        pTmpData = M4OSA_NULL;
        return err;
    }

    /* NOTE(review): a short read is not detected here — frameSize_argb is
     * updated in place by readData but the new value is not checked */
    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
    if(err != M4NO_ERROR)
    {
        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
        free(pTmpData);
        pTmpData = M4OSA_NULL;
        return err;
    }


    err = xVSS_context->pFileReadPtr->closeRead(pARGBIn);
    if(err != M4NO_ERROR)
    {
        M4OSA_TRACE1_2("Can't close input png file %s, error: 0x%x\n",pFile, err);
        free(pTmpData);
        pTmpData = M4OSA_NULL;
        return err;
    }


    rgbPlane.u_height = framingCtx->height;
    rgbPlane.u_width = framingCtx->width;
    rgbPlane.u_stride = rgbPlane.u_width*3;
    rgbPlane.u_topleft = 0;

    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
    /* Extra 2*width bytes leave room for the odd-height padding line below */
    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
        M4VS, (M4OSA_Char*)"Image clip RGB888 data");
    if(rgbPlane.pac_data == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
        free(pTmpData);
        return M4ERR_ALLOC;
    }

    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
        Remove the alpha channel  ");

    /* premultiplied alpha % on RGB */
    for (i=0, j = 0; i < frameSize_argb; i += 4) {
        /* this is alpha value */
        /* NOTE(review): (i % 4) == 0 is always true since i advances by 4 */
        if ((i % 4) == 0)
        {
            tempAlphaPercent = pTmpData[i];
        }

        /* R */
        rgbPlane.pac_data[j] = pTmpData[i+1];
        j++;

        /* G */
        if (tempAlphaPercent > 0) {
            rgbPlane.pac_data[j] = pTmpData[i+2];
            j++;
        } else {/* In case of alpha value 0, make GREEN to 255 */
            /* Fully transparent pixels are pushed towards the green
             * transparency key so they are dropped when blending */
            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
            j++;
        }

        /* B */
        rgbPlane.pac_data[j] = pTmpData[i+3];
        j++;
    }

    free(pTmpData);
    pTmpData = M4OSA_NULL;

    /* convert RGB888 to RGB565 */

    /* allocate temp RGB 565 buffer */
    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
        (4 * (framingCtx->width + framingCtx->height + 1)),
        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
    if (TempPacData == M4OSA_NULL) {
        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
        free(rgbPlane.pac_data);
        return M4ERR_ALLOC;
    }

    ptr = (M4OSA_UInt16 *)TempPacData;
    z = 0;

    /* Pack each RGB888 triple into one 16-bit RGB565 pixel */
    for (i = 0; i < j ; i += 3)
    {
        ptr[z++] = PACK_RGB565(0, rgbPlane.pac_data[i],
            rgbPlane.pac_data[i+1],
            rgbPlane.pac_data[i+2]);
    }

    /* free the RBG888 and assign RGB565 */
    free(rgbPlane.pac_data);
    rgbPlane.pac_data = TempPacData;

    /**
    * Check if output sizes are odd */
    if(rgbPlane.u_height % 2 != 0)
    {
        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
        M4OSA_UInt32 i;
        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
            output height is odd  ");
        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;

        for(i=0;i<rgbPlane.u_width;i++)
        {
            *output_pac_data++ = transparent1;
            *output_pac_data++ = transparent2;
        }

        /**
        * We just add a white line to the PNG that will be transparent */
        rgbPlane.u_height++;
    }
    if(rgbPlane.u_width % 2 != 0)
    {
        /**
        * We add a new column of white (=transparent), but we need to parse all RGB lines ... */
        M4OSA_UInt32 i;
        M4VIFI_UInt8* newRGBpac_data;
        M4VIFI_UInt8* output_pac_data, *input_pac_data;

        rgbPlane.u_width++;
        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
            output width is odd  ");
        /**
        * We need to allocate a new RGB output buffer in which all decoded data
        + white line will be copied */
        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");

        if(newRGBpac_data == M4OSA_NULL)
        {
            M4OSA_TRACE1_0("Allocation error in \
                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
            free(rgbPlane.pac_data);
            return M4ERR_ALLOC;
        }

        output_pac_data= newRGBpac_data;
        input_pac_data = rgbPlane.pac_data;

        /* Copy each row, then append one transparency-key pixel per row */
        for(i=0;i<rgbPlane.u_height;i++)
        {
            memcpy((void *)output_pac_data, (void *)input_pac_data,
                (rgbPlane.u_width-1)*2);

            output_pac_data += ((rgbPlane.u_width-1)*2);
            /* Put the pixel to transparency color */
            *output_pac_data++ = transparent1;
            *output_pac_data++ = transparent2;

            input_pac_data += ((rgbPlane.u_width-1)*2);
        }
        free(rgbPlane.pac_data);
        rgbPlane.pac_data = newRGBpac_data;
    }

    /* reset stride */
    rgbPlane.u_stride = rgbPlane.u_width*2;

    /**
    * Initialize chained list parameters */
    framingCtx->duration = 0;
    framingCtx->previousClipTime = -1;
    framingCtx->previewOffsetClipTime = -1;

    /**
    * Only one element in the chained list (no animated image ...) */
    framingCtx->pCurrent = framingCtx;
    framingCtx->pNext = framingCtx;

    /**
    * Get output width/height */
    switch(OutputVideoResolution)
    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
    {
    case M4VIDEOEDITING_kSQCIF:
        width_out = 128;
        height_out = 96;
        break;
    case M4VIDEOEDITING_kQQVGA:
        width_out = 160;
        height_out = 120;
        break;
    case M4VIDEOEDITING_kQCIF:
        width_out = 176;
        height_out = 144;
        break;
    case M4VIDEOEDITING_kQVGA:
        width_out = 320;
        height_out = 240;
        break;
    case M4VIDEOEDITING_kCIF:
        width_out = 352;
        height_out = 288;
        break;
    case M4VIDEOEDITING_kVGA:
        width_out = 640;
        height_out = 480;
        break;
    case M4VIDEOEDITING_kWVGA:
        width_out = 800;
        height_out = 480;
        break;
    case M4VIDEOEDITING_kNTSC:
        width_out = 720;
        height_out = 480;
        break;
    case M4VIDEOEDITING_k640_360:
        width_out = 640;
        height_out = 360;
        break;
    case M4VIDEOEDITING_k854_480:
        // StageFright encoders require %16 resolution
        width_out = M4ENCODER_854_480_Width;
        height_out = 480;
        break;
    case M4VIDEOEDITING_kHD1280:
        width_out = 1280;
        height_out = 720;
        break;
    case M4VIDEOEDITING_kHD1080:
        // StageFright encoders require %16 resolution
        width_out = M4ENCODER_HD1080_Width;
        height_out = 720;
        break;
    case M4VIDEOEDITING_kHD960:
        width_out = 960;
        height_out = 720;
        break;

    /**
    * If output video size is not given, we take QCIF size,
    * should not happen, because already done in M4xVSS_sendCommand */
    default:
        width_out = 176;
        height_out = 144;
        break;
    }

    /**
    * Allocate output planes structures */
    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
        (M4OSA_Char *)"Framing Output plane RGB");
    if(framingCtx->FramingRgb == M4OSA_NULL)
    {
        /* NOTE(review): rgbPlane.pac_data is not freed on this path — leak */
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
        return M4ERR_ALLOC;
    }
    /**
    * Resize RGB if needed */
    if((pEffect->xVSS.bResize) &&
        (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
    {
        width = width_out;
        height = height_out;

        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
            New Width and height %d %d  ",width,height);

        framingCtx->FramingRgb->u_height = height_out;
        framingCtx->FramingRgb->u_width = width_out;
        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
        framingCtx->FramingRgb->u_topleft = 0;

        framingCtx->FramingRgb->pac_data =
            (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
                (M4OSA_Char *)"Framing Output pac_data RGB");

        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
        {
            M4OSA_TRACE1_0("Allocation error in \
                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
            free(framingCtx->FramingRgb);
            free(rgbPlane.pac_data);
            return M4ERR_ALLOC;
        }

        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Resizing Needed ");
        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
            rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);

        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);

        if(err != M4NO_ERROR)
        {
            /* NOTE(review): rgbPlane.pac_data, FramingRgb->pac_data and
             * FramingRgb itself are not freed on this path — leak */
            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
                when resizing RGB plane: 0x%x\n", err);
            return err;
        }

        if(rgbPlane.pac_data != M4OSA_NULL)
        {
            free(rgbPlane.pac_data);
            rgbPlane.pac_data = M4OSA_NULL;
        }
    }
    else
    {

        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
            Resizing Not Needed ");

        width = rgbPlane.u_width;
        height = rgbPlane.u_height;
        framingCtx->FramingRgb->u_height = height;
        framingCtx->FramingRgb->u_width = width;
        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
        framingCtx->FramingRgb->u_topleft = 0;
        /* Ownership of the RGB565 buffer is transferred to FramingRgb here */
        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
    }


    if(pEffect->xVSS.bResize)
    {
        /**
        * Force topleft to 0 for pure framing effect */
        framingCtx->topleft_x = 0;
        framingCtx->topleft_y = 0;
    }


    /**
    * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
     effect */
    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
        (M4OSA_Char *)"Framing Output plane YUV");
    if(framingCtx->FramingYuv == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
        free(framingCtx->FramingRgb->pac_data);
        return M4ERR_ALLOC;
    }

    // Alloc for Y, U and V planes
    /* Dimensions rounded up to even values, as required by YUV420 */
    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
    framingCtx->FramingYuv[0].u_topleft = 0;
    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
        (M4OSA_Char *)"Alloc for the output Y");
    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
        free(framingCtx->FramingYuv);
        free(framingCtx->FramingRgb->pac_data);
        return M4ERR_ALLOC;
    }
    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
    framingCtx->FramingYuv[1].u_topleft = 0;
    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;


    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
        (M4OSA_Char *)"Alloc for the output U");
    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
        free(framingCtx->FramingYuv[0].pac_data);
        free(framingCtx->FramingYuv);
        free(framingCtx->FramingRgb->pac_data);
        return M4ERR_ALLOC;
    }

    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
    framingCtx->FramingYuv[2].u_topleft = 0;
    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;


    /* NOTE(review): size uses FramingYuv[0].u_height (full height) instead of
     * FramingYuv[2].u_height — over-allocates the V plane by 2x; likely meant
     * [2].u_height to mirror the U plane above */
    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[0].u_height, M4VS,
        (M4OSA_Char *)"Alloc for the output V");
    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
        free(framingCtx->FramingYuv[1].pac_data);
        free(framingCtx->FramingYuv[0].pac_data);
        free(framingCtx->FramingYuv);
        free(framingCtx->FramingRgb->pac_data);
        return M4ERR_ALLOC;
    }

    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
        convert RGB to YUV ");

    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);

    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
    }
    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Leaving ");
    return err;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
 *
 * @brief    This function prepares VSS for editing
 * @note     It also set special xVSS effect as external effects for the VSS
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4VSS3GPP_EditContext pVssCtxt;
    M4OSA_UInt32 i,j;
    M4OSA_ERR err;

    /**
    * Create a VSS 3GPP edition instance */
    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
            err);
        M4VSS3GPP_editCleanUp(pVssCtxt);
        return err;
    }

    /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
    if(xVSS_context->targetedBitrate != 0)
    {
        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;

        pVSSContext->bIsMMS = M4OSA_TRUE;
        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
    }

    /*Warning: since the adding of the UTF conversion, pSettings has been changed in the next
    part in pCurrentEditSettings (there is a
specific current editing structure for the saving, 2364 as for the preview)*/ 2365 2366 /** 2367 * Set the external video effect functions, for saving mode (to be moved to 2368 M4xVSS_saveStart() ?)*/ 2369 for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++) 2370 { 2371 for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++) 2372 { 2373 if (M4xVSS_kVideoEffectType_BlackAndWhite == 2374 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2375 { 2376 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2377 M4VSS3GPP_externalVideoEffectColor; 2378 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2379 // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite; 2380 /*commented FB*/ 2381 /** 2382 * We do not need to set the color context, it is already set 2383 during sendCommand function */ 2384 } 2385 if (M4xVSS_kVideoEffectType_Pink == 2386 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2387 { 2388 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2389 M4VSS3GPP_externalVideoEffectColor; 2390 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2391 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't 2392 // use any function context */ 2393 /*commented FB*/ 2394 /** 2395 * We do not need to set the color context, 2396 it is already set during sendCommand function */ 2397 } 2398 if (M4xVSS_kVideoEffectType_Green == 2399 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2400 { 2401 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2402 M4VSS3GPP_externalVideoEffectColor; 2403 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2404 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green; 2405 /**< we don't use any function context */ 2406 /*commented FB*/ 2407 /** 2408 * We do not need to set the color context, it is already set during 2409 sendCommand function */ 2410 } 2411 if (M4xVSS_kVideoEffectType_Sepia == 2412 
xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2413 { 2414 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2415 M4VSS3GPP_externalVideoEffectColor; 2416 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2417 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia; 2418 /**< we don't use any function context */ 2419 /*commented FB*/ 2420 /** 2421 * We do not need to set the color context, it is already set during 2422 sendCommand function */ 2423 } 2424 if (M4xVSS_kVideoEffectType_Fifties == 2425 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2426 { 2427 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2428 M4VSS3GPP_externalVideoEffectFifties; 2429 /** 2430 * We do not need to set the framing context, it is already set during 2431 sendCommand function */ 2432 } 2433 if (M4xVSS_kVideoEffectType_Negative == 2434 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2435 { 2436 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2437 M4VSS3GPP_externalVideoEffectColor; 2438 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2439 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative; 2440 /**< we don't use any function context */ 2441 /*commented FB*/ 2442 /** 2443 * We do not need to set the color context, it is already set during 2444 sendCommand function */ 2445 } 2446 if (M4xVSS_kVideoEffectType_Framing == 2447 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2448 { 2449 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2450 M4VSS3GPP_externalVideoEffectFraming; 2451 /** 2452 * We do not need to set the framing context, it is already set during 2453 sendCommand function */ 2454 } 2455 if (M4xVSS_kVideoEffectType_ZoomIn == 2456 xVSS_context->pSettings->Effects[j].VideoEffectType) 2457 { 2458 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2459 M4VSS3GPP_externalVideoEffectZoom; 2460 
xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2461 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any 2462 function context */ 2463 } 2464 if (M4xVSS_kVideoEffectType_ZoomOut == 2465 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2466 { 2467 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2468 M4VSS3GPP_externalVideoEffectZoom; 2469 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt = 2470 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any 2471 function context */ 2472 } 2473 if (M4xVSS_kVideoEffectType_ColorRGB16 == 2474 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2475 { 2476 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2477 M4VSS3GPP_externalVideoEffectColor; 2478 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2479 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16; 2480 /**< we don't use any function context */ 2481 /** 2482 * We do not need to set the color context, it is already set during 2483 sendCommand function */ 2484 } 2485 if (M4xVSS_kVideoEffectType_Gradient == 2486 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType) 2487 { 2488 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct = 2489 M4VSS3GPP_externalVideoEffectColor; 2490 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt = 2491 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16; 2492 /**< we don't use any function context */ 2493 /** 2494 * We do not need to set the color context, it is already set during 2495 sendCommand function */ 2496 } 2497 2498 } 2499 } 2500 2501 /** 2502 * Open the VSS 3GPP */ 2503 err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings); 2504 if (err != M4NO_ERROR) 2505 { 2506 M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\ 2507 M4VSS3GPP_editOpen returned 0x%x\n",err); 2508 M4VSS3GPP_editCleanUp(pVssCtxt); 2509 return err; 2510 } 2511 2512 /** 2513 * 
Save VSS context to be able to close / free VSS later */ 2514 xVSS_context->pCurrentEditContext = pVssCtxt; 2515 2516 return M4NO_ERROR; 2517} 2518 2519/** 2520 ****************************************************************************** 2521 * prototype M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext) 2522 * 2523 * @brief This function cleans up VSS 2524 * @note 2525 * @param pContext (IN) The integrator own context 2526 * 2527 * @return M4NO_ERROR: No error 2528 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2529 ****************************************************************************** 2530 */ 2531M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext) 2532{ 2533 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 2534 M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext; 2535 M4OSA_ERR err; 2536 2537 if(xVSS_context->pCurrentEditContext != M4OSA_NULL) 2538 { 2539 /** 2540 * Close the VSS 3GPP */ 2541 err = M4VSS3GPP_editClose(pVssCtxt); 2542 if (err != M4NO_ERROR) 2543 { 2544 M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\ 2545 M4VSS3GPP_editClose returned 0x%x\n",err); 2546 M4VSS3GPP_editCleanUp(pVssCtxt); 2547 return err; 2548 } 2549 2550 /** 2551 * Free this VSS3GPP edition instance */ 2552 err = M4VSS3GPP_editCleanUp(pVssCtxt); 2553 if (err != M4NO_ERROR) 2554 { 2555 M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \ 2556 M4VSS3GPP_editCleanUp returned 0x%x\n",err); 2557 return err; 2558 } 2559 } 2560 2561 return M4NO_ERROR; 2562} 2563 2564/** 2565 ****************************************************************************** 2566 * prototype M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext) 2567 * 2568 * @brief This function prepares VSS for audio mixing 2569 * @note It takes its parameters from the BGM settings in the xVSS internal context 2570 * @param pContext (IN) The integrator own context 2571 * 2572 * @return M4NO_ERROR: No error 2573 * @return 
M4ERR_PARAMETER: At least one of the function parameters is null
 * @return    M4ERR_ALLOC: Allocation error (no more memory)
 ******************************************************************************
 */
/***
 * FB: the function has been modified since the structure used for the saving is now the
 * pCurrentEditSettings and not the pSettings
 * This change has been added for the UTF support
 * All the "xVSS_context->pSettings" has been replaced by "xVSS_context->pCurrentEditSettings"
 ***/
M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
    M4OSA_ERR err;
    M4VIDEOEDITING_ClipProperties fileProperties;

    /**
     * Allocate audio mixing settings structure and fill it with BGM parameters.
     * Ownership is transferred to the xVSS context below (pAudioMixSettings) and
     * released in M4xVSS_internalCloseAudioMixedFile. */
    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
    if(pAudioMixSettings == M4OSA_NULL)
    {
        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
        return M4ERR_ALLOC;
    }

    /* For a 3GPP BGM track, probe its real audio stream type; if that fails,
     * fall back to a type that forces re-encoding of the background music. */
    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
         M4VIDEOEDITING_kFileType_3GPP)
    {
        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
                 &fileProperties);
        if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
                 impossible to retrieve audio BGM properties ->\
                     reencoding audio background music", err);
            fileProperties.AudioStreamType =
                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
                 /* To force BGM encoding */
        }
    }

    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
    pAudioMixSettings->AddedAudioFileType =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
    pAudioMixSettings->pAddedAudioTrackFile =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
    pAudioMixSettings->uiAddVolume =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;

    /* NOTE(review): the following fields still read xVSS_context->pSettings although
     * the FB note above states all accesses were migrated to pCurrentEditSettings —
     * confirm this mix is intentional. */
    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
    pAudioMixSettings->uiSamplingFrequency =
         xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;

    pAudioMixSettings->b_DuckingNeedeed =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
    /* Volume levels are expressed as 0..100 percentages and converted to 0..1 floats */
    pAudioMixSettings->fBTVolLevel =
         (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
    pAudioMixSettings->InDucking_threshold =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
    /* NOTE(review): no float cast here unlike fBTVolLevel/fPTVolLevel — if lowVolume
     * is an integer type this is integer division; confirm intended. */
    pAudioMixSettings->InDucking_lowVolume =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
    pAudioMixSettings->fPTVolLevel =
         (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;

    if(xVSS_context->pSettings->xVSS.bAudioMono)
    {
        pAudioMixSettings->outputNBChannels = 1;
    }
    else
    {
        pAudioMixSettings->outputNBChannels = 2;
    }

    /**
     * Fill audio mix settings with BGM parameters */
    pAudioMixSettings->uiBeginLoop =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
    pAudioMixSettings->uiEndLoop =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
    pAudioMixSettings->uiAddCts =
         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;

    /**
     * Output file of the audio mixer will be final file (audio mixing is the last step) */
    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;

    /**
     * Input file of the audio mixer is a temporary file containing all audio/video editions */
    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;

    /**
     * Save audio mixing settings pointer to be able to free it in
     M4xVSS_internalCloseAudioMixedFile function */
    xVSS_context->pAudioMixSettings = pAudioMixSettings;

    /**
     * Create a VSS 3GPP audio mixing instance */
    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);

    /**
     * Save audio mixing context to be able to call audio mixing step function in
     M4xVSS_step function (stored even on error so close can clean it up) */
    xVSS_context->pAudioMixContext = pAudioMixingCtxt;

    if (err != M4NO_ERROR)
    {
        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
        return err;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
 *
 * @brief    This function cleans up VSS for audio mixing
 * @note
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context =
(M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /**
     * Free this VSS3GPP audio mixing instance
     * (created in M4xVSS_internalGenerateAudioMixFile).
     * NOTE(review): pAudioMixContext is not reset to M4OSA_NULL after the
     * cleanup — presumably callers never reuse it; confirm. */
    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
    {
        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
        if (err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
            return err;
        }
    }

    /**
     * Free VSS audio mixing settings allocated in
     * M4xVSS_internalGenerateAudioMixFile */
    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
    {
        free(xVSS_context->pAudioMixSettings);
        xVSS_context->pAudioMixSettings = M4OSA_NULL;
    }

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
 *
 * @brief    This function cleans up preview edition structure used to generate
 *            preview.3gp file given to the VPS
 * @note        It also free the preview structure given to the VPS
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_UInt8 i;

    /**
     * Free clip/transition settings */
    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
    {
        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);

        free((xVSS_context->pCurrentEditSettings->pClipList[i]));
        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;

        /**
         * Because there is 1 less transition than clip number */
        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
        {
            free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
        }
    }

    /**
     * Free clip/transition list */
    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
    {
        free((xVSS_context->pCurrentEditSettings->pClipList));
        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
    }
    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
    {
        free((xVSS_context->pCurrentEditSettings->pTransitionList));
        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
    }

    /**
     * Free output preview file path */
    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
    {
        free(xVSS_context->pCurrentEditSettings->pOutputFile);
        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
    }

    /**
     * Free temporary preview file path
     * (the temporary file itself is deleted from disk first) */
    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
    {
        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
        free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
    }

    /**
     * Free "local" BGM settings */
    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
    {
        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
        {
            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
        }
        free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
    }

    /**
     * Free current edit settings structure */
    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
    {
        free(xVSS_context->pCurrentEditSettings);
        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
    }

    /**
     * Free preview effects given to application */
    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
    {
        free(xVSS_context->pPreviewSettings->Effects);
        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
        xVSS_context->pPreviewSettings->nbEffects = 0;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
 *
 * @brief    This function cleans up saving edition structure used to generate
 *            output.3gp file given to the VPS
 * @note
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_UInt8 i;

    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
    {
        /**
         * Free clip/transition settings */
        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
        {
            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);

            free((xVSS_context->pCurrentEditSettings->pClipList[i]));
            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;

            /**
             * Because there is 1 less transition than clip number */
            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
            {
                free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }

        /**
         * Free clip/transition list */
        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
        {
free((xVSS_context->pCurrentEditSettings->pClipList));
            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
        }
        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
        {
            free((xVSS_context->pCurrentEditSettings->pTransitionList));
            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
        }

        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
        {
            free((xVSS_context->pCurrentEditSettings->Effects));
            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->nbEffects = 0;
        }

        /**
         * Free output saving file path.
         * NOTE(review): the file is only removed/freed when a BGM track exists —
         * presumably because in that case it is an intermediate (pre-audio-mix)
         * file, while otherwise it aliases xVSS_context->pOutputFile freed just
         * below; confirm the aliasing assumption. */
        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
            {
                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
                free(xVSS_context->pCurrentEditSettings->pOutputFile);
            }
            if(xVSS_context->pOutputFile != M4OSA_NULL)
            {
                free(xVSS_context->pOutputFile);
                xVSS_context->pOutputFile = M4OSA_NULL;
            }
            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
        }

        /**
         * Free temporary saving file path
         * (the temporary file itself is deleted from disk first) */
        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
        {
            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
            free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
        }

        /**
         * Free "local" BGM settings */
        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
        {
            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
            {
                free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
            }
            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
        }

        /**
         * Free current edit settings structure */
        free(xVSS_context->pCurrentEditSettings);
        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
 *
 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
 * @note
 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
 *
 * @return    M4NO_ERROR: No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
{
    M4OSA_UInt8 i,j;

    /**
     * For each clip ... */
    for(i=0; i<pSettings->uiClipNumber; i++)
    {
        /**
         * ... free clip settings */
        if(pSettings->pClipList[i] != M4OSA_NULL)
        {
            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);

            free((pSettings->pClipList[i]));
            pSettings->pClipList[i] = M4OSA_NULL;
        }

        /**
         * ...
free transition settings */
        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
        {
            if(pSettings->pTransitionList[i] != M4OSA_NULL)
            {
                switch (pSettings->pTransitionList[i]->VideoTransitionType)
                {
                    case M4xVSS_kVideoTransitionType_AlphaMagic:

                        /**
                         * In case of Alpha Magic transition,
                         some extra parameters need to be freed */
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt
                             != M4OSA_NULL)
                        {
                            /* Free the alpha-mask plane data, then the plane,
                             * then the external transition context itself */
                            free((((M4xVSS_internal_AlphaMagicSettings*)
                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->
                                    pPlane->pac_data));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i
                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;

                            free((((M4xVSS_internal_AlphaMagicSettings*)
                                pSettings->pTransitionList[i]->
                                    pExtVideoTransitionFctCtxt)->pPlane));
                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]
                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;

                            free((pSettings->pTransitionList[i]->
                                pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;

                            /* Later alpha-magic transitions using the same alpha file
                             * share this context: NULL them out now so the generic
                             * free below does not free the same pointer twice */
                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
                            {
                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
                                {
                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
                                     M4xVSS_kVideoTransitionType_AlphaMagic)
                                    {
                                        M4OSA_UInt32 pCmpResult=0;
                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->
                                            xVSS.transitionSpecific.pAlphaMagicSettings->
                                                pAlphaFilePath,
                                                (const char *)pSettings->pTransitionList[j]->
                                                xVSS.transitionSpecific.pAlphaMagicSettings->
                                                pAlphaFilePath);
                                        if(pCmpResult == 0)
                                        {
                                            /* Free extra internal alpha magic structure and put
                                            it to NULL to avoid refreeing it */
                                            free((pSettings->
                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
                                            pSettings->pTransitionList[j]->
                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
                                        }
                                    }
                                }
                            }
                        }

                        if(pSettings->pTransitionList[i]->
                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
                        {
                            if(pSettings->pTransitionList[i]->
                                xVSS.transitionSpecific.pAlphaMagicSettings->
                                    pAlphaFilePath != M4OSA_NULL)
                            {
                                free(pSettings->
                                    pTransitionList[i]->
                                        xVSS.transitionSpecific.pAlphaMagicSettings->
                                            pAlphaFilePath);
                                pSettings->pTransitionList[i]->
                                    xVSS.transitionSpecific.pAlphaMagicSettings->
                                        pAlphaFilePath = M4OSA_NULL;
                            }
                            free(pSettings->pTransitionList[i]->
                                xVSS.transitionSpecific.pAlphaMagicSettings);
                            pSettings->pTransitionList[i]->
                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;

                        }

                        break;


                    case M4xVSS_kVideoTransitionType_SlideTransition:
                        if (M4OSA_NULL != pSettings->pTransitionList[i]->
                            xVSS.transitionSpecific.pSlideTransitionSettings)
                        {
                            free(pSettings->pTransitionList[i]->
                                xVSS.transitionSpecific.pSlideTransitionSettings);
                            pSettings->pTransitionList[i]->
                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
                        }
                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
                        {
                            free((pSettings->pTransitionList[i]->
                                pExtVideoTransitionFctCtxt));
                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
                        }
                        break;
                    default:
                        break;

                }
                /**
                 * Free transition settings structure */
                free((pSettings->pTransitionList[i]));
                pSettings->pTransitionList[i] = M4OSA_NULL;
            }
        }
    }

    /**
     * Free clip list */
    if(pSettings->pClipList != M4OSA_NULL)
    {
        free((pSettings->pClipList));
        pSettings->pClipList = M4OSA_NULL;
    }

    /**
     * Free transition list */
    if(pSettings->pTransitionList != M4OSA_NULL)
    {
        free((pSettings->pTransitionList));
        pSettings->pTransitionList = M4OSA_NULL;
    }

    /**
     * RC: Free effects list */
    if(pSettings->Effects != M4OSA_NULL)
    {
        for(i=0; i<pSettings->nbEffects; i++)
        {
            /**
             * For each clip, free framing structure if needed */
            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
            {
#ifdef DECODE_GIF_ON_SAVING
                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
#else
                /* NOTE(review): framingCtx_first is declared as
                 * M4xVSS_Framing3102Struct* but initialized from a
                 * M4xVSS_FramingStruct* — confirm these types are compatible */
                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
                M4xVSS_FramingStruct* framingCtx_save;
                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
#endif

#ifdef DECODE_GIF_ON_SAVING
                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non existant
                    pointer */
                {
                    if(framingCtx->aFramingCtx != M4OSA_NULL)
                    {
                        {
                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
                            {
                                free(framingCtx->aFramingCtx->
                                    FramingRgb->pac_data);
                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
                                free(framingCtx->aFramingCtx->FramingRgb);
                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
                            }
                        }
                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
                        {
                            free(framingCtx->aFramingCtx->
                                FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            free(framingCtx->aFramingCtx->
                                FramingYuv[1].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
                            free(framingCtx->aFramingCtx->
                                FramingYuv[2].pac_data);
                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
                            free(framingCtx->aFramingCtx->FramingYuv);
                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
                        }
                        free(framingCtx->aFramingCtx);
                        framingCtx->aFramingCtx = M4OSA_NULL;
                    }
                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
                    {
                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
                        {
                            free(framingCtx->aFramingCtx_last->
                                FramingRgb->pac_data);
                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
                            free(framingCtx->aFramingCtx_last->
                                FramingRgb);
                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
                        }
                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
                        {
                            /* NOTE(review): only plane 0's pac_data is freed here,
                             * unlike the three planes freed for aFramingCtx above —
                             * presumably the planes share one buffer; confirm */
                            free(framingCtx->aFramingCtx_last->
                                FramingYuv[0].pac_data);
                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
                            free(framingCtx->aFramingCtx_last->FramingYuv);
                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
                        }
                        free(framingCtx->aFramingCtx_last);
                        framingCtx->aFramingCtx_last = M4OSA_NULL;
                    }
                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
                    {
                        free(framingCtx->pEffectFilePath);
                        framingCtx->pEffectFilePath = M4OSA_NULL;
                    }
                    /*In case there are still allocated*/
                    if(framingCtx->pSPSContext != M4OSA_NULL)
                    {
                    //    M4SPS_destroy(framingCtx->pSPSContext);
                        framingCtx->pSPSContext = M4OSA_NULL;
                    }
                    /*Alpha blending structure*/
                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
                    {
                        free(framingCtx->alphaBlendingStruct);
                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
                    }

                    free(framingCtx);
                    framingCtx = M4OSA_NULL;
                }
#else
                /* Walk the circular framing list, freeing each element until we
                 * come back around to the first one */
                do
                {
                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non
                        existant pointer */
                    {
                        if(framingCtx->FramingRgb != M4OSA_NULL)
                        {
                            free(framingCtx->FramingRgb->pac_data);
                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
                            free(framingCtx->FramingRgb);
                            framingCtx->FramingRgb = M4OSA_NULL;
                        }
                        if(framingCtx->FramingYuv != M4OSA_NULL)
                        {
                            free(framingCtx->FramingYuv[0].pac_data);
                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
                            free(framingCtx->FramingYuv);
                            framingCtx->FramingYuv = M4OSA_NULL;
                        }
                        framingCtx_save = framingCtx->pNext;
                        free(framingCtx);
                        framingCtx = M4OSA_NULL;
                        framingCtx = framingCtx_save;
                    }
                    else
                    {
                        /*FB: bug fix P4ME00003002*/
                        break;
                    }
                } while(framingCtx_first != framingCtx);
#endif
            }
            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
            {
                /* Free Fifties context */
                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(FiftiesCtx != M4OSA_NULL)
                {
                    free(FiftiesCtx);
                    FiftiesCtx = M4OSA_NULL;
                }

            }
            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
            {
                /* Free Color context */
                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;

                if(ColorCtx != M4OSA_NULL)
                {
                    free(ColorCtx);
                    ColorCtx = M4OSA_NULL;
                }
            }

            /* Free simple fields */
            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
            {
                free(pSettings->Effects[i].xVSS.pFramingFilePath);
                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
            {
                free(pSettings->Effects[i].xVSS.pFramingBuffer);
                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
            }
            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
            {
                free(pSettings->Effects[i].xVSS.pTextBuffer);
                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
            }
        }
        free(pSettings->Effects);
        pSettings->Effects = M4OSA_NULL;
    }

    return M4NO_ERROR;
}

M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
//    M4OSA_UInt8 i,j;

    /* Free "local" BGM settings */
    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
    {
        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
        {
            free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
        }
        free(xVSS_context->pSettings->xVSS.pBGMtrack);
        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
    }

    M4xVSS_freeSettings(xVSS_context->pSettings);

    /* Free the picture-to-3GPP conversion parameter list, deleting the
     * temporary files that were generated on disk */
    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
    {
        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
        M4xVSS_Pto3GPP_params* pParams_sauv;

        while(pParams != M4OSA_NULL)
        {
            if(pParams->pFileIn != M4OSA_NULL)
            {
                free(pParams->pFileIn);
                pParams->pFileIn = M4OSA_NULL;
            }
            if(pParams->pFileOut != M4OSA_NULL)
            {
                /* Delete temporary file */
                remove((const char *)pParams->pFileOut);
                free(pParams->pFileOut);
                pParams->pFileOut = M4OSA_NULL;
            }
            if(pParams->pFileTemp != M4OSA_NULL)
            {
                /* Delete temporary file */
#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
                remove((const char *)pParams->pFileTemp);
                free(pParams->pFileTemp);
#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
                pParams->pFileTemp = M4OSA_NULL;
            }
            pParams_sauv = pParams;
            pParams = pParams->pNext;
            free(pParams_sauv);
            pParams_sauv = M4OSA_NULL;
        }
    }

    /* Free the MCS (transcoding) parameter list the same way */
    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
    {
        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
        M4xVSS_MCS_params* pParams_sauv;

        while(pParams != M4OSA_NULL)
        {
            if(pParams->pFileIn != M4OSA_NULL)
            {
                free(pParams->pFileIn);
                pParams->pFileIn = M4OSA_NULL;
            }
            if(pParams->pFileOut != M4OSA_NULL)
            {
                /* Delete temporary file */
                remove((const char *)pParams->pFileOut);
                free(pParams->pFileOut);
                pParams->pFileOut = M4OSA_NULL;
            }
            if(pParams->pFileTemp != M4OSA_NULL)
            {
                /* Delete temporary file */
#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
                remove((const char *)pParams->pFileTemp);
                free(pParams->pFileTemp);
#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
                pParams->pFileTemp = M4OSA_NULL;
            }
            pParams_sauv = pParams;
            pParams = pParams->pNext;
            free(pParams_sauv);
            pParams_sauv = M4OSA_NULL;
        }
    }

    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
    {
        free(xVSS_context->pcmPreviewFile);
        xVSS_context->pcmPreviewFile = M4OSA_NULL;
    }
    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
        && xVSS_context->pOutputFile != M4OSA_NULL)
    {
        free(xVSS_context->pSettings->pOutputFile);
        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
        xVSS_context->pOutputFile = M4OSA_NULL;
    }

    /* Reinit all context variables */
    xVSS_context->previousClipNumber = 0;
    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
    xVSS_context->pMCSparamsList = M4OSA_NULL;
    xVSS_context->pMCScurrentParams = M4OSA_NULL;
    xVSS_context->tempFileIndex = 0;
    xVSS_context->targetedTimescale = 0;

    return M4NO_ERROR;
}

/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
 *                                                     M4OSA_Char* pFile,
 *                                                     M4VIDEOEDITING_ClipProperties *pFileProperties)
 *
3411 * @brief This function retrieve properties of an input 3GP file using MCS 3412 * @note 3413 * @param pContext (IN) The integrator own context 3414 * @param pFile (IN) 3GP file to analyse 3415 * @param pFileProperties (IN/OUT) Pointer on a structure that will contain 3416 * the 3GP file properties 3417 * 3418 * @return M4NO_ERROR: No error 3419 * @return M4ERR_PARAMETER: At least one of the function parameters is null 3420 ****************************************************************************** 3421 */ 3422M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile, 3423 M4VIDEOEDITING_ClipProperties *pFileProperties) 3424{ 3425 M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext; 3426 M4OSA_ERR err; 3427 M4MCS_Context mcs_context; 3428 3429 err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr); 3430 if(err != M4NO_ERROR) 3431 { 3432 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err); 3433 return err; 3434 } 3435 3436 /*open the MCS in the "normal opening" mode to retrieve the exact duration*/ 3437 err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP, 3438 M4OSA_NULL, M4OSA_NULL); 3439 if (err != M4NO_ERROR) 3440 { 3441 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err); 3442 M4MCS_abort(mcs_context); 3443 return err; 3444 } 3445 3446 err = M4MCS_getInputFileProperties(mcs_context, pFileProperties); 3447 if(err != M4NO_ERROR) 3448 { 3449 M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err); 3450 M4MCS_abort(mcs_context); 3451 return err; 3452 } 3453 3454 err = M4MCS_abort(mcs_context); 3455 if (err != M4NO_ERROR) 3456 { 3457 M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err); 3458 return err; 3459 } 3460 3461 return M4NO_ERROR; 3462} 3463 3464 3465/** 3466 ****************************************************************************** 3467 * prototype M4OSA_ERR 
M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
 *                                                M4OSA_UInt32* pTargetedTimeScale)
 *
 * @brief    This function retrieve targeted time scale
 * @note    The timescale of the longest MPEG-4 clip is used; a floor of 30 is
 *          applied, and ARGB8888 (still picture) input forces 30 directly.
 * @param    pContext            (IN)    The integrator own context
 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
 *
 * @return    M4NO_ERROR:    No error
 * @return    M4ERR_PARAMETER:    At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
                                                M4VSS3GPP_EditSettings* pSettings,
                                                M4OSA_UInt32* pTargetedTimeScale)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;
    M4OSA_UInt32 totalDuration = 0;   /* NOTE(review): never used below */
    M4OSA_UInt8 i = 0;
    /* tempTimeScale tracks the video timescale of the longest clip seen so far */
    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;

    for(i=0;i<pSettings->uiClipNumber;i++)
    {
        /*search timescale only in mpeg4 case*/
        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
        {
            M4VIDEOEDITING_ClipProperties fileProperties;

            /*UTF conversion support*/
            M4OSA_Char* pDecodedPath = M4OSA_NULL;

            /**
             * UTF conversion: convert into the customer format, before being used*/
            pDecodedPath = pSettings->pClipList[i]->pFile;

            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
            {
                M4OSA_UInt32 length = 0;
                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
                    (M4OSA_Void*) pSettings->pClipList[i]->pFile,
                    (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                    &length);
                if(err != M4NO_ERROR)
                {
                    M4OSA_TRACE1_1("M4xVSS_Init:\
                         M4xVSS_internalConvertToUTF8 returns err: 0x%x",err);
                    return err;
                }
                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
            }

            /*End of the conversion: use the decoded path*/
            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);

            /*get input file properties*/
            /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
                pClipList[i]->pFile, &fileProperties);*/
            if(M4NO_ERROR != err)
            {
                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
                    M4xVSS_internalGetProperties returned: 0x%x", err);
                return err;
            }
            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
            {
                /* An end-cut of 0 means "play to the end of the clip":
                 * use the real clip duration instead of the cut window */
                if(pSettings->pClipList[i]->uiEndCutTime > 0)
                {
                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime
                        - pSettings->pClipList[i]->uiBeginCutTime))
                    {
                        tempTimeScale = fileProperties.uiVideoTimeScale;
                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime
                            - pSettings->pClipList[i]->uiBeginCutTime);
                    }
                }
                else
                {
                    if(tempDuration < (fileProperties.uiClipDuration
                        - pSettings->pClipList[i]->uiBeginCutTime))
                    {
                        tempTimeScale = fileProperties.uiVideoTimeScale;
                        tempDuration = (fileProperties.uiClipDuration
                            - pSettings->pClipList[i]->uiBeginCutTime);
                    }
                }
            }
        }
        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
        {
            /*the timescale is 30 for PTO3GP*/
            *pTargetedTimeScale = 30;
            return M4NO_ERROR;

        }
    }

    if(tempTimeScale >= 30)/*Define a minimum time scale, otherwise if the timescale is not
                           enough, there will be an infinite loop in the shell encoder*/
    {
        *pTargetedTimeScale = tempTimeScale;
    }
    else
    {
        *pTargetedTimeScale = 30;
    }

    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void
*pFunctionContext,
 *                  M4VIFI_ImagePlane *PlaneIn,
 *                  M4VIFI_ImagePlane *PlaneOut,
 *                  M4VSS3GPP_ExternalProgress *pProgress,
 *                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function apply a color effect on an input YUV420 planar frame
 * @note     Luma is left untouched (except Negative); only the chroma planes are
 *           overwritten according to the selected effect.
 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
 * @param    PlaneIn         (IN) Input YUV420 planar
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return   M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
                                             M4VIFI_ImagePlane *PlaneIn,
                                             M4VIFI_ImagePlane *PlaneOut,
                                             M4VSS3GPP_ExternalProgress *pProgress,
                                             M4OSA_UInt32 uiEffectKind)
{
    M4VIFI_Int32 plane_number;
    M4VIFI_UInt32 i,j;
    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;

    /* Process the three YUV420 planes row by row. */
    for (plane_number = 0; plane_number < 3; plane_number++)
    {
        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
        {
            /**
             * Chrominance: each effect writes a constant value over the whole row */
            if(plane_number==1 || plane_number==2)
            {
                //switch ((M4OSA_UInt32)pFunctionContext)
                // commented because a structure for the effects context exist
                switch (ColorContext->colorEffectType)
                {
                    case M4xVSS_kVideoEffectType_BlackAndWhite:
                        /* neutral chroma (128) -> greyscale */
                        memset((void *)p_buf_dest,128,
                             PlaneIn[plane_number].u_width);
                        break;
                    case M4xVSS_kVideoEffectType_Pink:
                        memset((void *)p_buf_dest,255,
                             PlaneIn[plane_number].u_width);
                        break;
                    case M4xVSS_kVideoEffectType_Green:
                        memset((void *)p_buf_dest,0,
                             PlaneIn[plane_number].u_width);
                        break;
                    case M4xVSS_kVideoEffectType_Sepia:
                        /* fixed U=117 / V=139 pair gives the sepia tint */
                        if(plane_number==1)
                        {
                            memset((void *)p_buf_dest,117,
                                 PlaneIn[plane_number].u_width);
                        }
                        else
                        {
                            memset((void *)p_buf_dest,139,
                                 PlaneIn[plane_number].u_width);
                        }
                        break;
                    case M4xVSS_kVideoEffectType_Negative:
                        /* chroma is kept as-is; only luma is inverted below */
                        memcpy((void *)p_buf_dest,
                             (void *)p_buf_src ,PlaneOut[plane_number].u_width);
                        break;

                    case M4xVSS_kVideoEffectType_ColorRGB16:
                    {
                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

                        /*first get the r, g, b from the packed RGB565 word*/
                        b = (ColorContext->rgb16ColorData &  0x001f);
                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

                        /*keep y, but replace u and v*/
                        if(plane_number==1)
                        {
                            /*then convert to u*/
                            u = U16(r, g, b);
                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,
                                 PlaneIn[plane_number].u_width);
                        }
                        if(plane_number==2)
                        {
                            /*then convert to v*/
                            v = V16(r, g, b);
                            memset((void *)p_buf_dest, (M4OSA_UInt8)v,
                                 PlaneIn[plane_number].u_width);
                        }
                    }
                        break;
                    case M4xVSS_kVideoEffectType_Gradient:
                    {
                        M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;

                        /*first get the r, g, b from the packed RGB565 word*/
                        b = (ColorContext->rgb16ColorData &  0x001f);
                        g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
                        r = (ColorContext->rgb16ColorData &  0xf800)>>11;

                        /*for color gradation: fade each component with the row index*/
                        b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
                        g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
                        r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));

                        /*keep y, but replace u and v*/
                        if(plane_number==1)
                        {
                            /*then convert to u*/
                            u = U16(r, g, b);
                            memset((void *)p_buf_dest,(M4OSA_UInt8)u,
                                 PlaneIn[plane_number].u_width);
                        }
                        if(plane_number==2)
                        {
                            /*then convert to v*/
                            v = V16(r, g, b);
                            memset((void *)p_buf_dest,(M4OSA_UInt8)v,
                                 PlaneIn[plane_number].u_width);
                        }
                    }
                        break;
                    default:
                        break;
                }
            }
            /**
             * Luminance */
            else
            {
                //switch ((M4OSA_UInt32)pFunctionContext)
                // commented because a structure for the effects context exist
                switch (ColorContext->colorEffectType)
                {
                    case M4xVSS_kVideoEffectType_Negative:
                        for(j=0;j<PlaneOut[plane_number].u_width;j++)
                        {
                            p_buf_dest[j] = 255 - p_buf_src[j];
                        }
                        break;
                    default:
                        /* all other effects keep luma unchanged */
                        memcpy((void *)p_buf_dest,
                             (void *)p_buf_src ,PlaneOut[plane_number].u_width);
                        break;
                }
            }
            p_buf_src += PlaneIn[plane_number].u_stride;
            p_buf_dest += PlaneOut[plane_number].u_stride;
        }
    }

    return M4VIFI_OK;
}

/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
 *                  M4VIFI_ImagePlane *PlaneIn,
 *                  M4VIFI_ImagePlane *PlaneOut,
 *                  M4VSS3GPP_ExternalProgress *pProgress,
 *                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function add a fixed or animated image on an input YUV420 planar frame
 * @note     The overlay is alpha-blended; the RGB565 value TRANSPARENT_COLOR in the
 *           framing bitmap marks fully transparent pixels.
 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
 * @param    PlaneIn         (IN) Input YUV420 planar
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return   M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
                                                M4VIFI_ImagePlane PlaneIn[3],
                                                M4VIFI_ImagePlane *PlaneOut,
                                                M4VSS3GPP_ExternalProgress *pProgress,
                                                M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x,y;

    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;

    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;

    M4VIFI_UInt32 topleft[2];

    /* high/low byte of the RGB565 transparency key, compared against the raw
       framing bitmap bytes below */
    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;

#ifndef DECODE_GIF_ON_SAVING
    Framing = (M4xVSS_FramingStruct *)userData;
    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/

    /*FB*/
#ifdef DECODE_GIF_ON_SAVING
    M4OSA_ERR err;
    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
    currentFraming = (M4xVSS_FramingStruct *)Framing;
    FramingRGB = Framing->FramingRgb->pac_data;
#endif /*DECODE_GIF_ON_SAVING*/
    /*end FB*/

    /**
     * Initialize input / output plane pointers */
    p_in_Y += PlaneIn[0].u_topleft;
    p_in_U += PlaneIn[1].u_topleft;
    p_in_V += PlaneIn[2].u_topleft;

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    /**
     * Depending on time, initialize Framing frame to use */
    if(Framing->previousClipTime == -1)
    {
        Framing->previousClipTime = pProgress->uiOutputTime;
    }

    /**
     * If the current clip time has reach the duration of one frame of the framing picture
     * we need to step to next framing picture */

    Framing->previousClipTime = pProgress->uiOutputTime;
    FramingRGB = currentFraming->FramingRgb->pac_data;
    topleft[0] = currentFraming->topleft_x;
    topleft[1] = currentFraming->topleft_y;

    for( x=0 ;x < PlaneIn[0].u_height ; x++)
    {
        for( y=0 ;y < PlaneIn[0].u_width ; y++)
        {
            /**
             * To handle framing with input size != output size
             * Framing is applyed if coordinates matches between framing/topleft and
             * input plane */
            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
                y >= topleft[0] &&
                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
                x >= topleft[1])
            {
                /*Alpha blending support: compute the blend factor for the current
                  progress — ramp up during fade-in, hold m_middle, ramp down during
                  fade-out. Progress is on a 0-1000 scale. */
                M4OSA_Float alphaBlending = 1;
                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
                 (M4xVSS_internalEffectsAlphaBlending*)\
                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;

                if(alphaBlendingStruct != M4OSA_NULL)
                {
                    if(pProgress->uiProgress \
                        < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
                    {
                        /* fade-in phase */
                        if(alphaBlendingStruct->m_fadeInTime == 0) {
                            alphaBlending = alphaBlendingStruct->m_start / 100;
                        } else {
                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
                             - alphaBlendingStruct->m_start)\
                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
                            alphaBlending += alphaBlendingStruct->m_start;
                            alphaBlending /= 100;
                        }
                    }
                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        /* steady phase between fade-in and fade-out */
                        alphaBlending = (M4OSA_Float)\
                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
                    }
                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
                    (alphaBlendingStruct->m_fadeOutTime*10))
                    {
                        /* fade-out phase */
                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
                            alphaBlending = alphaBlendingStruct->m_end / 100;
                        } else {
                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
                            /(alphaBlendingStruct->m_fadeOutTime*10);
                            alphaBlending += alphaBlendingStruct->m_end;
                            alphaBlending /= 100;
                        }
                    }
                }
                /**/

                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
                {
                    /* transparent framing pixel: keep the input video pixel */
                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
                }
                else
                {
                    /* blend framing pixel over video pixel with alphaBlending */
                    *( p_out0+y+x*PlaneOut[0].u_stride)=
                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
                                *alphaBlending;
                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
                                *alphaBlending;
                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
                }
                /* advance the RGB565 cursor; when the framing bitmap is wider than the
                   input plane, skip the part of the framing row that falls outside */
                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
                    y == PlaneIn[0].u_width-1)
                {
                    FramingRGB = FramingRGB + 2 \
                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
                            - PlaneIn[0].u_width + 1);
                }
                else
                {
                    FramingRGB = FramingRGB + 2;
                }
            }
            /**
             * Just copy input plane to output plane */
            else
            {
                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
            }
        }
    }


    return M4VIFI_OK;
}


/**
 ******************************************************************************
 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
 *                  M4VIFI_ImagePlane *PlaneIn,
 *                  M4VIFI_ImagePlane *PlaneOut,
 *                  M4VSS3GPP_ExternalProgress *pProgress,
 *                  M4OSA_UInt32 uiEffectKind)
 *
 * @brief    This function make a video look as if it was taken in the fifties
 * @note     Sepia chroma + random vertical stripe and horizontal jitter on luma.
 * @param    pUserData       (IN) Context
 * @param    pPlaneIn        (IN) Input YUV420 planar
 * @param    pPlaneOut       (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiEffectKind    (IN) Unused
 *
 * @return   M4VIFI_OK:          No error
 * @return   M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
                                                M4VIFI_ImagePlane *pPlaneIn,
                                                M4VIFI_ImagePlane *pPlaneOut,
                                                M4VSS3GPP_ExternalProgress *pProgress,
                                                M4OSA_UInt32 uiEffectKind )
{
    M4VIFI_UInt32 x, y, xShift;
    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
    M4VIFI_UInt8 *pOutY, *pInYbegin;
    M4VIFI_UInt8 *pInCr,* pOutCr;
M4VIFI_Int32 plane_number; 3973 3974 /* Internal context*/ 3975 M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData; 3976 3977 /* Check the inputs (debug only) */ 3978 M4OSA_DEBUG_IF2((pFiftiesData == M4OSA_NULL),M4ERR_PARAMETER, 3979 "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 3980 M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER, 3981 "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 3982 M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER, 3983 "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties"); 3984 3985 /* Initialize input / output plane pointers */ 3986 pInY += pPlaneIn[0].u_topleft; 3987 pOutY = pPlaneOut[0].pac_data; 3988 pInYbegin = pInY; 3989 3990 /* Initialize the random */ 3991 if(p_FiftiesData->previousClipTime < 0) 3992 { 3993 M4OSA_randInit(); 3994 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 3995 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 3996 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 3997 } 3998 3999 /* Choose random values if we have reached the duration of a partial effect */ 4000 else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\ 4001 > p_FiftiesData->fiftiesEffectDuration) 4002 { 4003 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4); 4004 M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2); 4005 p_FiftiesData->previousClipTime = pProgress->uiOutputTime; 4006 } 4007 4008 /* Put in Sepia the chrominance */ 4009 for (plane_number = 1; plane_number < 3; plane_number++) 4010 { 4011 pInCr = pPlaneIn[plane_number].pac_data + pPlaneIn[plane_number].u_topleft; 4012 pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft; 4013 4014 for (x = 0; x < pPlaneOut[plane_number].u_height; x++) 4015 { 4016 if (1 == 
plane_number) 4017 memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */ 4018 else 4019 memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */ 4020 4021 pInCr += pPlaneIn[plane_number].u_stride; 4022 pOutCr += pPlaneOut[plane_number].u_stride; 4023 } 4024 } 4025 4026 /* Compute the new pixels values */ 4027 for( x = 0 ; x < pPlaneIn[0].u_height ; x++) 4028 { 4029 M4VIFI_UInt8 *p_outYtmp, *p_inYtmp; 4030 4031 /* Compute the xShift (random value) */ 4032 if (0 == (p_FiftiesData->shiftRandomValue % 5 )) 4033 xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1); 4034 else 4035 xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \ 4036 % (pPlaneIn[0].u_height - 1); 4037 4038 /* Initialize the pointers */ 4039 p_outYtmp = pOutY + 1; /* yShift of 1 pixel */ 4040 p_inYtmp = pInYbegin + (xShift * pPlaneIn[0].u_stride); /* Apply the xShift */ 4041 4042 for( y = 0 ; y < pPlaneIn[0].u_width ; y++) 4043 { 4044 /* Set Y value */ 4045 if (xShift > (pPlaneIn[0].u_height - 4)) 4046 *p_outYtmp = 40; /* Add some horizontal black lines between the 4047 two parts of the image */ 4048 else if ( y == p_FiftiesData->stripeRandomValue) 4049 *p_outYtmp = 90; /* Add a random vertical line for the bulk */ 4050 else 4051 *p_outYtmp = *p_inYtmp; 4052 4053 4054 /* Go to the next pixel */ 4055 p_outYtmp++; 4056 p_inYtmp++; 4057 4058 /* Restart at the beginning of the line for the last pixel*/ 4059 if (y == (pPlaneIn[0].u_width - 2)) 4060 p_outYtmp = pOutY; 4061 } 4062 4063 /* Go to the next line */ 4064 pOutY += pPlaneOut[0].u_stride; 4065 } 4066 4067 return M4VIFI_OK; 4068} 4069 4070/** 4071 ****************************************************************************** 4072 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( ) 4073 * @brief Zoom in/out video effect functions. 
4074 * @note The external video function is used only if VideoEffectType is set to 4075 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut. 4076 * 4077 * @param pFunctionContext (IN) The function context, previously set by the integrator 4078 * @param pInputPlanes (IN) Input YUV420 image: pointer to an array of three valid 4079 * image planes (Y, U and V) 4080 * @param pOutputPlanes (IN/OUT) Output (filtered) YUV420 image: pointer to an array of 4081 * three valid image planes (Y, U and V) 4082 * @param pProgress (IN) Set of information about the video transition progress. 4083 * @return M4NO_ERROR: No error 4084 * @return M4ERR_PARAMETER: At least one parameter is M4OSA_NULL (debug only) 4085 ****************************************************************************** 4086 */ 4087 4088M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( 4089 M4OSA_Void *pFunctionContext, 4090 M4VIFI_ImagePlane *pInputPlanes, 4091 M4VIFI_ImagePlane *pOutputPlanes, 4092 M4VSS3GPP_ExternalProgress *pProgress, 4093 M4OSA_UInt32 uiEffectKind 4094) 4095{ 4096 M4OSA_UInt32 boxWidth; 4097 M4OSA_UInt32 boxHeight; 4098 M4OSA_UInt32 boxPosX; 4099 M4OSA_UInt32 boxPosY; 4100 M4OSA_UInt32 ratio = 0; 4101 /* * 1.189207 between ratio */ 4102 /* zoom between x1 and x16 */ 4103 M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\ 4104 6889,8192,9742,11585,13777,16384}; 4105 M4OSA_UInt32 ik; 4106 4107 M4VIFI_ImagePlane boxPlane[3]; 4108 4109 if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext) 4110 { 4111 //ratio = 16 - (15 * pProgress->uiProgress)/1000; 4112 ratio = 16 - pProgress->uiProgress / 66 ; 4113 } 4114 else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext) 4115 { 4116 //ratio = 1 + (15 * pProgress->uiProgress)/1000; 4117 ratio = 1 + pProgress->uiProgress / 66 ; 4118 } 4119 4120 for(ik=0;ik<3;ik++){ 4121 4122 boxPlane[ik].u_stride = pInputPlanes[ik].u_stride; 4123 boxPlane[ik].pac_data = 
pInputPlanes[ik].pac_data; 4124 4125 boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio]; 4126 boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio]; 4127 boxPlane[ik].u_height = (boxHeight)&(~1); 4128 boxPlane[ik].u_width = (boxWidth)&(~1); 4129 4130 boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1); 4131 boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1); 4132 boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX; 4133 } 4134 4135 M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes); 4136 4137 /** 4138 * Return */ 4139 return(M4NO_ERROR); 4140} 4141 4142/** 4143 ****************************************************************************** 4144 * prototype M4xVSS_AlphaMagic( M4OSA_Void *userData, 4145 * M4VIFI_ImagePlane PlaneIn1[3], 4146 * M4VIFI_ImagePlane PlaneIn2[3], 4147 * M4VIFI_ImagePlane *PlaneOut, 4148 * M4VSS3GPP_ExternalProgress *pProgress, 4149 * M4OSA_UInt32 uiTransitionKind) 4150 * 4151 * @brief This function apply a color effect on an input YUV420 planar frame 4152 * @note 4153 * @param userData (IN) Contains a pointer on a settings structure 4154 * @param PlaneIn1 (IN) Input YUV420 planar from video 1 4155 * @param PlaneIn2 (IN) Input YUV420 planar from video 2 4156 * @param PlaneOut (IN/OUT) Output YUV420 planar 4157 * @param pProgress (IN/OUT) Progress indication (0-100) 4158 * @param uiTransitionKind(IN) Unused 4159 * 4160 * @return M4VIFI_OK: No error 4161 ****************************************************************************** 4162 */ 4163M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3], 4164 M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut, 4165 M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind) 4166{ 4167 4168 M4OSA_ERR err; 4169 4170 M4xVSS_internal_AlphaMagicSettings* alphaContext; 4171 M4VIFI_Int32 alphaProgressLevel; 4172 4173 M4VIFI_ImagePlane* 
planeswap;
    M4VIFI_UInt32 x,y;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;
    M4VIFI_UInt8 *alphaMask;
    /* "Old image" */
    M4VIFI_UInt8 *p_in1_Y;
    M4VIFI_UInt8 *p_in1_U;
    M4VIFI_UInt8 *p_in1_V;
    /* "New image" */
    M4VIFI_UInt8 *p_in2_Y;
    M4VIFI_UInt8 *p_in2_U;
    M4VIFI_UInt8 *p_in2_V;

    err = M4NO_ERROR;

    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;

    /* normalize progress (0-1000) onto the 0-255 range of the alpha mask */
    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;

    if( alphaContext->isreverse != M4OSA_FALSE)
    {
        alphaProgressLevel = 255 - alphaProgressLevel;
        planeswap = PlaneIn1;
        PlaneIn1 = PlaneIn2;
        PlaneIn2 = planeswap;
    }

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    alphaMask = alphaContext->pPlane->pac_data;

    /* "Old image" */
    p_in1_Y = PlaneIn1[0].pac_data;
    p_in1_U = PlaneIn1[1].pac_data;
    p_in1_V = PlaneIn1[2].pac_data;
    /* "New image" */
    p_in2_Y = PlaneIn2[0].pac_data;
    p_in2_U = PlaneIn2[1].pac_data;
    p_in2_V = PlaneIn2[2].pac_data;

    /**
     * For each column ... */
    for( y=0; y<PlaneOut->u_height; y++ )
    {
        /**
         * ... and each row of the alpha mask */
        for( x=0; x<PlaneOut->u_width; x++ )
        {
            /**
             * If the value of the current pixel of the alpha mask is > to the current time
             * ( current time is normalized on [0-255] ) */
            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
            {
                /* We keep "old image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
            }
            else
            {
                /* We take "new image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
            }
        }
    }

    return(err);
}

/**
 ******************************************************************************
 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
 *                                         M4VIFI_ImagePlane PlaneIn1[3],
 *                                         M4VIFI_ImagePlane PlaneIn2[3],
 *                                         M4VIFI_ImagePlane *PlaneOut,
 *                                         M4VSS3GPP_ExternalProgress *pProgress,
 *                                         M4OSA_UInt32 uiTransitionKind)
 *
 * @brief    This function apply a color effect on an input YUV420 planar frame
 * @note     Same as M4xVSS_AlphaMagic but with a blending band of width
 *           2*blendingthreshold around the current mask level.
 * @param    userData        (IN) Contains a pointer on a settings structure
 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
 * @param    pProgress       (IN/OUT) Progress indication (0-100)
 * @param    uiTransitionKind(IN) Unused
 *
 * @return   M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
                                     M4VSS3GPP_ExternalProgress *pProgress,
                                     M4OSA_UInt32 uiTransitionKind)
{
    M4OSA_ERR err;

    M4xVSS_internal_AlphaMagicSettings* alphaContext;
    M4VIFI_Int32 alphaProgressLevel;
    M4VIFI_Int32 alphaBlendLevelMin;
    M4VIFI_Int32 alphaBlendLevelMax;
    M4VIFI_Int32 alphaBlendRange;

    M4VIFI_ImagePlane* planeswap;
    M4VIFI_UInt32 x,y;
    M4VIFI_Int32 alphaMaskValue;

    M4VIFI_UInt8 *p_out0;
    M4VIFI_UInt8 *p_out1;
    M4VIFI_UInt8 *p_out2;
    M4VIFI_UInt8 *alphaMask;
    /* "Old image" */
    M4VIFI_UInt8 *p_in1_Y;
    M4VIFI_UInt8 *p_in1_U;
    M4VIFI_UInt8 *p_in1_V;
    /* "New image" */
    M4VIFI_UInt8 *p_in2_Y;
    M4VIFI_UInt8 *p_in2_U;
    M4VIFI_UInt8 *p_in2_V;


    err = M4NO_ERROR;

    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;

    /* normalize progress (0-1000) onto the 0-255 range of the alpha mask */
    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;

    if( alphaContext->isreverse != M4OSA_FALSE)
    {
        alphaProgressLevel = 255 - alphaProgressLevel;
        planeswap = PlaneIn1;
        PlaneIn1 = PlaneIn2;
        PlaneIn2 = planeswap;
    }

    /* blending band: [level - threshold, level + threshold] */
    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;

    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;

    alphaBlendRange = (alphaContext->blendingthreshold)*2;

    p_out0 = PlaneOut[0].pac_data;
    p_out1 = PlaneOut[1].pac_data;
    p_out2 = PlaneOut[2].pac_data;

    alphaMask = alphaContext->pPlane->pac_data;

    /* "Old image" */
    p_in1_Y = PlaneIn1[0].pac_data;
    p_in1_U = PlaneIn1[1].pac_data;
    p_in1_V = PlaneIn1[2].pac_data;
    /* "New image" */
    p_in2_Y = PlaneIn2[0].pac_data;
    p_in2_U = PlaneIn2[1].pac_data;
    p_in2_V = PlaneIn2[2].pac_data;

    /* apply Alpha Magic on each pixel */
    for( y=0; y<PlaneOut->u_height; y++ )
    {
        for( x=0; x<PlaneOut->u_width; x++ )
        {
            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
            if( alphaBlendLevelMax < alphaMaskValue )
            {
                /* We keep "old image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
            }
            else if( (alphaBlendLevelMin < alphaMaskValue)&&
                    (alphaMaskValue <= alphaBlendLevelMax ) )
            {
                /* We blend "old and new image" in output plane, weighted by where
                   the mask value sits inside the blending band */
                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
                        +(alphaBlendLevelMax-alphaMaskValue)\
                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );

                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
                        *PlaneIn1[1].u_stride))
                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );

                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
                                *PlaneIn2[2].u_stride)) )/alphaBlendRange );

            }
            else
            {
                /* We take "new image" in output plane */
                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
            }
        }
    }

    return(err);
}

/* Address of sample (x, y) inside a plane, honoring u_topleft and u_stride */
#define M4XXX_SampleAddress(plane, x, y) ( \
(plane).pac_data + (plane).u_topleft + (y)\
                                            * (plane).u_stride + (x) )

/* Copy one whole image plane (dest-sized) row by row, honoring both strides. */
static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
{
    M4OSA_UInt32    height, width, sourceStride, destStride, y;
    M4OSA_MemAddr8    sourceWalk, destWalk;

    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
     recomputed from memory. */
    height = dest->u_height;
    width = dest->u_width;

    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
    sourceStride = source->u_stride;

    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
    destStride = dest->u_stride;

    for (y=0; y<height; y++)
    {
        memcpy((void *)destWalk, (void *)sourceWalk, width);
        destWalk += destStride;
        sourceWalk += sourceStride;
    }
}

/* Compose the output from the bottom part of topPlane and the top part of
   bottomPlane; shiftUV is the slide offset expressed in chroma rows. */
static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
                                                M4VIFI_ImagePlane* bottomPlane,
                                                M4VIFI_ImagePlane *PlaneOut,
                                                M4OSA_UInt32    shiftUV)
{
    M4OSA_UInt32 i;

    /* Do three loops, one for each plane type, in order to avoid having too many buffers
    "hot" at the same time (better for cache). */
    for (i=0; i<3; i++)
    {
        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
        M4OSA_MemAddr8    sourceWalk, destWalk;

        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
         recomputed from memory. */
        if (0 == i) /* Y plane */
        {
            bottomPartHeight = 2*shiftUV;   /* luma rows are twice the chroma shift */
        }
        else /* U and V planes */
        {
            bottomPartHeight = shiftUV;
        }
        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
        width = PlaneOut[i].u_width;

        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
        sourceStride = topPlane[i].u_stride;

        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
        destStride = PlaneOut[i].u_stride;

        /* First the part from the top source clip frame. */
        for (y=0; y<topPartHeight; y++)
        {
            memcpy((void *)destWalk, (void *)sourceWalk, width);
            destWalk += destStride;
            sourceWalk += sourceStride;
        }

        /* and now change the vars to copy the part from the bottom source clip frame. */
        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
        sourceStride = bottomPlane[i].u_stride;

        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */

        for (y=0; y<bottomPartHeight; y++)
        {
            memcpy((void *)destWalk, (void *)sourceWalk, width);
            destWalk += destStride;
            sourceWalk += sourceStride;
        }
    }
    return M4NO_ERROR;
}

/* Compose the output from the right part of leftPlane and the left part of
   rightPlane; shiftUV is the slide offset expressed in chroma columns. */
static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
                                                  M4VIFI_ImagePlane* rightPlane,
                                                  M4VIFI_ImagePlane *PlaneOut,
                                                  M4OSA_UInt32    shiftUV)
{
    M4OSA_UInt32 i, y;
    /* If we shifted by exactly 0, or by the width of the target image, then we would get the left
    frame or the right frame, respectively. These cases aren't handled too well by the general
    handling, since they result in 0-size memcopies, so might as well particularize them. */

    if (0 == shiftUV) /* output left frame */
    {
        for (i = 0; i<3; i++) /* for each YUV plane */
        {
            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
        }

        return M4NO_ERROR;
    }

    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
    {
        for (i = 0; i<3; i++) /* for each YUV plane */
        {
            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
        }

        return M4NO_ERROR;
    }


    /* Do three loops, one for each plane type, in order to avoid having too many buffers
    "hot" at the same time (better for cache). */
    for (i=0; i<3; i++)
    {
        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
        M4OSA_UInt32    leftStride,    rightStride,    destStride;
        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft,    destWalkRight;

        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
        and recomputed from memory. */
        height = PlaneOut[i].u_height;

        if (0 == i) /* Y plane */
        {
            rightPartWidth = 2*shiftUV;   /* luma columns are twice the chroma shift */
        }
        else /* U and V planes */
        {
            rightPartWidth = shiftUV;
        }
        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;

        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
        leftStride = leftPlane[i].u_stride;

        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
        rightStride = rightPlane[i].u_stride;

        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
        destStride = PlaneOut[i].u_stride;

        for (y=0; y<height; y++)
        {
            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
            leftWalk += leftStride;

            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
            rightWalk += rightStride;

            destWalkLeft += destStride;
            destWalkRight += destStride;
        }
    }

    return M4NO_ERROR;
}


/* Slide transition entry point: dispatch to the horizontal or vertical helper
   according to the configured direction. */
M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
                                  M4VSS3GPP_ExternalProgress *pProgress,
                                  M4OSA_UInt32 uiTransitionKind)
{
    M4xVSS_internal_SlideTransitionSettings* settings =
         (M4xVSS_internal_SlideTransitionSettings*)userData;
    M4OSA_UInt32    shiftUV;

    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
    {
        /* horizontal slide */
        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
             shiftUV,pProgress->uiProgress );
        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
        {
            /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV
            (since it's a shift from the left frame) so that we start out on the right
            (i.e. not left) frame, it
            being from the previous clip. */
            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
                 (PlaneOut[1]).u_width - shiftUV);
        }
        else /* Left out, right in*/
        {
            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
        }
    }
    else
    {
        /* vertical slide */
        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
             pProgress->uiProgress );
        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
        {
            /* Put the previous clip frame top, the next clip frame bottom.
 */
            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
        }
        else /* Bottom out, top in */
        {
            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
                (PlaneOut[1]).u_height - shiftUV);
        }
    }

    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
       at least in debug mode. */
}


/**
 ******************************************************************************
 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext,
 *                                         M4VIFI_ImagePlane *PlaneIn,
 *                                         M4VIFI_ImagePlane *PlaneOut,
 *                                         M4VSS3GPP_ExternalProgress *pProgress,
 *                                         M4OSA_UInt32 uiEffectKind)
 *
 * @brief   This function apply a fade to black and then a fade from black
 * @note
 * @param   pFunctionContext(IN) Contains which color to apply (not very clean ...)
 * @param   PlaneIn         (IN) Input YUV420 planar
 * @param   PlaneOut        (IN/OUT) Output YUV420 planar
 * @param   pProgress       (IN/OUT) Progress indication (0-1000; below 500 the
 *                          outgoing clip is faded to black, from 500 the
 *                          incoming clip is faded up from black)
 * @param   uiEffectKind    (IN) Unused
 *
 * @return  M4VIFI_OK: No error
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
                                     M4VIFI_ImagePlane PlaneIn2[3],
                                     M4VIFI_ImagePlane *PlaneOut,
                                     M4VSS3GPP_ExternalProgress *pProgress,
                                     M4OSA_UInt32 uiTransitionKind)
{
    M4OSA_Int32 tmp = 0;
    M4OSA_ERR err = M4NO_ERROR;

    /* First half of the transition: darken the outgoing clip (PlaneIn1). */
    if((pProgress->uiProgress) < 500)
    {
        /**
         * Compute where we are in the effect (scale is 0->1024) */
        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );

        /**
         * Apply the darkening effect */
        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
        if (M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
        }
    }
    else
    {
        /* Second half: brighten the incoming clip (PlaneIn2) up from black. */
        /**
         * Compute where we are in the effect (scale is 0->1024). */
        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );

        /**
         * Apply the darkening effect */
        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
        if (M4NO_ERROR != err)
        {
            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
                 M4VFL_modifyLumaWithScale returns error 0x%x,\
                 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
        }
    }


    return M4VIFI_OK;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
 *                                                     M4OSA_Void* pBufferIn,
 *                                                     M4OSA_Void* pBufferOut,
 *                                                     M4OSA_UInt32* convertedSize)
 *
 * @brief   This function convert from the customer format to UTF8
 * @note    NOTE(review): pBufferOut is passed by value, so the assignments to it
 *          inside this function are not visible to the caller; callers appear to
 *          read the converted text from the context's
 *          UTFConversionContext.pTempOutConversionBuffer instead -- confirm.
 * @param   pContext        (IN) The integrator own context
 * @param   pBufferIn       (IN) Buffer to convert
 * @param   pBufferOut      (OUT) Converted buffer
 * @param   convertedSize   (OUT) Size of the converted buffer; only written when
 *                          a conversion function and temp buffer are installed
 *
 * @return  M4NO_ERROR: No error
 * @return  M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /* Default: no conversion, "output" is the input (has no effect outside this
       function -- see NOTE above). */
    pBufferOut = pBufferIn;
    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
        &&
        xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* Start with the current capacity of the shared temp buffer; the conversion
           function updates ConvertedSize with the size it actually needs/produced. */
        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;

        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
            (M4OSA_UInt32*)&ConvertedSize);
        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
        {
            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);

            /*free too small buffer*/
            free(xVSS_context->UTFConversionContext.pTempOutConversionBuffer);

            /*re-allocate the buffer, sized as reported by the conversion function*/
            xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
                    (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
            {
                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
                return M4ERR_ALLOC;
            }
            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;

            memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0,
                (M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

            /* Retry the conversion with the enlarged buffer. */
            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                (M4OSA_UInt32*)&ConvertedSize);
            if(err != M4NO_ERROR)
            {
                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
                return err;
            }
        }
        else if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
            return err;
        }
        /*decoded path: expose the converted text and its size*/
        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
        (*convertedSize) = ConvertedSize;
    }
    return M4NO_ERROR;
}


/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext,
 *                                                       M4OSA_Void* pBufferIn,
 *                                                       M4OSA_Void* pBufferOut,
 *                                                       M4OSA_UInt32* convertedSize)
 *
 * @brief   This function convert from UTF8 to the customer format
 * @note    NOTE(review): pBufferOut is passed by value, so the assignments to it
 *          inside this function are not visible to the caller; callers appear to
 *          read the converted text from the context's
 *          UTFConversionContext.pTempOutConversionBuffer instead -- confirm.
 * @param   pContext        (IN) The integrator own context
 * @param   pBufferIn       (IN) Buffer to convert
 * @param   pBufferOut      (OUT) Converted buffer
 * @param   convertedSize   (OUT) Size of the converted buffer; only written when
 *                          a conversion function and temp buffer are installed
 *
 * @return  M4NO_ERROR: No error
 * @return  M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
                                         M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /* Default: no conversion (assignment not visible to the caller -- see NOTE). */
    pBufferOut = pBufferIn;
    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* Start with the current capacity of the shared temp buffer; the conversion
           function updates ConvertedSize with the size it actually needs/produced. */
        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;

        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0,
            (M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
            (M4OSA_UInt32*)&ConvertedSize);
        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
        {
            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);

            /*free too small buffer*/
            free(xVSS_context->UTFConversionContext.pTempOutConversionBuffer);

            /*re-allocate the buffer, sized as reported by the conversion function*/
            xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
                    (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
            {
                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
                return M4ERR_ALLOC;
            }
            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;

            memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0,
                (M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

            /* Retry the conversion with the enlarged buffer. */
            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                (M4OSA_UInt32*)&ConvertedSize);
            if(err != M4NO_ERROR)
            {
                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
                return err;
            }
        }
        else if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
            return err;
        }
        /*decoded path: expose the converted text and its size*/
        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
        (*convertedSize) = ConvertedSize;
    }


    return M4NO_ERROR;
}