M4xVSS_internal.c revision 2d461d443d4d0a35acb9cfd49cafad8941b17a0b
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4xVSS_internal.c
19 * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
20 * @note
21 ******************************************************************************
22 */
23#include "M4OSA_Debug.h"
24#include "M4OSA_CharStar.h"
25
26#include "NXPSW_CompilerSwitches.h"
27
28#include "M4VSS3GPP_API.h"
29#include "M4VSS3GPP_ErrorCodes.h"
30
31#include "M4xVSS_API.h"
32#include "M4xVSS_Internal.h"
33
34/*for rgb16 color effect*/
35#include "M4VIFI_Defines.h"
36#include "M4VIFI_Clip.h"
37
38/**
39 * component includes */
40#include "M4VFL_transition.h"            /**< video effects */
41
42/* Internal header file of VSS is included because of MMS use case */
43#include "M4VSS3GPP_InternalTypes.h"
44
45/*Exif header files to add image rendering support (cropping, black borders)*/
46#include "M4EXIFC_CommonAPI.h"
47// Stagefright encoders require the output width and height to be multiples of 16
48#include "M4ENCODER_common.h"
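/* Editor's note (not part of the original source): a minimal sketch of the usual
   "multiple of 16" alignment idiom the comment above refers to; the encoders'
   actual handling may differ.
       alignedWidth  = (width  + 15) & ~15;
       alignedHeight = (height + 15) & ~15;
*/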
49
50#define TRANSPARENT_COLOR 0x7E0
51
52/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */
53M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
54                                        M4VIFI_ImagePlane *pPlaneOut);
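/* Editor's note (illustration only): in RGB565 the bits are packed as
   RRRRRGGGGGGBBBBB, so TRANSPARENT_COLOR (0x7E0) has all six green bits set and
   R = B = 0, i.e. pure green. Assuming that packing, the value decomposes as:

       uint16_t pix = 0x7E0;
       uint8_t  r   = (pix >> 11) & 0x1F;   // 0
       uint8_t  g   = (pix >>  5) & 0x3F;   // 63 (maximum)
       uint8_t  b   =  pix        & 0x1F;   // 0

   This is why a dedicated RGB565-to-YUV420 conversion is needed: it can treat this
   exact value as transparent instead of letting it show up as green. */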
55
56
57/* Special MCS function, used only in VideoArtist and VideoStudio, to open the media in normal
58   mode so that the media duration is accurate */
59extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
60                                         M4VIDEOEDITING_FileType InputFileType,
61                                         M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
62
63
64/**
65 ******************************************************************************
66 * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
67 * @brief        This function initializes MCS (3GP transcoder) with the given
68 *                parameters
69 * @note        The transcoding parameters are given by the internal xVSS context.
70 *                This context contains a pointer on the current element of the
71 *                chained list of MCS parameters.
72 *
73 * @param    pContext            (IN) Pointer on the xVSS edit context
74 * @return    M4NO_ERROR:            No error
75 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
76 * @return    M4ERR_ALLOC:        Memory allocation has failed
77 ******************************************************************************
78 */
79M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
80{
81    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
82    M4OSA_ERR err;
83    M4MCS_Context mcs_context;
84    M4MCS_OutputParams Params;
85    M4MCS_EncodingParams Rates;
86    M4OSA_UInt32 i;
87
88    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
89    if(err != M4NO_ERROR)
90    {
91        M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
92        return err;
93    }
94
95    err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
96         xVSS_context->pMCScurrentParams->InputFileType,
97             xVSS_context->pMCScurrentParams->pFileOut,
98             xVSS_context->pMCScurrentParams->pFileTemp);
99    if (err != M4NO_ERROR)
100    {
101        M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
102        M4MCS_abort(mcs_context);
103        return err;
104    }
105
106    /**
107     * Fill MCS parameters with the parameters contained in the current element of the
108       MCS parameters chained list */
109    Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
110    Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
111    Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
112    Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
113    Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
114    Params.OutputAudioSamplingFrequency =
115         xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
116    Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
117    Params.pOutputPCMfile = M4OSA_NULL;
118    /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
119    switch(xVSS_context->pMCScurrentParams->MediaRendering)
120    {
121    case M4xVSS_kResizing:
122        Params.MediaRendering = M4MCS_kResizing;
123        break;
124    case M4xVSS_kCropping:
125        Params.MediaRendering = M4MCS_kCropping;
126        break;
127    case M4xVSS_kBlackBorders:
128        Params.MediaRendering = M4MCS_kBlackBorders;
129        break;
130    default:
131        break;
132    }
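    /* Editor's note (not part of the original source): M4xVSS_kResizing stretches the
       input to the output size (the aspect ratio may change), M4xVSS_kCropping fills
       the output by trimming the input, and M4xVSS_kBlackBorders keeps the aspect
       ratio by letterboxing/pillarboxing, as done explicitly for still pictures later
       in this file. */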
133    /**/
134    // new params after integrating MCS 2.0
135    // Set the number of audio effects; 0 for now.
136    Params.nbEffects = 0;
137
138    // Set the audio effect; null for now.
139    Params.pEffects = NULL;
140
141    // Keep the EXIF data; do not discard it.
142    Params.bDiscardExif = M4OSA_FALSE;
143
144    // Do not adjust the picture orientation.
145    Params.bAdjustOrientation = M4OSA_FALSE;
146    // new params after integrating MCS 2.0
147
148    /**
149     * Set output parameters */
150    err = M4MCS_setOutputParams(mcs_context, &Params);
151    if (err != M4NO_ERROR)
152    {
153        M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
154        M4MCS_abort(mcs_context);
155        return err;
156    }
157
158    Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
159    Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
160    Rates.BeginCutTime = 0;
161    Rates.EndCutTime = 0;
162    Rates.OutputFileSize = 0;
163
164    /*FB: transcoding per parts*/
165    Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
166    Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
167    Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
168
169    err = M4MCS_setEncodingParams(mcs_context, &Rates);
170    if (err != M4NO_ERROR)
171    {
172        M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
173        M4MCS_abort(mcs_context);
174        return err;
175    }
176
177    err = M4MCS_checkParamsAndStart(mcs_context);
178    if (err != M4NO_ERROR)
179    {
180        M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
181        M4MCS_abort(mcs_context);
182        return err;
183    }
184
185    /**
186     * Save MCS context to be able to call MCS step function in M4xVSS_step function */
187    xVSS_context->pMCS_Ctxt = mcs_context;
188
189    return M4NO_ERROR;
190}
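/* Editor's note, hedged usage sketch (not part of the original file): once the MCS
 * context has been saved, the transcode is driven step by step (in this library that
 * happens from M4xVSS_step()). Assuming the usual MCS stepping API, a minimal driver
 * would look roughly like:
 *
 *     M4OSA_UInt8 progress = 0;
 *     M4OSA_ERR   err;
 *
 *     err = M4xVSS_internalStartTranscoding(xVSS_context);
 *     while (err == M4NO_ERROR)
 *     {
 *         err = M4MCS_step(xVSS_context->pMCS_Ctxt, &progress);   // progress: 0..100
 *     }
 *     if (err == M4MCS_WAR_TRANSCODING_DONE)
 *     {
 *         err = M4xVSS_internalStopTranscoding(xVSS_context);
 *     }
 */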
191
192/**
193 ******************************************************************************
194 * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
195 * @brief        This function cleans up MCS (3GP transcoder)
196 * @note
197 *
198 * @param    pContext            (IN) Pointer on the xVSS edit context
199 * @return    M4NO_ERROR:            No error
200 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
201 * @return    M4ERR_ALLOC:        Memory allocation has failed
202 ******************************************************************************
203 */
204M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
205{
206    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
207    M4OSA_ERR err;
208
209    err = M4MCS_close(xVSS_context->pMCS_Ctxt);
210    if (err != M4NO_ERROR)
211    {
212        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
213        M4MCS_abort(xVSS_context->pMCS_Ctxt);
214        return err;
215    }
216
217    /**
218     * Free this MCS instance */
219    err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
220    if (err != M4NO_ERROR)
221    {
222        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
223        return err;
224    }
225
226    xVSS_context->pMCS_Ctxt = M4OSA_NULL;
227
228    return M4NO_ERROR;
229}
230
231/**
232 ******************************************************************************
233 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
234 *                                             M4OSA_FileReadPointer* pFileReadPtr,
235 *                                                M4VIFI_ImagePlane* pImagePlanes,
236 *                                                 M4OSA_UInt32 width,
237 *                                                M4OSA_UInt32 height);
238 * @brief    It converts and resizes an ARGB8888 image to YUV420
239 * @note
240 * @param    pFileIn            (IN) The Image input file
241 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
242 * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user;
243 *                            the ARGB8888 image will be converted and resized to the output
244 *                            YUV420 plane size
245 * @param    width            (IN) width of the ARGB8888 image
246 * @param    height           (IN) height of the ARGB8888 image
247 * @return    M4NO_ERROR:    No error
248 * @return    M4ERR_ALLOC: memory error
249 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
250 ******************************************************************************
251 */
252
253M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
254                                                          M4OSA_FileReadPointer* pFileReadPtr,
255                                                          M4VIFI_ImagePlane* pImagePlanes,
256                                                          M4OSA_UInt32 width,M4OSA_UInt32 height)
257{
258    M4OSA_Context pARGBIn;
259    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
260    M4OSA_UInt32 frameSize_argb=(width * height * 4);
261    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
262    M4OSA_UInt32 i = 0,j= 0;
263    M4OSA_ERR err=M4NO_ERROR;
264
265
266    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
267         M4VS, (M4OSA_Char*)"Image argb data");
268        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
269    if(pTmpData == M4OSA_NULL) {
270        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
271            Failed to allocate memory for Image clip");
272        return M4ERR_ALLOC;
273    }
274
275    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
276        width ,height);
277    /* Open the input ARGB8888 file */
278    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
279    if(err != M4NO_ERROR)
280    {
281        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
282            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
283        free(pTmpData);
284        pTmpData = M4OSA_NULL;
285        goto cleanup;
286    }
287
288    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
289    if(err != M4NO_ERROR)
290    {
291        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't read ARGB8888\
292             file %s, error: 0x%x\n",pFileIn, err);
293        pFileReadPtr->closeRead(pARGBIn);
294        free(pTmpData);
295        pTmpData = M4OSA_NULL;
296        goto cleanup;
297    }
298
299    err = pFileReadPtr->closeRead(pARGBIn);
300    if(err != M4NO_ERROR)
301    {
302        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
303             file %s, error: 0x%x\n",pFileIn, err);
304        free(pTmpData);
305        pTmpData = M4OSA_NULL;
306        goto cleanup;
307    }
308
309    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
310         (M4OSA_Char*)"Image clip RGB888 data");
311    if(rgbPlane1.pac_data == M4OSA_NULL)
312    {
313        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
314            Failed to allocate memory for Image clip");
315        free(pTmpData);
316        return M4ERR_ALLOC;
317    }
318
319    rgbPlane1.u_height = height;
320    rgbPlane1.u_width = width;
321    rgbPlane1.u_stride = width * 3;
322    rgbPlane1.u_topleft = 0;
323
324
325    /** Remove the alpha channel */
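    /* Editor's note: each ARGB8888 pixel occupies 4 bytes; assuming the A,R,G,B byte
       order implied by the format name, the loop below skips byte 0 (alpha) of every
       group of four and packs the remaining R,G,B bytes into the RGB888 plane. */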
326    for (i=0, j = 0; i < frameSize_argb; i++) {
327        if ((i % 4) == 0) continue;
328        rgbPlane1.pac_data[j] = pTmpData[i];
329        j++;
330    }
331        free(pTmpData);
332
333    /* To Check if resizing is required with color conversion */
334    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
335    {
336        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
337        frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
338        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
339             (M4OSA_Char*)"Image clip RGB888 data");
340        if(rgbPlane2.pac_data == M4OSA_NULL)
341        {
342            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
343            free(rgbPlane1.pac_data); /* pTmpData was already freed above */
344            return M4ERR_ALLOC;
345        }
346        rgbPlane2.u_height = pImagePlanes->u_height;
347        rgbPlane2.u_width = pImagePlanes->u_width;
348        rgbPlane2.u_stride = pImagePlanes->u_width * 3;
349        rgbPlane2.u_topleft = 0;
350
351        /* Resizing RGB888 to RGB888 */
352        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
353        if(err != M4NO_ERROR)
354        {
355            M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err);
356            free(rgbPlane2.pac_data);
357            free(rgbPlane1.pac_data);
358            return err;
359        }
360        /*Converting Resized RGB888 to YUV420 */
361        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
362        if(err != M4NO_ERROR)
363        {
364            M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
365            free(rgbPlane2.pac_data);
366            free(rgbPlane1.pac_data);
367            return err;
368        }
369            free(rgbPlane2.pac_data);
370            free(rgbPlane1.pac_data);
371
372            M4OSA_TRACE1_0("RGB to YUV done");
373
374
375    }
376    else
377    {
378        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
379        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
380        if(err != M4NO_ERROR)
381        {
382            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
383        }
384            free(rgbPlane1.pac_data);
385
386            M4OSA_TRACE1_0("RGB to YUV done");
387    }
388cleanup:
389    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
390    return err;
391}
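/* Editor's note, hedged usage sketch (not part of the original file): the function
 * above expects the caller to provide three pre-sized YUV420 planes. For a 320x240
 * output the setup could look roughly like this (buffer allocation and error
 * handling omitted; variable names are illustrative):
 *
 *     M4VIFI_ImagePlane out[3];
 *     M4OSA_UInt32 w = 320, h = 240;
 *
 *     out[0].u_width = w;      out[0].u_height = h;      out[0].u_stride = w;
 *     out[1].u_width = w >> 1; out[1].u_height = h >> 1; out[1].u_stride = w >> 1;
 *     out[2].u_width = w >> 1; out[2].u_height = h >> 1; out[2].u_stride = w >> 1;
 *     out[0].u_topleft = out[1].u_topleft = out[2].u_topleft = 0;
 *     // each out[i].pac_data must point to a buffer of u_stride * u_height bytes
 *
 *     err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pArgbFile, pFileReadPtr,
 *                                                           out, srcWidth, srcHeight);
 */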
392
393/**
394 ******************************************************************************
395 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
396 *                                             M4OSA_FileReadPointer* pFileReadPtr,
397 *                                                M4VIFI_ImagePlane* pImagePlanes,
398 *                                                 M4OSA_UInt32 width,
399 *                                                M4OSA_UInt32 height);
400 * @brief    It converts an ARGB8888 image to YUV420
401 * @note
402 * @param    pFileIn            (IN) The Image input file
403 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
404 * @param    pImagePlanes    (OUT) Pointer to the YUV420 output planes; the planes are
405 *                            allocated by this function and the ARGB8888 image is
406 *                            converted into them at its original size
407 * @param    width        (IN) width of the ARGB8888
408 * @param    height            (IN) height of the ARGB8888
409 * @return    M4NO_ERROR:    No error
410 * @return    M4ERR_ALLOC: memory error
411 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
412 ******************************************************************************
413 */
414
415M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
416                                                 M4OSA_FileReadPointer* pFileReadPtr,
417                                                 M4VIFI_ImagePlane** pImagePlanes,
418                                                 M4OSA_UInt32 width,M4OSA_UInt32 height)
419{
420    M4OSA_ERR err = M4NO_ERROR;
421    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
422
423    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
424                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
425    if(yuvPlane == M4OSA_NULL) {
426        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :\
427            Failed to allocate memory for Image clip");
428        return M4ERR_ALLOC;
429    }
430    yuvPlane[0].u_height = height;
431    yuvPlane[0].u_width = width;
432    yuvPlane[0].u_stride = width;
433    yuvPlane[0].u_topleft = 0;
434    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
435        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
436
437    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
438    yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
439    yuvPlane[1].u_stride = yuvPlane[1].u_width;
440    yuvPlane[1].u_topleft = 0;
441    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
442        * yuvPlane[0].u_width);
443
444    yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
445    yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
446    yuvPlane[2].u_stride = yuvPlane[2].u_width;
447    yuvPlane[2].u_topleft = 0;
448    yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
449        * yuvPlane[1].u_width);
450    err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
451                                                          yuvPlane, width, height);
452    if(err != M4NO_ERROR)
453    {
454        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 returned error: 0x%x\n", err);
455        free(yuvPlane);
456        return err;
457    }
458
459        *pImagePlanes = yuvPlane;
460
461    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
462    return err;
463
464}
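/* Editor's note (illustration only): the function above allocates the three YUV420
 * planes in one contiguous buffer of width * height * 1.5 bytes, Y first, then U,
 * then V. For a 640x480 image, for example:
 *
 *     Y : 640 * 480 = 307200 bytes, at offset 0
 *     U : 320 * 240 =  76800 bytes, at offset 307200
 *     V : 320 * 240 =  76800 bytes, at offset 384000
 *     total         = 460800 bytes  (= 640 * 480 * 1.5)
 */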
465
466/**
467 ******************************************************************************
468 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
469 *                                        M4VIFI_ImagePlane* pImagePlanes,
470 *                                        M4OSA_Double* pPictureDuration);
471 * @brief    It feeds the PTO3GPP with YUV420 pictures.
472 * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
473 * @param    pContext    (IN) The integrator own context
474 * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
475 * @param    pPictureDuration(OUT) Duration of the returned picture
476 *
477 * @return    M4NO_ERROR:    No error
478 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
479 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
480 ******************************************************************************
481 */
482M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
483                                     M4OSA_Double* pPictureDuration)
484{
485    M4OSA_ERR err = M4NO_ERROR;
486    M4OSA_UInt8    last_frame_flag = 0;
487    M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
488
489    /*Used for pan&zoom*/
490    M4OSA_UInt8 tempPanzoomXa = 0;
491    M4OSA_UInt8 tempPanzoomXb = 0;
492    M4AIR_Params Params;
493    /**/
494
495    /*Used for cropping and black borders*/
496    M4OSA_Context    pPictureContext = M4OSA_NULL;
497    M4OSA_FilePosition    pictureSize = 0 ;
498    M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
499    //M4EXIFC_Context pExifContext = M4OSA_NULL;
500    M4EXIFC_BasicTags pBasicTags;
501    M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
502    M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
503    M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
504    /**/
505
506    /**
507     * Check input parameters */
508    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
509         "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
510    M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
511         "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
512    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
513         "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
514    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
515    /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
516    if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
517    {
518        pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
519    }
520
521    /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
522    if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
523    {
524        /**
525         * Convert and resize input ARGB8888 file to YUV420 */
526        /*To support ARGB8888 : */
527        M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and height %d %d",
528            pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
529        err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
530             pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
531                pC->m_pPto3GPPparams->height);
532        if(err != M4NO_ERROR)
533        {
534            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting ARGB8888 to YUV420: 0x%x\n", err);
535            return err;
536        }
537    }
538    /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
539    else
540    {
541        /**
542         * Computes ratios */
543        if(pC->m_pDecodedPlane == M4OSA_NULL)
544        {
545            /**
546             * Convert input ARGB8888 file to YUV420 */
547             M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and height %d %d",
548                pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
549            err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
550                &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
551            if(err != M4NO_ERROR)
552            {
553                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting ARGB8888 to YUV420: 0x%x\n", err);
554                if(pC->m_pDecodedPlane != M4OSA_NULL)
555                {
556                    /* YUV420 planar is returned but allocation is made only once
557                        (contiguous planes in memory) */
558                    if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
559                    {
560                        free(pC->m_pDecodedPlane->pac_data);
561                    }
562                    free(pC->m_pDecodedPlane);
563                    pC->m_pDecodedPlane = M4OSA_NULL;
564                }
565                return err;
566            }
567        }
568
569        /*Initialize AIR Params*/
570        Params.m_inputCoord.m_x = 0;
571        Params.m_inputCoord.m_y = 0;
572        Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
573        Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
574        Params.m_outputSize.m_width = pImagePlanes->u_width;
575        Params.m_outputSize.m_height = pImagePlanes->u_height;
576        Params.m_bOutputStripe = M4OSA_FALSE;
577        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
578
579        /*Initialize Exif params structure*/
580        pBasicTags.orientation = M4COMMON_kOrientationUnknown;
581
582        /**
583        Pan&zoom params*/
584        if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
585        {
586            /*Save ratio values, they can be reused if the new ratios are 0*/
587            tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
588            tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
589            /*Check that the ratio is not 0*/
590            /*Check (a) parameters*/
591            if(pC->m_pPto3GPPparams->PanZoomXa == 0)
592            {
593                M4OSA_UInt8 maxRatio = 0;
594                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
595                     pC->m_pPto3GPPparams->PanZoomTopleftYa)
596                {
597                    /*The ratio is 0, that means the area of the picture defined with (a)
598                    parameters is bigger than the image size*/
599                    if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
600                    {
601                        /*The oversize is maxRatio*/
602                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
603                    }
604                }
605                else
606                {
607                    /*The ratio is 0, that means the area of the picture defined with (a)
608                     parameters is bigger than the image size*/
609                    if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
610                    {
611                        /*The oversize is maxRatio*/
612                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
613                    }
614                }
615                /*Modify the (a) parameters:*/
616                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
617                {
618                    /*The (a) topleft parameters can be moved to keep the same area size*/
619                    pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
620                }
621                else
622                {
623                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
624                    modified to match the image size*/
625                    pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
626                }
627                if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
628                {
629                    /*The (a) topleft parameters can be moved to keep the same area size*/
630                    pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
631                }
632                else
633                {
634                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
635                     modified to match the image size*/
636                    pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
637                }
638                /*The new ratio is the original one*/
639                pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
640                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
641                {
642                    /*Change the ratio if the area of the picture defined with (a) parameters is
643                    bigger than the image size*/
644                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
645                }
646                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
647                {
648                    /*Change the ratio if the area of the picture defined with (a) parameters is
649                    bigger than the image size*/
650                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
651                }
652            }
653            /*Check (b) parameters*/
654            if(pC->m_pPto3GPPparams->PanZoomXb == 0)
655            {
656                M4OSA_UInt8 maxRatio = 0;
657                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
658                     pC->m_pPto3GPPparams->PanZoomTopleftYb)
659                {
660                    /*The ratio is 0, that means the area of the picture defined with (b)
661                     parameters is bigger than the image size*/
662                    if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
663                    {
664                        /*The oversize is maxRatio*/
665                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
666                    }
667                }
668                else
669                {
670                    /*The ratio is 0, that means the area of the picture defined with (b)
671                     parameters is bigger than the image size*/
672                    if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
673                    {
674                        /*The oversize is maxRatio*/
675                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
676                    }
677                }
678                /*Modify the (b) parameters:*/
679                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
680                {
681                    /*The (b) topleft parameters can be moved to keep the same area size*/
682                    pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
683                }
684                else
685                {
686                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
687                     modified to match the image size*/
688                    pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
689                }
690                if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
691                {
692                    /*The (b) topleft parameters can be moved to keep the same area size*/
693                    pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
694                }
695                else
696                {
697                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
698                    modified to match the image size*/
699                    pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
700                }
701                /*The new ratio is the original one*/
702                pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
703                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
704                {
705                    /*Change the ratio if the area of the picture defined with (b) parameters is
706                    bigger than the image size*/
707                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
708                }
709                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
710                {
711                    /*Change the ratio if the area of the picture defined with (b) parameters is
712                    bigger than the image size*/
713                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
714                }
715            }
716
717            /**
718             * Computes AIR parameters */
719/*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
720            (pC->m_pPto3GPPparams->PanZoomTopleftXa +
721            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
722                - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
723            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
724        Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
725            (pC->m_pPto3GPPparams->PanZoomTopleftYa +
726            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
727                 - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
728            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
729
730        Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
731            (pC->m_pPto3GPPparams->PanZoomXa +
732            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
733            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
734
735        Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
736            (pC->m_pPto3GPPparams->PanZoomXa +
737            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
738            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
739 */
740            // Use (pC->m_NbImage - 1) rather than pC->m_NbImage, because pC->m_ImageCounter
741            // only reaches x-1 for x frames
742            Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
743                (pC->m_pPto3GPPparams->PanZoomTopleftXa +
744                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
745                     - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
746                pC->m_ImageCounter) / (M4OSA_Double)(pC->m_NbImage-1))) / 1000));
747            Params.m_inputCoord.m_y =
748                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
749                (pC->m_pPto3GPPparams->PanZoomTopleftYa +
750                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
751                     - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
752                pC->m_ImageCounter) / (M4OSA_Double)(pC->m_NbImage-1))) / 1000));
753
754            Params.m_inputSize.m_width =
755                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
756                (pC->m_pPto3GPPparams->PanZoomXa +
757                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
758                     - pC->m_pPto3GPPparams->PanZoomXa) *
759                pC->m_ImageCounter) / (M4OSA_Double)(pC->m_NbImage-1))) / 1000));
760
761            Params.m_inputSize.m_height =
762                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
763                (pC->m_pPto3GPPparams->PanZoomXa +
764                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
765                    - pC->m_pPto3GPPparams->PanZoomXa) *
766                pC->m_ImageCounter) / (M4OSA_Double)(pC->m_NbImage-1))) / 1000));
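            /* Editor's note (illustration only): the pan & zoom ratios and top-left
               coordinates are expressed in per-mille of the decoded picture size.
               For example, with PanZoomTopleftXa = 100, PanZoomTopleftXb = 300, a
               decoded width of 1000 pixels and 5 frames (m_NbImage - 1 = 4), frame
               m_ImageCounter = 2 gives:
                   m_inputCoord.m_x = 1000 * (100 + (300 - 100) * 2 / 4) / 1000 = 200 pixels */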
767
768            if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
769                 > pC->m_pDecodedPlane->u_width)
770            {
771                Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
772                    - Params.m_inputCoord.m_x;
773            }
774
775            if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
776                 > pC->m_pDecodedPlane->u_height)
777            {
778                Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
779                     - Params.m_inputCoord.m_y;
780            }
781
782
783
784            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
785            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
786        }
787
788
789
790    /**
791        Picture rendering: Black borders*/
792
793        if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
794        {
795            memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
796                (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
797            memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
798                (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
799            memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
800                (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
801
802            /**
803            First without pan&zoom*/
804            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
805            {
806                switch(pBasicTags.orientation)
807                {
808                default:
809                case M4COMMON_kOrientationUnknown:
810                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
811                case M4COMMON_kOrientationTopLeft:
812                case M4COMMON_kOrientationTopRight:
813                case M4COMMON_kOrientationBottomRight:
814                case M4COMMON_kOrientationBottomLeft:
815                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
816                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
817                         //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
818                    {
819                        /*it is height so black borders will be on the top and on the bottom side*/
820                        Params.m_outputSize.m_width = pImagePlanes->u_width;
821                        Params.m_outputSize.m_height =
822                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
823                                * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
824                        /*number of lines at the top*/
825                        pImagePlanes[0].u_topleft =
826                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
827                                -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
828                        pImagePlanes[0].u_height = Params.m_outputSize.m_height;
829                        pImagePlanes[1].u_topleft =
830                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
831                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
832                        pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
833                        pImagePlanes[2].u_topleft =
834                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
835                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
836                        pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
837                    }
838                    else
839                    {
840                        /*it is width so black borders will be on the left and right side*/
841                        Params.m_outputSize.m_height = pImagePlanes->u_height;
842                        Params.m_outputSize.m_width =
843                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
844                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
845
846                        pImagePlanes[0].u_topleft =
847                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
848                                -Params.m_outputSize.m_width)>>1));
849                        pImagePlanes[0].u_width = Params.m_outputSize.m_width;
850                        pImagePlanes[1].u_topleft =
851                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
852                                -(Params.m_outputSize.m_width>>1)))>>1);
853                        pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
854                        pImagePlanes[2].u_topleft =
855                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
856                                -(Params.m_outputSize.m_width>>1)))>>1);
857                        pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
858                    }
859                    break;
860                case M4COMMON_kOrientationLeftTop:
861                case M4COMMON_kOrientationLeftBottom:
862                case M4COMMON_kOrientationRightTop:
863                case M4COMMON_kOrientationRightBottom:
864                        if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
865                             /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
866                             //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
867                        {
868                            /*it is height so black borders will be on the top and on
869                             the bottom side*/
870                            Params.m_outputSize.m_height = pImagePlanes->u_width;
871                            Params.m_outputSize.m_width =
872                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
873                                    * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
874                            /*number of lines at the top*/
875                            pImagePlanes[0].u_topleft =
876                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
877                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
878                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
879                            pImagePlanes[1].u_topleft =
880                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
881                                    -(Params.m_outputSize.m_width>>1)))>>1)\
882                                        *pImagePlanes[1].u_stride)+1;
883                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
884                            pImagePlanes[2].u_topleft =
885                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
886                                    -(Params.m_outputSize.m_width>>1)))>>1)\
887                                        *pImagePlanes[2].u_stride)+1;
888                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
889                        }
890                        else
891                        {
892                            /*it is width so black borders will be on the left and right side*/
893                            Params.m_outputSize.m_width = pImagePlanes->u_height;
894                            Params.m_outputSize.m_height =
895                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
896                                     * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
897
898                            pImagePlanes[0].u_topleft =
899                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
900                                    -Params.m_outputSize.m_height))>>1))+1;
901                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
902                            pImagePlanes[1].u_topleft =
903                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
904                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
905                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
906                            pImagePlanes[2].u_topleft =
907                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
908                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
909                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
910                        }
911                    break;
912                }
913            }
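            /* Editor's note (illustration only, following the arithmetic above): for a
               1280x720 decoded picture rendered without rotation into 640x480 output
               planes, (720 * 640) / 1280 = 360 <= 480, so the full output width is kept
               and the output height becomes 360; the black border is (480 - 360) / 2 =
               60 lines at the top and bottom, applied per plane through u_topleft (with
               half values for the U and V planes). */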
914
915            /**
916            Secondly with pan&zoom*/
917            else
918            {
919                switch(pBasicTags.orientation)
920                {
921                default:
922                case M4COMMON_kOrientationUnknown:
923                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
924                case M4COMMON_kOrientationTopLeft:
925                case M4COMMON_kOrientationTopRight:
926                case M4COMMON_kOrientationBottomRight:
927                case M4COMMON_kOrientationBottomLeft:
928                    /*NO ROTATION*/
929                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
930                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
931                            //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
932                    {
933                        /*Black borders will be on the top and bottom of the output video*/
934                        /*Maximum output height if the input image aspect ratio is kept and if
935                        the output width is the screen width*/
936                        M4OSA_UInt32 tempOutputSizeHeight =
937                            (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
938                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
939                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
940                        M4OSA_UInt32 tempFinalInputHeight = 0;
941                        /*The output width is the screen width*/
942                        Params.m_outputSize.m_width = pImagePlanes->u_width;
943                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
944
945                        /*Maximum input height according to the maximum output height
946                        (proportional to the maximum output height)*/
947                        tempInputSizeHeightMax = (pImagePlanes->u_height\
948                            *Params.m_inputSize.m_height)/tempOutputSizeHeight;
949                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
950
951                        /*Check if the maximum possible input height is contained into the
952                        input image height*/
953                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
954                        {
955                            /*The maximum possible input height is contained in the input
956                            image height,
957                            that means no black borders, the input pan zoom area will be extended
958                            so that the input AIR height will be the maximum possible*/
959                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
960                                 <= Params.m_inputCoord.m_y
961                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
962                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
963                                         + Params.m_inputSize.m_height))
964                            {
965                                /*The input pan zoom area can be extended symmetrically on the
966                                top and bottom side*/
967                                Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
968                                    - Params.m_inputSize.m_height)>>1);
969                            }
970                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
971                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
972                            {
973                                /*There is not enough place above the input pan zoom area to
974                                extend it symmetrically,
975                                so extend it to the maximum on the top*/
976                                Params.m_inputCoord.m_y = 0;
977                            }
978                            else
979                            {
980                                /*There is not enough place below the input pan zoom area to
981                                extend it symmetrically,
982                                so extend it to the maximum on the bottom*/
983                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
984                                    - tempInputSizeHeightMax;
985                            }
986                            /*The input height of the AIR is the maximum possible height*/
987                            Params.m_inputSize.m_height = tempInputSizeHeightMax;
988                        }
989                        else
990                        {
991                            /*The maximum possible input height is greater than the input
992                            image height,
993                            that means black borders are necessary to keep aspect ratio
994                            The input height of the AIR is all the input image height*/
995                            Params.m_outputSize.m_height =
996                                (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
997                                    /Params.m_inputSize.m_height;
998                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
999                            Params.m_inputCoord.m_y = 0;
1000                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1001                            pImagePlanes[0].u_topleft =
1002                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1003                                    -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
1004                            pImagePlanes[0].u_height = Params.m_outputSize.m_height;
1005                            pImagePlanes[1].u_topleft =
1006                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1007                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1008                                        *pImagePlanes[1].u_stride);
1009                            pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
1010                            pImagePlanes[2].u_topleft =
1011                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1012                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1013                                        *pImagePlanes[2].u_stride);
1014                            pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
1015                        }
1016                    }
1017                    else
1018                    {
1019                        /*Black borders will be on the left and right side of the output video*/
1020                        /*Maximum output width if the input image aspect ratio is kept and if the
1021                         output height is the screen height*/
1022                        M4OSA_UInt32 tempOutputSizeWidth =
1023                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
1024                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
1025                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1026                        M4OSA_UInt32 tempFinalInputWidth = 0;
1027                        /*The output height is the screen height*/
1028                        Params.m_outputSize.m_height = pImagePlanes->u_height;
1029                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1030
1031                        /*Maximum input width according to the maximum output width
1032                        (proportional to the maximum output width)*/
1033                        tempInputSizeWidthMax =
1034                             (pImagePlanes->u_width*Params.m_inputSize.m_width)\
1035                                /tempOutputSizeWidth;
1036                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1037
1038                        /*Check if the maximum possible input width is contained into the input
1039                         image width*/
1040                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
1041                        {
1042                            /*The maximum possible input width is contained in the input
1043                            image width,
1044                            that means no black borders, the input pan zoom area will be extended
1045                            so that the input AIR width will be the maximum possible*/
1046                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
1047                                <= Params.m_inputCoord.m_x
1048                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
1049                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1050                                        + Params.m_inputSize.m_width))
1051                            {
1052                                /*The input pan zoom area can be extended symmetrically on the
1053                                     right and left side*/
1054                                Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
1055                                     - Params.m_inputSize.m_width)>>1);
1056                            }
1057                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1058                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1059                            {
1060                                /*There is not enough place above the input pan zoom area to
1061                                    extend it symmetrically,
1062                                so extend it to the maximum on the left*/
1063                                Params.m_inputCoord.m_x = 0;
1064                            }
1065                            else
1066                            {
1067                                /*There is not enough place below the input pan zoom area
1068                                    to extend it symmetrically,
1069                                so extend it to the maximum on the right*/
1070                                Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
1071                                    - tempInputSizeWidthMax;
1072                            }
1073                            /*The input width of the AIR is the maximum possible width*/
1074                            Params.m_inputSize.m_width = tempInputSizeWidthMax;
1075                        }
1076                        else
1077                        {
1078                            /*The maximum possible input width is greater than the input
1079                            image width,
1080                            that means black borders are necessary to keep aspect ratio
1081                            The input width of the AIR is all the input image width*/
1082                            Params.m_outputSize.m_width =\
1083                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
1084                                    /Params.m_inputSize.m_width;
1085                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1086                            Params.m_inputCoord.m_x = 0;
1087                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1088                            pImagePlanes[0].u_topleft =
1089                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1090                                    -Params.m_outputSize.m_width)>>1));
1091                            pImagePlanes[0].u_width = Params.m_outputSize.m_width;
1092                            pImagePlanes[1].u_topleft =
1093                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1094                                    -(Params.m_outputSize.m_width>>1)))>>1);
1095                            pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
1096                            pImagePlanes[2].u_topleft =
1097                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1098                                    -(Params.m_outputSize.m_width>>1)))>>1);
1099                            pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
1100                        }
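                        /* Worked example for the width handling above: with a 640-pixel-wide
                           decoded plane, a 300-pixel-wide pan zoom area at x=200 and a maximum
                           input width of 400, each side must grow by (400-300)>>1 = 50 pixels;
                           since 200 >= 50 on the left and 640-(200+300) = 140 >= 50 on the
                           right, the first branch applies and the area becomes x=150,
                           width=400, with no black borders needed. */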
1101                    }
1102                    break;
1103                case M4COMMON_kOrientationLeftTop:
1104                case M4COMMON_kOrientationLeftBottom:
1105                case M4COMMON_kOrientationRightTop:
1106                case M4COMMON_kOrientationRightBottom:
1107                    /*ROTATION*/
1108                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1109                         /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
1110                         //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
1111                    {
1112                        /*Black borders will be on the left and right side of the output video*/
1113                        /*Maximum output height if the input image aspect ratio is kept and if
1114                        the output height is the screen width*/
1115                        M4OSA_UInt32 tempOutputSizeHeight =
1116                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1117                             /pC->m_pDecodedPlane->u_height);
1118                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1119                        M4OSA_UInt32 tempFinalInputHeight = 0;
1120                        /*The output width is the screen height*/
1121                        Params.m_outputSize.m_height = pImagePlanes->u_width;
1122                        Params.m_outputSize.m_width= pImagePlanes->u_height;
1123                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1124
1125                        /*Maximum input height according to the maximum output height
1126                             (proportional to the maximum output height)*/
1127                        tempInputSizeHeightMax =
1128                            (pImagePlanes->u_height*Params.m_inputSize.m_width)\
1129                                /tempOutputSizeHeight;
1130                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1131
1132                        /*Check if the maximum possible input height is contained into the
1133                             input image width (rotation included)*/
1134                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
1135                        {
1136                            /*The maximum possible input height is contained in the input
1137                            image width (rotation included),
1138                            that means no black borders, the input pan zoom area will be extended
1139                            so that the input AIR width will be the maximum possible*/
1140                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
1141                                <= Params.m_inputCoord.m_x
1142                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
1143                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1144                                        + Params.m_inputSize.m_width))
1145                            {
1146                                /*The input pan zoom area can be extended symmetrically on the
1147                                 right and left side*/
1148                                Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
1149                                    - Params.m_inputSize.m_width)>>1);
1150                            }
1151                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1152                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1153                            {
1154                                /*There is not enough place on the left of the input pan
1155                                zoom area to extend it symmetrically,
1156                                so extend it to the maximum on the left*/
1157                                Params.m_inputCoord.m_x = 0;
1158                            }
1159                            else
1160                            {
1161                                /*There is not enough place on the right of the input pan zoom
1162                                 area to extend it symmetrically,
1163                                so extend it to the maximum on the right*/
1164                                Params.m_inputCoord.m_x =
1165                                     pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
1166                            }
1167                            /*The input width of the AIR is the maximum possible width*/
1168                            Params.m_inputSize.m_width = tempInputSizeHeightMax;
1169                        }
1170                        else
1171                        {
1172                            /*The maximum possible input height is greater than the input
1173                            image width (rotation included),
1174                            that means black borders are necessary to keep aspect ratio
1175                            The input width of the AIR is all the input image width*/
1176                            Params.m_outputSize.m_width =
1177                            (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
1178                                /Params.m_inputSize.m_width;
1179                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1180                            Params.m_inputCoord.m_x = 0;
1181                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1182                            pImagePlanes[0].u_topleft =
1183                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1184                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
1185                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
1186                            pImagePlanes[1].u_topleft =
1187                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1188                                -(Params.m_outputSize.m_width>>1)))>>1)\
1189                                    *pImagePlanes[1].u_stride)+1;
1190                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
1191                            pImagePlanes[2].u_topleft =
1192                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1193                                -(Params.m_outputSize.m_width>>1)))>>1)\
1194                                    *pImagePlanes[2].u_stride)+1;
1195                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
1196                        }
1197                    }
1198                    else
1199                    {
1200                        /*Black borders will be on the top and bottom of the output video*/
1201                        /*Maximum output width if the input image aspect ratio is kept and if
1202                         the output width is the screen height*/
1203                        M4OSA_UInt32 tempOutputSizeWidth =
1204                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
1205                             /pC->m_pDecodedPlane->u_width);
1206                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1207                        M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0;
1208                        /*The output height is the screen width*/
1209                        Params.m_outputSize.m_width = pImagePlanes->u_height;
1210                        Params.m_outputSize.m_height= pImagePlanes->u_width;
1211                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1212
1213                        /*Maximum input width according to the maximum output width
1214                         (proportional to the maximum output width)*/
1215                        tempInputSizeWidthMax =
1216                        (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
1217                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1218
1219                        /*Check if the maximum possible input width is contained into the input
1220                         image height (rotation included)*/
1221                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
1222                        {
1223                            /*The maximum possible input width is contained in the input
1224                             image height (rotation included),
1225                            that means no black borders, the input pan zoom area will be extended
1226                            so that the input AIR height will be the maximum possible*/
1227                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
1228                                <= Params.m_inputCoord.m_y
1229                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
1230                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
1231                                        + Params.m_inputSize.m_height))
1232                            {
1233                                /*The input pan zoom area can be extended symmetrically on
1234                                the top and bottom side*/
1235                                Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
1236                                    - Params.m_inputSize.m_height)>>1);
1237                            }
1238                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1239                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1240                            {
1241                                /*There is not enough place on the top of the input pan zoom
1242                                area to extend it symmetrically,
1243                                so extend it to the maximum on the top*/
1244                                Params.m_inputCoord.m_y = 0;
1245                            }
1246                            else
1247                            {
1248                                /*There is not enough place on the bottom of the input pan zoom
1249                                 area to extend it symmetrically,
1250                                so extend it to the maximum on the bottom*/
1251                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
1252                                     - tempInputSizeWidthMax;
1253                            }
1254                            /*The input height of the AIR is the maximum possible height*/
1255                            Params.m_inputSize.m_height = tempInputSizeWidthMax;
1256                        }
1257                        else
1258                        {
1259                            /*The maximum possible input width is greater than the input\
1260                             image height (rotation included),
1261                            that means black borders are necessary to keep aspect ratio
1262                            The input height of the AIR is all the input image height*/
1263                            Params.m_outputSize.m_height =
1264                                (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
1265                                    /Params.m_inputSize.m_height;
1266                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1267                            Params.m_inputCoord.m_y = 0;
1268                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1269                            pImagePlanes[0].u_topleft =
1270                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1271                                    -Params.m_outputSize.m_height))>>1))+1;
1272                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
1273                            pImagePlanes[1].u_topleft =
1274                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1275                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1276                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
1277                            pImagePlanes[2].u_topleft =
1278                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1279                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1280                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
1281                        }
1282                    }
1283                    break;
1284                }
1285            }
1286
1287            /*Width and height have to be even*/
1288            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1289            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1290            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
1291            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
1292            pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
1293            pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
1294            pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
1295            pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
1296            pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
1297            pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
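            /* YUV420 stores one U and one V sample per 2x2 block of Y samples, so every width
               and height handled here must be even; (x>>1)<<1 simply clears the lowest bit,
               e.g. 177 becomes 176. */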
1298
1299            /*Check that values are coherent*/
1300            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
1301            {
1302                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
1303            }
1304            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
1305            {
1306                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
1307            }
1308        }
1309
1310        /**
1311        Picture rendering: Resizing and Cropping*/
1312        if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
1313        {
1314            switch(pBasicTags.orientation)
1315            {
1316            default:
1317            case M4COMMON_kOrientationUnknown:
1318                Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
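                /* no break: an unknown orientation is treated as TopLeft and falls through to
                   the non-rotated cases below */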
1319            case M4COMMON_kOrientationTopLeft:
1320            case M4COMMON_kOrientationTopRight:
1321            case M4COMMON_kOrientationBottomRight:
1322            case M4COMMON_kOrientationBottomLeft:
1323                Params.m_outputSize.m_height = pImagePlanes->u_height;
1324                Params.m_outputSize.m_width = pImagePlanes->u_width;
1325                break;
1326            case M4COMMON_kOrientationLeftTop:
1327            case M4COMMON_kOrientationLeftBottom:
1328            case M4COMMON_kOrientationRightTop:
1329            case M4COMMON_kOrientationRightBottom:
1330                Params.m_outputSize.m_height = pImagePlanes->u_width;
1331                Params.m_outputSize.m_width = pImagePlanes->u_height;
1332                break;
1333            }
1334        }
1335
1336        /**
1337        Picture rendering: Cropping*/
1338        if(pC->m_mediaRendering == M4xVSS_kCropping)
1339        {
1340            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
1341                 /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
1342            {
1343                M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
1344                /*height will be cropped*/
1345                Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
1346                    * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
1347                Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
1348                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1349                {
1350                    Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
1351                        ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
1352                }
1353                else
1354                {
1355                    Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
1356                        ((tempHeight - Params.m_inputSize.m_height))>>1);
1357                }
1358            }
1359            else
1360            {
1361                M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
1362                /*width will be cropped*/
1363                Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
1364                    * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
1365                Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
1366                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1367                {
1368                    Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
1369                        ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
1370                }
1371                else
1372                {
1373                    Params.m_inputCoord.m_x += (M4OSA_Int32)\
1374                        (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
1375                }
1376            }
1377        }
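        /* Example of the cropping arithmetic above: for a 640x480 decoded picture and a
           176x144 output, (144*640)/176 = 523 is not smaller than 480, so the width is
           cropped to (176*480)/144 = 586 and, without pan zoom, m_x = (640-586)>>1 = 27. */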
1378
1379
1380
1381        /**
1382         * Call AIR functions */
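        /* The AIR context is created lazily on the first picture, re-configured with the
           Params computed above for every picture, and cleaned up once the last picture has
           been delivered (see the end of this callback). */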
1383        if(M4OSA_NULL == pC->m_air_context)
1384        {
1385            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
1386            if(err != M4NO_ERROR)
1387            {
1388                free(pC->m_pDecodedPlane[0].pac_data);
1389                free(pC->m_pDecodedPlane);
1390                pC->m_pDecodedPlane = M4OSA_NULL;
1391                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1392                     Error when initializing AIR: 0x%x", err);
1393                return err;
1394            }
1395        }
1396
1397        err = M4AIR_configure(pC->m_air_context, &Params);
1398        if(err != M4NO_ERROR)
1399        {
1400            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1401                 Error when configuring AIR: 0x%x", err);
1402            M4AIR_cleanUp(pC->m_air_context);
1403            free(pC->m_pDecodedPlane[0].pac_data);
1404            free(pC->m_pDecodedPlane);
1405            pC->m_pDecodedPlane = M4OSA_NULL;
1406            return err;
1407        }
1408
1409        err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
1410        if(err != M4NO_ERROR)
1411        {
1412            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
1413            M4AIR_cleanUp(pC->m_air_context);
1414            free(pC->m_pDecodedPlane[0].pac_data);
1415            free(pC->m_pDecodedPlane);
1416            pC->m_pDecodedPlane = M4OSA_NULL;
1417            return err;
1418        }
1419        pImagePlanes[0] = pImagePlanes1;
1420        pImagePlanes[1] = pImagePlanes2;
1421        pImagePlanes[2] = pImagePlanes3;
1422    }
1423
1424
1425    /**
1426     * Increment the image counter */
1427    pC->m_ImageCounter++;
1428
1429    /**
1430     * Check end of sequence */
1431    last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
1432
1433    /**
1434     * Keep the picture duration */
1435    *pPictureDuration = pC->m_timeDuration;
1436
1437    if (1 == last_frame_flag)
1438    {
1439        if(M4OSA_NULL != pC->m_air_context)
1440        {
1441            err = M4AIR_cleanUp(pC->m_air_context);
1442            if(err != M4NO_ERROR)
1443            {
1444                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
1445                return err;
1446            }
1447        }
1448        if(M4OSA_NULL != pC->m_pDecodedPlane)
1449        {
1450            free(pC->m_pDecodedPlane[0].pac_data);
1451            free(pC->m_pDecodedPlane);
1452            pC->m_pDecodedPlane = M4OSA_NULL;
1453        }
1454        return M4PTO3GPP_WAR_LAST_PICTURE;
1455    }
1456
1457    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
1458    return M4NO_ERROR;
1459}
1460
1461/**
1462 ******************************************************************************
1463 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1464 * @brief    This function initializes Pto3GPP with the given parameters
1465 * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
1466 *            context. This context contains a pointer on the current element
1467 *            of the chained list of Pto3GPP parameters.
1468 * @param    pContext    (IN) The integrator own context
1469 *
1470 * @return    M4NO_ERROR:    No error
1471 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
1472 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1473 ******************************************************************************
1474 */
1475M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1476{
1477    /************************************************************************/
1478    /* Definitions to generate dummy AMR file used to add AMR silence in files generated
1479     by Pto3GPP */
1480    #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
1481    /* This constant is defined in M4VSS3GPP_InternalConfig.h */
1482    extern const M4OSA_UInt8\
1483         M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
1484
1485    /* AMR file header used to create the dummy AMR silence file */
1486    #define M4VSS3GPP_AMR_HEADER_SIZE 6
1487    const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_HEADER_SIZE] =
1488    { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
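    /* These six bytes are the ASCII string "#!AMR\n", the standard magic number that starts
       every AMR-NB file. */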
1489    /************************************************************************/
1490
1491    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1492    M4OSA_ERR err;
1493    M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
1494    M4PTO3GPP_Params Params;
1495     M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
1496    M4OSA_Bool cmpResult=M4OSA_FALSE;
1497    M4OSA_Context pDummyAMRFile;
1498    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1499    /*UTF conversion support*/
1500    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1501    M4OSA_UInt32 i;
1502
1503    /**
1504     * Create a M4PTO3GPP instance */
1505    err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
1506         xVSS_context->pFileWritePtr);
1507    if (err != M4NO_ERROR)
1508    {
1509        M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err);
1510        return err;
1511    }
1512
1513    pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt),
1514         M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
1515    if(pCallBackCtxt == M4OSA_NULL)
1516    {
1517        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
1518        return M4ERR_ALLOC;
1519    }
1520
1521    Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
1522    Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
1523
1524    /**
1525     * Generate "dummy" amr file containing silence in temporary folder */
1526    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1527    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1528
1529    /**
1530     * UTF conversion: convert the temporary path into the customer format*/
1531    pDecodedPath = out_amr;
1532
1533    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1534            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1535    {
1536        M4OSA_UInt32 length = 0;
1537        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1538             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1539        if(err != M4NO_ERROR)
1540        {
1541            M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1542                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1543            return err;
1544        }
1545        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1546    }
1547
1548    /**
1549    * End of the conversion, now use the converted path*/
1550
1551    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
1552
1553    /*Commented because of the use of the UTF conversion see above*/
1554/*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
1555 */
1556    if(err != M4NO_ERROR)
1557    {
1558        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
1559             error: 0x%x\n",out_amr, err);
1560        return err;
1561    }
1562
1563    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1564        (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
1565    if(err != M4NO_ERROR)
1566    {
1567        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
1568             error: 0x%x\n",out_amr, err);
1569        return err;
1570    }
1571
1572    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1573         (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
1574    if(err != M4NO_ERROR)
1575    {
1576        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1577            Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
1578        return err;
1579    }
1580
1581    err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
1582    if(err != M4NO_ERROR)
1583    {
1584        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1585            Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
1586        return err;
1587    }
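    /* At this point dummy.amr holds 6 + 13 = 19 bytes: the "#!AMR\n" magic followed by one
       silence access unit (a 13-byte AU corresponds to the lowest AMR-NB bit-rate mode). */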
1588
1589    /**
1590     * Fill parameters for Pto3GPP with the parameters contained in the current element of the
1591     * Pto3GPP parameters chained list and with default parameters */
1592/*+ New Encoder bitrates */
1593    if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
1594        Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
1595    }
1596    else {
1597          Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
1598    }
1599    M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
1600        Params.OutputVideoBitrate);
1601/*- New Encoder bitrates */
1602    Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
1603    Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
1604    Params.pPictureCallbackCtxt    = pCallBackCtxt;
1605    /*FB: change to use the converted path (UTF conversion) see the conversion above*/
1606    /*Fix: adding an audio track with an image as input: AudioTrackFile is set to NULL */
1607    Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
1608    Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
1609    Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
1610    Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
1611    Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
1612    /*+PR No: blrnxpsw#223*/
1613    /*Increased frame frequency: number of frames = duration / framerate */
1614    /*Other changes are made in M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/
1615    /*The "if" check for PanZoom has been removed */
1616    Params.NbVideoFrames            = (M4OSA_UInt32)
1617        (xVSS_context->pPTo3GPPcurrentParams->duration \
1618            / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
1619    pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
1620    /*-PR No:  blrnxpsw#223*/
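    /* Illustrative numbers, assuming 'framerate' holds the duration of one frame in ms as the
       PR note above suggests: a 10000 ms picture with framerate = 200 gives
       NbVideoFrames = 50, each displayed for m_timeDuration = 200 ms. */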
1621    pCallBackCtxt->m_ImageCounter    = 0;
1622    pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
1623    pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
1624    pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
1625    pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
1626    pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
1627    pCallBackCtxt->m_air_context    = M4OSA_NULL;
1628    pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
1629
1630    /**
1631     * Set the input and output files */
1632    err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
1633    if (err != M4NO_ERROR)
1634    {
1635        M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
1636        if(pCallBackCtxt != M4OSA_NULL)
1637        {
1638            free(pCallBackCtxt);
1639            pCallBackCtxt = M4OSA_NULL;
1640        }
1641        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1642        return err;
1643    }
1644
1645    /**
1646     * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
1647    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
1648    xVSS_context->pCallBackCtxt = pCallBackCtxt;
1649
1650    return M4NO_ERROR;
1651}
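/* Illustrative sketch (not part of this module) of how the context saved above is typically
 * driven from M4xVSS_step, assuming the stepping entry point is M4PTO3GPP_Step and that it
 * returns M4PTO3GPP_WAR_END_OF_PROCESSING once the callback has signalled the last picture:
 *
 *     err = M4PTO3GPP_Step(xVSS_context->pM4PTO3GPP_Ctxt);
 *     if (M4PTO3GPP_WAR_END_OF_PROCESSING == err)
 *     {
 *         err = M4xVSS_internalStopConvertPictureTo3gp(xVSS_context);
 *     }
 */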
1652
1653/**
1654 ******************************************************************************
1655 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1656 * @brief    This function cleans up Pto3GPP
1657 * @note
1658 * @param    pContext    (IN) The integrator own context
1659 *
1660 * @return    M4NO_ERROR:    No error
1661 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1662 ******************************************************************************
1663 */
1664M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1665{
1666    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1667    M4OSA_ERR err;
1668    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1669    /*UTF conversion support*/
1670    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1671
1672    /**
1673    * Free the PTO3GPP callback context */
1674    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
1675    {
1676        free(xVSS_context->pCallBackCtxt);
1677        xVSS_context->pCallBackCtxt = M4OSA_NULL;
1678    }
1679
1680    /**
1681     * Finalize the output file */
1682    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
1683    if (err != M4NO_ERROR)
1684    {
1685        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
1686        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1687        return err;
1688    }
1689
1690    /**
1691     * Free this M4PTO3GPP instance */
1692    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1693    if (err != M4NO_ERROR)
1694    {
1695        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
1696        return err;
1697    }
1698
1699    /**
1700     * Remove dummy.amr file */
1701    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1702    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1703
1704    /**
1705     * UTF conversion: convert the temporary path into the customer format*/
1706    pDecodedPath = out_amr;
1707
1708    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1709            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1710    {
1711        M4OSA_UInt32 length = 0;
1712        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1713             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1714        if(err != M4NO_ERROR)
1715        {
1716            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
1717                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1718            return err;
1719        }
1720        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1721    }
1722    /**
1723    * End of the conversion, now use the decoded path*/
1724    remove((const char *)pDecodedPath);
1725
1726    /*Commented because of the use of the UTF conversion*/
1727/*    remove(out_amr);
1728 */
1729
1730    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
1731    xVSS_context->pCallBackCtxt = M4OSA_NULL;
1732
1733    return M4NO_ERROR;
1734}
1735
1736/**
1737 ******************************************************************************
1738 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1739 * @brief    This function converts an RGB565 plane to YUV420 planar
1740 * @note    It is used only for framing effect
1741 *            It allocates output YUV planes
1742 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
1743 *
1744 * @return    M4NO_ERROR:    No error
1745 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1746 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1747 ******************************************************************************
1748 */
1749M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1750{
1751    M4OSA_ERR err;
1752
1753    /**
1754     * Allocate output YUV planes */
1755    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
1756         M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
1757    if(framingCtx->FramingYuv == M4OSA_NULL)
1758    {
1759        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1760        return M4ERR_ALLOC;
1761    }
1762    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
1763    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
1764    framingCtx->FramingYuv[0].u_topleft = 0;
1765    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
1766    framingCtx->FramingYuv[0].pac_data =
1767         (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
1768            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
1769                "Alloc for the Conversion output YUV");
1770    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
1771    {
1772        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1773        return M4ERR_ALLOC;
1774    }
1775    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
1776    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
1777    framingCtx->FramingYuv[1].u_topleft = 0;
1778    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1779    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
1780        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
1781    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
1782    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
1783    framingCtx->FramingYuv[2].u_topleft = 0;
1784    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1785    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
1786        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
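    /* The three planes share the single buffer allocated for Y: for a WxH frame the Y plane
       occupies W*H bytes, U starts at offset W*H and V at W*H + (W*H)/4, for a total of
       (W*H*3)/2 bytes; e.g. a 176x144 frame uses 25344 + 6336 + 6336 = 38016 bytes. */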
1787
1788    /**
1789     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
1790      effect */
1791    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
1792    if(err != M4NO_ERROR)
1793    {
1794        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
1795             error when converting from RGB to YUV: 0x%x\n", err);
1796    }
1797
1798    framingCtx->duration = 0;
1799    framingCtx->previousClipTime = -1;
1800    framingCtx->previewOffsetClipTime = -1;
1801
1802    /**
1803     * Only one element in the chained list (no animated image with RGB buffer...) */
1804    framingCtx->pCurrent = framingCtx;
1805    framingCtx->pNext = framingCtx;
1806
1807    return M4NO_ERROR;
1808}
1809
1810M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
1811{
1812    M4OSA_UInt32 i;
1813    M4OSA_UInt8* plane = planeIn;
1814    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1815    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1816
1817    for(i=0; i<(size>>1); i++)
1818    {
1819        *plane++ = transparent1;
1820        *plane++ = transparent2;
1821    }
1822
1823    return M4NO_ERROR;
1824}
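/* TRANSPARENT_COLOR (0x7E0) is pure green in RGB565 (R=0, G=63, B=0); the loop above writes it
   byte-wise as the pair 0x07, 0xE0 for every pixel of the plane. */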
1825
1826
1827/**
1828 ******************************************************************************
1829 * prototype M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1830 *                                                M4VSS3GPP_EffectSettings* pEffect,
1831 *                                                M4xVSS_FramingStruct* framingCtx,
1832 *                                                M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
1833 *
1834 * @brief    This function converts an ARGB8888 input file to YUV420 when used for the framing effect
1835 * @note    The input ARGB8888 file path is contained in the pEffect structure
1836 *            If the ARGB8888 must be resized to fit output video size, this function
1837 *            will do it.
1838 * @param    pContext    (IN) The integrator own context
1839 * @param    pEffect        (IN) The effect structure containing all information on
1840 *                        the file to decode, resizing ...
1841 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
1842 *
1843 * @return    M4NO_ERROR:    No error
1844 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1845 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1846 * @return    M4ERR_FILE_NOT_FOUND: File not found.
1847 ******************************************************************************
1848 */
1849
1850
1851M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1852                                                               M4VSS3GPP_EffectSettings* pEffect,
1853                                                               M4xVSS_FramingStruct* framingCtx,
1854                                                               M4VIDEOEDITING_VideoFrameSize\
1855                                                               OutputVideoResolution)
1856{
1857    M4OSA_ERR err = M4NO_ERROR;
1858    M4OSA_Context pARGBIn;
1859    M4OSA_UInt32 file_size;
1860    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1861    M4OSA_UInt32 width, height, width_out, height_out;
1862    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
1863    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1864    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1865    /*UTF conversion support*/
1866    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1867    M4OSA_UInt32 i = 0,j = 0;
1868    M4VIFI_ImagePlane rgbPlane;
1869    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
1870    M4OSA_UInt32 frameSize;
1871    M4OSA_UInt32 tempAlphaPercent = 0;
1872    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
1873    M4OSA_UInt16 *ptr = M4OSA_NULL;
1874    M4OSA_UInt32 z = 0;
1875
1876    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
1877
1878    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
1879        framingCtx->width,framingCtx->height);
1880
1881    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
1882        "Image argb data");
1883    if(pTmpData == M4OSA_NULL) {
1884        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1885        return M4ERR_ALLOC;
1886    }
1887    /**
1888     * UTF conversion: convert the file path into the customer format*/
1889    pDecodedPath = pFile;
1890
1891    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1892            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1893    {
1894        M4OSA_UInt32 length = 0;
1895        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
1896             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1897        if(err != M4NO_ERROR)
1898        {
1899            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1900                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1901            free(pTmpData);
1902            pTmpData = M4OSA_NULL;
1903            return err;
1904        }
1905        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1906    }
1907
1908    /**
1909    * End of the conversion, now use the decoded path*/
1910
1911     /* Open input ARGB8888 file and store it into memory */
1912    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
1913
1914    if(err != M4NO_ERROR)
1915    {
1916        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1917        free(pTmpData);
1918        pTmpData = M4OSA_NULL;
1919        return err;
1920    }
1921
1922    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
1923    if(err != M4NO_ERROR)
1924    {
1925        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1926        free(pTmpData);
1927        pTmpData = M4OSA_NULL;
1928        return err;
1929    }
1930
1931
1932    err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1933    if(err != M4NO_ERROR)
1934    {
1935        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1936        free(pTmpData);
1937        pTmpData = M4OSA_NULL;
1938        return err;
1939    }
1940
1941
1942    rgbPlane.u_height = framingCtx->height;
1943    rgbPlane.u_width = framingCtx->width;
1944    rgbPlane.u_stride = rgbPlane.u_width*3;
1945    rgbPlane.u_topleft = 0;
1946
1947    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
1948    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
1949         M4VS, (M4OSA_Char*)"Image clip RGB888 data");
1950    if(rgbPlane.pac_data == M4OSA_NULL)
1951    {
1952        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1953        free(pTmpData);
1954        return M4ERR_ALLOC;
1955    }
1956
1957    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1958          Remove the alpha channel  ");
1959
1960    /* Strip the alpha channel: copy R, G and B, forcing green to 255 when alpha is 0 */
1961    for (i=0, j = 0; i < frameSize_argb; i += 4) {
1962        /* this is alpha value */
1963        if ((i % 4) == 0)
1964        {
1965            tempAlphaPercent = pTmpData[i];
1966        }
1967
1968        /* R */
1969        rgbPlane.pac_data[j] = pTmpData[i+1];
1970        j++;
1971
1972        /* G */
1973        if (tempAlphaPercent > 0) {
1974            rgbPlane.pac_data[j] = pTmpData[i+2];
1975            j++;
1976        } else {/* In case of alpha value 0, force GREEN to 255 */
1977            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
1978            j++;
1979        }
1980
1981        /* B */
1982        rgbPlane.pac_data[j] = pTmpData[i+3];
1983        j++;
1984    }
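    /* The loop above assumes the byte order A,R,G,B within each 32-bit pixel (alpha first).
       Fully transparent pixels keep their R and B values but get G forced to 255, presumably
       so that typical transparent pixels (whose R and B are 0) map to the 0x7E0 green
       transparency key used elsewhere. */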
1985
1986    free(pTmpData);
1987    pTmpData = M4OSA_NULL;
1988
1989    /* convert RGB888 to RGB565 */
1990
1991    /* allocate temp RGB 565 buffer */
1992    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
1993                       (4 * (framingCtx->width + framingCtx->height + 1)),
1994                        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
1995    if (TempPacData == M4OSA_NULL) {
1996        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
1997        free(rgbPlane.pac_data);
1998        return M4ERR_ALLOC;
1999    }
2000
2001    ptr = (M4OSA_UInt16 *)TempPacData;
2002    z = 0;
2003
2004    for (i = 0; i < j ; i += 3)
2005    {
2006        ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
2007                                    rgbPlane.pac_data[i+1],
2008                                    rgbPlane.pac_data[i+2]);
2009    }
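    /* Each RGB888 triplet produced above is packed into one 16-bit RGB565 value; assuming the
       usual 5-6-5 packing (r>>3, g>>2, b>>3), the triplet (200, 100, 50) becomes
       (25<<11) | (25<<5) | 6 = 0xCB26. */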
2010
2011    /* free the RGB888 buffer and use the RGB565 buffer instead */
2012    free(rgbPlane.pac_data);
2013    rgbPlane.pac_data = TempPacData;
2014
2015    /**
2016     * Check if output sizes are odd */
2017    if(rgbPlane.u_height % 2 != 0)
2018    {
2019        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
2020        M4OSA_UInt32 i;
2021        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2022             output height is odd  ");
2023        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
2024
2025        for(i=0;i<rgbPlane.u_width;i++)
2026        {
2027            *output_pac_data++ = transparent1;
2028            *output_pac_data++ = transparent2;
2029        }
2030
2031        /**
2032         * We just add one extra line filled with the transparency color so that the height becomes even */
2033        rgbPlane.u_height++;
2034    }
2035    if(rgbPlane.u_width % 2 != 0)
2036    {
2037        /**
2038         * We add a new column filled with the transparency color, but we need to parse all RGB lines ... */
2039        M4OSA_UInt32 i;
2040        M4VIFI_UInt8* newRGBpac_data;
2041        M4VIFI_UInt8* output_pac_data, *input_pac_data;
2042
2043        rgbPlane.u_width++;
2044        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2045             output width is odd  ");
2046        /**
2047         * We need to allocate a new RGB output buffer in which all decoded data
2048          + white line will be copied */
2049        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
2050            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing Output pac_data RGB");
2051
2052        if(newRGBpac_data == M4OSA_NULL)
2053        {
2054            M4OSA_TRACE1_0("Allocation error in \
2055                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2056            free(rgbPlane.pac_data);
2057            return M4ERR_ALLOC;
2058        }
2059
2060        output_pac_data= newRGBpac_data;
2061        input_pac_data = rgbPlane.pac_data;
2062
2063        for(i=0;i<rgbPlane.u_height;i++)
2064        {
2065            memcpy((void *)output_pac_data, (void *)input_pac_data,
2066                 (rgbPlane.u_width-1)*2);
2067
2068            output_pac_data += ((rgbPlane.u_width-1)*2);
2069            /* Put the pixel to transparency color */
2070            *output_pac_data++ = transparent1;
2071            *output_pac_data++ = transparent2;
2072
2073            input_pac_data += ((rgbPlane.u_width-1)*2);
2074        }
2075        free(rgbPlane.pac_data);
2076        rgbPlane.pac_data = newRGBpac_data;
2077    }
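    /* After this padding, an odd-sized image has grown by one row and/or one column of
       transparency-key pixels, e.g. a 99-pixel-wide, 75-pixel-high frame becomes 100x76
       without rescaling the original content. */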
2078
2079    /* reset stride */
2080    rgbPlane.u_stride = rgbPlane.u_width*2;
2081
2082    /**
2083     * Initialize chained list parameters */
2084    framingCtx->duration = 0;
2085    framingCtx->previousClipTime = -1;
2086    framingCtx->previewOffsetClipTime = -1;
2087
2088    /**
2089     * Only one element in the chained list (no animated image ...) */
2090    framingCtx->pCurrent = framingCtx;
2091    framingCtx->pNext = framingCtx;
2092
2093    /**
2094     * Get output width/height */
2095     switch(OutputVideoResolution)
2096    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
2097    {
2098    case M4VIDEOEDITING_kSQCIF:
2099        width_out = 128;
2100        height_out = 96;
2101        break;
2102    case M4VIDEOEDITING_kQQVGA:
2103        width_out = 160;
2104        height_out = 120;
2105        break;
2106    case M4VIDEOEDITING_kQCIF:
2107        width_out = 176;
2108        height_out = 144;
2109        break;
2110    case M4VIDEOEDITING_kQVGA:
2111        width_out = 320;
2112        height_out = 240;
2113        break;
2114    case M4VIDEOEDITING_kCIF:
2115        width_out = 352;
2116        height_out = 288;
2117        break;
2118    case M4VIDEOEDITING_kVGA:
2119        width_out = 640;
2120        height_out = 480;
2121        break;
2122    case M4VIDEOEDITING_kWVGA:
2123        width_out = 800;
2124        height_out = 480;
2125        break;
2126    case M4VIDEOEDITING_kNTSC:
2127        width_out = 720;
2128        height_out = 480;
2129        break;
2130    case M4VIDEOEDITING_k640_360:
2131        width_out = 640;
2132        height_out = 360;
2133        break;
2134    case M4VIDEOEDITING_k854_480:
2135        // StageFright encoders require %16 resolution
2136        width_out = M4ENCODER_854_480_Width;
2137        height_out = 480;
2138        break;
2139    case M4VIDEOEDITING_k1280_720:
2140        width_out = 1280;
2141        height_out = 720;
2142        break;
2143    case M4VIDEOEDITING_k1080_720:
2144        // StageFright encoders require %16 resolution
2145        width_out = M4ENCODER_1080_720_Width;
2146        height_out = 720;
2147        break;
2148    case M4VIDEOEDITING_k960_720:
2149        width_out = 960;
2150        height_out = 720;
2151        break;
2152    case M4VIDEOEDITING_k1920_1080:
2153        width_out = 1920;
2154        height_out = M4ENCODER_1920_1080_Height;
2155        break;
2156    /**
2157     * If output video size is not given, we take QCIF size,
2158     * should not happen, because already done in M4xVSS_sendCommand */
2159    default:
2160        width_out = 176;
2161        height_out = 144;
2162        break;
2163    }
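    /* For the 854x480 and 1080x720 cases the nominal width is not a multiple of 16, so the
       widths come from the M4ENCODER_* constants in M4ENCODER_common.h; likewise the
       1920x1080 case takes its height from M4ENCODER_1920_1080_Height. These constants are
       expected to hold the nearest encoder-friendly (multiple-of-16) values. */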
2164
2165    /**
2166     * Allocate output planes structures */
2167    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
2168         (M4OSA_Char *)"Framing Output plane RGB");
2169    if(framingCtx->FramingRgb == M4OSA_NULL)
2170    {
2171        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2172        return M4ERR_ALLOC;
2173    }
2174    /**
2175     * Resize RGB if needed */
2176    if((pEffect->xVSS.bResize) &&
2177         (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
2178    {
2179        width = width_out;
2180        height = height_out;
2181
2182        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2183             New Width and height %d %d  ",width,height);
2184
2185        framingCtx->FramingRgb->u_height = height_out;
2186        framingCtx->FramingRgb->u_width = width_out;
2187        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2188        framingCtx->FramingRgb->u_topleft = 0;
2189
2190        framingCtx->FramingRgb->pac_data =
2191             (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
2192                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
2193                  (M4OSA_Char *)"Framing Output pac_data RGB");
2194
2195        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
2196        {
2197            M4OSA_TRACE1_0("Allocation error in \
2198                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2199            free(framingCtx->FramingRgb);
2200            free(rgbPlane.pac_data);
2201            return M4ERR_ALLOC;
2202        }
2203
2204        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
2205        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2206              rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
2207
2208        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2209        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2210
2211        if(err != M4NO_ERROR)
2212        {
2213            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
2214                error when resizing RGB plane: 0x%x\n", err);
2215            return err;
2216        }
2217
2218        if(rgbPlane.pac_data != M4OSA_NULL)
2219        {
2220            free(rgbPlane.pac_data);
2221            rgbPlane.pac_data = M4OSA_NULL;
2222        }
2223    }
2224    else
2225    {
2226
2227        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2228              Resizing Not Needed ");
2229
2230        width = rgbPlane.u_width;
2231        height = rgbPlane.u_height;
2232        framingCtx->FramingRgb->u_height = height;
2233        framingCtx->FramingRgb->u_width = width;
2234        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2235        framingCtx->FramingRgb->u_topleft = 0;
2236        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
2237    }
2238
2239
2240    if(pEffect->xVSS.bResize)
2241    {
2242        /**
2243         * Force topleft to 0 for pure framing effect */
2244        framingCtx->topleft_x = 0;
2245        framingCtx->topleft_y = 0;
2246    }
2247
2248
2249    /**
2250     * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
2251     effect */
2252    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
2253         (M4OSA_Char *)"Framing Output plane YUV");
2254    if(framingCtx->FramingYuv == M4OSA_NULL)
2255    {
2256        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2257        free(framingCtx->FramingRgb->pac_data);
2258        return M4ERR_ALLOC;
2259    }
2260
2261    // Alloc for Y, U and V planes
2262    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
2263    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
2264    framingCtx->FramingYuv[0].u_topleft = 0;
2265    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
2266    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
2267        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
2268            (M4OSA_Char *)"Alloc for the output Y");
2269    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
2270    {
2271        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2272        free(framingCtx->FramingYuv);
2273        free(framingCtx->FramingRgb->pac_data);
2274        return M4ERR_ALLOC;
2275    }
2276    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
2277    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
2278    framingCtx->FramingYuv[1].u_topleft = 0;
2279    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
2280
2281
2282    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2283        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
2284        (M4OSA_Char *)"Alloc for the output U");
2285    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
2286        free(framingCtx->FramingYuv[0].pac_data);
2287        free(framingCtx->FramingYuv);
2288        free(framingCtx->FramingRgb->pac_data);
2289        return M4ERR_ALLOC;
2290    }
2291
2292    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
2293    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
2294    framingCtx->FramingYuv[2].u_topleft = 0;
2295    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
2296
2297
2298    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2299        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[2].u_height, M4VS,
2300        (M4OSA_Char *)"Alloc for the output V");
2301    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
2302        free(framingCtx->FramingYuv[1].pac_data);
2303        free(framingCtx->FramingYuv[0].pac_data);
2304        free(framingCtx->FramingYuv);
2305        free(framingCtx->FramingRgb->pac_data);
2306        return M4ERR_ALLOC;
2307    }
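    /* Plane geometry used above: the luma dimensions are rounded up to the next even value,
       ((x+1)>>1)<<1, and the chroma planes are half of that in each direction; e.g. width 99
       gives a 100-pixel-wide Y plane and 50-pixel-wide U and V planes. */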
2308
2309    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2310        convert RGB to YUV ");
2311
2312    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2313    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2314
2315    if (err != M4NO_ERROR)
2316    {
2317        M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: error when converting from RGB to YUV: 0x%x\n", err);
2318    }
2319    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
2320    return err;
2321}
2322
2323/**
2324 ******************************************************************************
2325 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2326 *
2327 * @brief    This function prepares VSS for editing
2328 * @note    It also sets the special xVSS effects as external effects for the VSS
2329 * @param    pContext    (IN) The integrator own context
2330 *
2331 * @return    M4NO_ERROR:    No error
2332 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2333 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2334 ******************************************************************************
2335 */
2336M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2337{
2338    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2339    M4VSS3GPP_EditContext pVssCtxt;
2340    M4OSA_UInt32 i,j;
2341    M4OSA_ERR err;
2342
2343    /**
2344     * Create a VSS 3GPP edition instance */
2345    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2346    if (err != M4NO_ERROR)
2347    {
2348        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
2349            err);
2350        M4VSS3GPP_editCleanUp(pVssCtxt);
2351        /**
2352         * Set the VSS context to NULL */
2353        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2354        return err;
2355    }
2356
2357    /* For the MMS use case, the targeted bitrate is written directly into the VSS context */
2358    if(xVSS_context->targetedBitrate != 0)
2359    {
2360        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2361
2362        pVSSContext->bIsMMS = M4OSA_TRUE;
2363        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
2364        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
2365    }
2366
2367    /* Warning: since the addition of the UTF conversion, pSettings has been replaced in the
2368    following code by pCurrentEditSettings (there is a dedicated current editing structure for
2369     the saving, as there is for the preview) */
2370
2371    /**
2372     * Set the external video effect functions, for saving mode (to be moved to
2373      M4xVSS_saveStart() ?)*/
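    /* Each xVSS effect type is mapped below to its external effect callback: the colour-based
       effects (black & white, pink, green, sepia, negative, RGB16, gradient) all share
       M4VSS3GPP_externalVideoEffectColor and rely on a colour context prepared in the
       sendCommand step; the Fifties and framing effects use dedicated callbacks whose contexts
       are also set up in sendCommand; the zoom effects pass the effect type itself as the
       function context. */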
2374    for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2375    {
2376        for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
2377        {
2378            if (M4xVSS_kVideoEffectType_BlackAndWhite ==
2379            xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2380            {
2381                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2382                 M4VSS3GPP_externalVideoEffectColor;
2383                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2384                // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
2385                /*commented FB*/
2386                /**
2387                 * We do not need to set the color context, it is already set
2388                 during sendCommand function */
2389            }
2390            if (M4xVSS_kVideoEffectType_Pink ==
2391                xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2392            {
2393                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2394                 M4VSS3GPP_externalVideoEffectColor;
2395                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2396                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
2397                // use any function context */
2398                /*commented FB*/
2399                /**
2400                 * We do not need to set the color context,
2401                  it is already set during sendCommand function */
2402            }
2403            if (M4xVSS_kVideoEffectType_Green ==
2404                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2405            {
2406                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2407                    M4VSS3GPP_externalVideoEffectColor;
2408                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2409                    // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
2410                     /**< we don't use any function context */
2411                /*commented FB*/
2412                /**
2413                 * We do not need to set the color context, it is already set during
2414                  sendCommand function */
2415            }
2416            if (M4xVSS_kVideoEffectType_Sepia ==
2417                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2418            {
2419                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2420                 M4VSS3GPP_externalVideoEffectColor;
2421                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2422                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
2423                /**< we don't use any function context */
2424                /*commented FB*/
2425                /**
2426                 * We do not need to set the color context, it is already set during
2427                 sendCommand function */
2428            }
2429            if (M4xVSS_kVideoEffectType_Fifties ==
2430             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2431            {
2432                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2433                 M4VSS3GPP_externalVideoEffectFifties;
2434                /**
2435                 * We do not need to set the framing context, it is already set during
2436                 sendCommand function */
2437            }
2438            if (M4xVSS_kVideoEffectType_Negative ==
2439             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2440            {
2441                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2442                 M4VSS3GPP_externalVideoEffectColor;
2443                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2444                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
2445                 /**< we don't use any function context */
2446                /*commented FB*/
2447                /**
2448                 * We do not need to set the color context, it is already set during
2449                  sendCommand function */
2450            }
2451            if (M4xVSS_kVideoEffectType_Framing ==
2452             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2453            {
2454                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2455                 M4VSS3GPP_externalVideoEffectFraming;
2456                /**
2457                 * We do not need to set the framing context, it is already set during
2458                 sendCommand function */
2459            }
2460            if (M4xVSS_kVideoEffectType_ZoomIn ==
2461             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2462            {
2463                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2464                 M4VSS3GPP_externalVideoEffectZoom;
2465                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2466                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
2467                 function context */
2468            }
2469            if (M4xVSS_kVideoEffectType_ZoomOut ==
2470             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2471            {
2472                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2473                 M4VSS3GPP_externalVideoEffectZoom;
2474                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2475                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
2476                 function context */
2477            }
2478            if (M4xVSS_kVideoEffectType_ColorRGB16 ==
2479             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2480            {
2481                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2482                 M4VSS3GPP_externalVideoEffectColor;
2483                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2484                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2485                /**< we don't use any function context */
2486                /**
2487                 * We do not need to set the color context, it is already set during
2488                 sendCommand function */
2489            }
2490            if (M4xVSS_kVideoEffectType_Gradient ==
2491             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2492            {
2493                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2494                 M4VSS3GPP_externalVideoEffectColor;
2495                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2496                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2497                /**< we don't use any function context */
2498                /**
2499                 * We do not need to set the color context, it is already set during
2500                 sendCommand function */
2501            }
2502
2503        }
2504    }
2505
2506    /**
2507     * Open the VSS 3GPP */
2508    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
2509    if (err != M4NO_ERROR)
2510    {
2511        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2512             M4VSS3GPP_editOpen returned 0x%x\n",err);
2513        M4VSS3GPP_editCleanUp(pVssCtxt);
2514        /**
2515         * Set the VSS context to NULL */
2516        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2517        return err;
2518    }
2519
2520    /**
2521     * Save VSS context to be able to close / free VSS later */
2522    xVSS_context->pCurrentEditContext = pVssCtxt;
2523
2524    return M4NO_ERROR;
2525}
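
/* Rough usage sketch (orientation only; the actual sequencing is handled elsewhere in the xVSS
 * state machine, e.g. M4xVSS_saveStart()/M4xVSS_step(), and may differ in detail):
 *
 *     err = M4xVSS_internalGenerateEditedFile(xVSS_context);       // init + open the VSS edit
 *     // ... the edit is then advanced step by step from the step function ...
 *     err = M4xVSS_internalCloseEditedFile(xVSS_context);          // close + free the VSS edit
 *     if (xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
 *         err = M4xVSS_internalGenerateAudioMixFile(xVSS_context); // mix the BGM into the output
 */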
2526
2527/**
2528 ******************************************************************************
2529 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2530 *
2531 * @brief    This function cleans up VSS
2532 * @note
2533 * @param    pContext    (IN) The integrator own context
2534 *
2535 * @return    M4NO_ERROR:    No error
2536 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2537 ******************************************************************************
2538 */
2539M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2540{
2541    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2542    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
2543    M4OSA_ERR err;
2544
2545    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
2546    {
2547        /**
2548         * Close the VSS 3GPP */
2549        err = M4VSS3GPP_editClose(pVssCtxt);
2550        if (err != M4NO_ERROR)
2551        {
2552            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
2553                 M4VSS3GPP_editClose returned 0x%x\n",err);
2554            M4VSS3GPP_editCleanUp(pVssCtxt);
2555            /**
2556             * Set the VSS context to NULL */
2557            xVSS_context->pCurrentEditContext = M4OSA_NULL;
2558            return err;
2559        }
2560
2561        /**
2562         * Free this VSS3GPP edition instance */
2563        err = M4VSS3GPP_editCleanUp(pVssCtxt);
2564        /**
2565         * Set the VSS context to NULL */
2566        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2567        if (err != M4NO_ERROR)
2568        {
2569            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
2570                M4VSS3GPP_editCleanUp returned 0x%x\n",err);
2571            return err;
2572        }
2573    }
2574
2575    return M4NO_ERROR;
2576}
2577
2578/**
2579 ******************************************************************************
2580 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2581 *
2582 * @brief    This function prepares VSS for audio mixing
2583 * @note    It takes its parameters from the BGM settings in the xVSS internal context
2584 * @param    pContext    (IN) The integrator own context
2585 *
2586 * @return    M4NO_ERROR:    No error
2587 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2588 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2589 ******************************************************************************
2590 */
2591/***
2592 * FB: the function has been modified because the structure used for saving is now
2593 *  pCurrentEditSettings rather than pSettings
2594 * This change was added for UTF support
2595 * The "xVSS_context->pSettings" references have been replaced by "xVSS_context->pCurrentEditSettings"
2596 ***/
2597M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2598{
2599    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2600    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
2601    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
2602    M4OSA_ERR err;
2603    M4VIDEOEDITING_ClipProperties fileProperties;
2604
2605    /**
2606     * Allocate audio mixing settings structure and fill it with BGM parameters */
2607    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
2608        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
2609    if(pAudioMixSettings == M4OSA_NULL)
2610    {
2611        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
2612        return M4ERR_ALLOC;
2613    }
2614
2615    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
2616         M4VIDEOEDITING_kFileType_3GPP)
2617    {
2618        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
2619             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
2620                 &fileProperties);
2621        if(err != M4NO_ERROR)
2622        {
2623            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2624                 impossible to retrieve audio BGM properties (err 0x%x) ->\
2625                     re-encoding audio background music", err);
2626            fileProperties.AudioStreamType =
2627                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
2628                  /* To force BGM encoding */
2629        }
2630    }
2631
2632    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
2633    pAudioMixSettings->AddedAudioFileType =
2634     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
2635    pAudioMixSettings->pAddedAudioTrackFile =
2636     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
2637    pAudioMixSettings->uiAddVolume =
2638     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
2639
2640    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
2641    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2642    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
2643    pAudioMixSettings->uiSamplingFrequency =
2644     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
2645    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
2646
2647    pAudioMixSettings->b_DuckingNeedeed =
2648     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
2649    pAudioMixSettings->fBTVolLevel =
2650     (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
2651    pAudioMixSettings->InDucking_threshold =
2652     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
2653    pAudioMixSettings->InDucking_lowVolume =
2654     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
2655    pAudioMixSettings->fPTVolLevel =
2656     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
2657    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
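    /* Note: uiAddVolume, lowVolume and PTVolLevel are expressed as 0..100 values in the
       settings; the divisions by 100 above produce the 0.0..1.0 float levels (fBTVolLevel,
       InDucking_lowVolume, fPTVolLevel) used by the audio mixer. */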
2658
2659    if(xVSS_context->pSettings->xVSS.bAudioMono)
2660    {
2661        pAudioMixSettings->outputNBChannels = 1;
2662    }
2663    else
2664    {
2665        pAudioMixSettings->outputNBChannels = 2;
2666    }
2667
2668    /**
2669     * Fill audio mix settings with BGM parameters */
2670    pAudioMixSettings->uiBeginLoop =
2671     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
2672    pAudioMixSettings->uiEndLoop =
2673     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
2674    pAudioMixSettings->uiAddCts =
2675     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
2676
2677    /**
2678     * Output file of the audio mixer will be final file (audio mixing is the last step) */
2679    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
2680    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
2681
2682    /**
2683     * Input file of the audio mixer is a temporary file containing all audio/video editions */
2684    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
2685
2686    /**
2687     * Save audio mixing settings pointer to be able to free it in
2688     M4xVSS_internalCloseAudioMixedFile function */
2689    xVSS_context->pAudioMixSettings = pAudioMixSettings;
2690
2691    /**
2692     * Create a VSS 3GPP audio mixing instance */
2693    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
2694         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2695
2696    /**
2697     * Save audio mixing context to be able to call audio mixing step function in
2698      M4xVSS_step function */
2699    xVSS_context->pAudioMixContext = pAudioMixingCtxt;
2700
2701    if (err != M4NO_ERROR)
2702    {
2703        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2704             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
2705        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
2706        return err;
2707    }
2708
2709    return M4NO_ERROR;
2710}
2711
2712/**
2713 ******************************************************************************
2714 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2715 *
2716 * @brief    This function cleans up VSS for audio mixing
2717 * @note
2718 * @param    pContext    (IN) The integrator own context
2719 *
2720 * @return    M4NO_ERROR:    No error
2721 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2722 ******************************************************************************
2723 */
2724M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2725{
2726    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2727    M4OSA_ERR err;
2728
2729    /**
2730     * Free this VSS3GPP audio mixing instance */
2731    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
2732    {
2733        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
2734        if (err != M4NO_ERROR)
2735        {
2736            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
2737                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
2738            return err;
2739        }
2740    }
2741
2742    /**
2743     * Free VSS audio mixing settings */
2744    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
2745    {
2746        free(xVSS_context->pAudioMixSettings);
2747        xVSS_context->pAudioMixSettings = M4OSA_NULL;
2748    }
2749
2750    return M4NO_ERROR;
2751}
2752
2753/**
2754 ******************************************************************************
2755 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2756 *
2757 * @brief    This function cleans up the preview edition structure used to generate the
2758 *            preview.3gp file given to the VPS
2759 * @note    It also frees the preview structure given to the VPS
2760 * @param    pContext    (IN) The integrator own context
2761 *
2762 * @return    M4NO_ERROR:    No error
2763 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2764 ******************************************************************************
2765 */
2766M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2767{
2768    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2769    M4OSA_UInt8 i;
2770
2771    /**
2772     * Free clip/transition settings */
2773    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2774    {
2775        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2776
2777        free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2778        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2779
2780        /**
2781         * Because there is 1 less transition than clip number */
2782        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2783        {
2784            free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2785            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2786        }
2787    }
2788
2789    /**
2790     * Free clip/transition list */
2791    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2792    {
2793        free((xVSS_context->pCurrentEditSettings->pClipList));
2794        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2795    }
2796    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2797    {
2798        free((xVSS_context->pCurrentEditSettings->pTransitionList));
2799        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2800    }
2801
2802    /**
2803     * Free output preview file path */
2804    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2805    {
2806        free(xVSS_context->pCurrentEditSettings->pOutputFile);
2807        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2808    }
2809
2810    /**
2811     * Free temporary preview file path */
2812    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2813    {
2814        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2815        free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2816        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2817    }
2818
2819    /**
2820     * Free "local" BGM settings */
2821    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2822    {
2823        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2824        {
2825            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2826            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2827        }
2828        free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2829        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2830    }
2831
2832    /**
2833     * Free current edit settings structure */
2834    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2835    {
2836        free(xVSS_context->pCurrentEditSettings);
2837        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2838    }
2839
2840    /**
2841     * Free preview effects given to application */
2842    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
2843    {
2844        free(xVSS_context->pPreviewSettings->Effects);
2845        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
2846        xVSS_context->pPreviewSettings->nbEffects = 0;
2847    }
2848
2849    return M4NO_ERROR;
2850}
2851
2852
2853/**
2854 ******************************************************************************
2855 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2856 *
2857 * @brief    This function cleans up the saving edition structure used to generate the
2858 *            output.3gp file given to the VPS
2859 * @note
2860 * @param    pContext    (IN) The integrator own context
2861 *
2862 * @return    M4NO_ERROR:    No error
2863 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2864 ******************************************************************************
2865 */
2866M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2867{
2868    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2869    M4OSA_UInt8 i;
2870
2871    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2872    {
2873        /**
2874         * Free clip/transition settings */
2875        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2876        {
2877            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2878
2879            free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2880            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2881
2882            /**
2883             * Because there is 1 less transition than clip number */
2884            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2885            {
2886                free(\
2887                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2888                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2889            }
2890        }
2891
2892        /**
2893         * Free clip/transition list */
2894        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2895        {
2896            free((xVSS_context->pCurrentEditSettings->pClipList));
2897            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2898        }
2899        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2900        {
2901            free((xVSS_context->pCurrentEditSettings->pTransitionList));
2902            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2903        }
2904
2905        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
2906        {
2907            free((xVSS_context->pCurrentEditSettings->Effects));
2908            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
2909            xVSS_context->pCurrentEditSettings->nbEffects = 0;
2910        }
2911
2912        /**
2913         * Free output saving file path */
2914        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2915        {
2916            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2917            {
2918                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
2919                free(xVSS_context->pCurrentEditSettings->pOutputFile);
2920            }
2921            if(xVSS_context->pOutputFile != M4OSA_NULL)
2922            {
2923                free(xVSS_context->pOutputFile);
2924                xVSS_context->pOutputFile = M4OSA_NULL;
2925            }
2926            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
2927            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2928        }
2929
2930        /**
2931         * Free temporary saving file path */
2932        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2933        {
2934            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2935            free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2936            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2937        }
2938
2939        /**
2940         * Free "local" BGM settings */
2941        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2942        {
2943            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2944            {
2945                free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2946                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2947            }
2948            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2949            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2950        }
2951
2952        /**
2953         * Free current edit settings structure */
2954        free(xVSS_context->pCurrentEditSettings);
2955        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2956    }
2957
2958    return M4NO_ERROR;
2959}
2960
2961
2962/**
2963 ******************************************************************************
2964 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2965 *
2966 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
2967 * @note
2968 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
2969 *
2970 * @return    M4NO_ERROR:    No error
2971 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2972 ******************************************************************************
2973 */
2974M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2975{
2976    M4OSA_UInt8 i,j;
2977
2978    /**
2979     * For each clip ... */
2980    for(i=0; i<pSettings->uiClipNumber; i++)
2981    {
2982        /**
2983         * ... free clip settings */
2984        if(pSettings->pClipList[i] != M4OSA_NULL)
2985        {
2986            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
2987
2988            free((pSettings->pClipList[i]));
2989            pSettings->pClipList[i] = M4OSA_NULL;
2990        }
2991
2992        /**
2993         * ... free transition settings */
2994        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
2995        {
2996            if(pSettings->pTransitionList[i] != M4OSA_NULL)
2997            {
2998                switch (pSettings->pTransitionList[i]->VideoTransitionType)
2999                {
3000                    case M4xVSS_kVideoTransitionType_AlphaMagic:
3001
3002                        /**
3003                         * In case of Alpha Magic transition,
3004                          some extra parameters need to be freed */
3005                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
3006                             != M4OSA_NULL)
3007                        {
3008                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3009                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3010                                    pPlane->pac_data));
3011                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
3012                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
3013
3014                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3015                                pSettings->pTransitionList[i]->\
3016                                    pExtVideoTransitionFctCtxt)->pPlane));
3017                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
3018                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
3019
3020                            free((pSettings->pTransitionList[i]->\
3021                                pExtVideoTransitionFctCtxt));
3022                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3023
3024                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
3025                            {
3026                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
3027                                {
3028                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
3029                                     M4xVSS_kVideoTransitionType_AlphaMagic)
3030                                    {
3031                                        M4OSA_UInt32 pCmpResult=0;
3032                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
3033                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3034                                                pAlphaFilePath,
3035                                                (const char *)pSettings->pTransitionList[j]->\
3036                                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3037                                                pAlphaFilePath);
3038                                        if(pCmpResult == 0)
3039                                        {
3040                                            /* Free the extra internal alpha magic structure and set
3041                                            it to NULL so it is not freed twice */
3042                                            free((pSettings->\
3043                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
3044                                            pSettings->pTransitionList[j]->\
3045                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
3046                                        }
3047                                    }
3048                                }
3049                            }
3050                        }
3051
3052                        if(pSettings->pTransitionList[i]->\
3053                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
3054                        {
3055                            if(pSettings->pTransitionList[i]->\
3056                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3057                                    pAlphaFilePath != M4OSA_NULL)
3058                            {
3059                                free(pSettings->\
3060                                    pTransitionList[i]->\
3061                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3062                                            pAlphaFilePath);
3063                                pSettings->pTransitionList[i]->\
3064                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
3065                                        pAlphaFilePath = M4OSA_NULL;
3066                            }
3067                            free(pSettings->pTransitionList[i]->\
3068                                xVSS.transitionSpecific.pAlphaMagicSettings);
3069                            pSettings->pTransitionList[i]->\
3070                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
3071
3072                        }
3073
3074                    break;
3075
3076
3077                    case M4xVSS_kVideoTransitionType_SlideTransition:
3078                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
3079                            xVSS.transitionSpecific.pSlideTransitionSettings)
3080                        {
3081                            free(pSettings->pTransitionList[i]->\
3082                                xVSS.transitionSpecific.pSlideTransitionSettings);
3083                            pSettings->pTransitionList[i]->\
3084                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
3085                        }
3086                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
3087                        {
3088                            free((pSettings->pTransitionList[i]->\
3089                                pExtVideoTransitionFctCtxt));
3090                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3091                        }
3092                    break;
3093                    default:
3094                    break;
3095
3096                }
3097                /**
3098                 * Free transition settings structure */
3099                free((pSettings->pTransitionList[i]));
3100                pSettings->pTransitionList[i] = M4OSA_NULL;
3101            }
3102        }
3103    }
3104
3105    /**
3106     * Free clip list */
3107    if(pSettings->pClipList != M4OSA_NULL)
3108    {
3109        free((pSettings->pClipList));
3110        pSettings->pClipList = M4OSA_NULL;
3111    }
3112
3113    /**
3114     * Free transition list */
3115    if(pSettings->pTransitionList != M4OSA_NULL)
3116    {
3117        free((pSettings->pTransitionList));
3118        pSettings->pTransitionList = M4OSA_NULL;
3119    }
3120
3121    /**
3122     * RC: Free effects list */
3123    if(pSettings->Effects != M4OSA_NULL)
3124    {
3125        for(i=0; i<pSettings->nbEffects; i++)
3126        {
3127            /**
3128             * For each clip, free framing structure if needed */
3129            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
3130                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
3131            {
3132#ifdef DECODE_GIF_ON_SAVING
3133                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3134#else
3135                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3136                M4xVSS_FramingStruct* framingCtx_save;
3137                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
3138#endif
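                /* Two builds are handled here: with DECODE_GIF_ON_SAVING the framing context
                   wraps per-frame sub-contexts (aFramingCtx / aFramingCtx_last) that are freed
                   below; otherwise the framing structures form a chained list walked via pNext. */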
3139
3140#ifdef DECODE_GIF_ON_SAVING
3141                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3142                 non-existent pointer */
3143                {
3144                    if(framingCtx->aFramingCtx != M4OSA_NULL)
3145                    {
3146                        {
3147                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
3148                            {
3149                                free(framingCtx->aFramingCtx->\
3150                                    FramingRgb->pac_data);
3151                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
3152                                free(framingCtx->aFramingCtx->FramingRgb);
3153                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
3154                            }
3155                        }
3156                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
3157                        {
3158                            free(framingCtx->aFramingCtx->\
3159                                FramingYuv[0].pac_data);
3160                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3161                            free(framingCtx->aFramingCtx->\
3162                                FramingYuv[1].pac_data);
3163                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
3164                            free(framingCtx->aFramingCtx->\
3165                                FramingYuv[2].pac_data);
3166                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
3167                            free(framingCtx->aFramingCtx->FramingYuv);
3168                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
3169                        }
3170                        free(framingCtx->aFramingCtx);
3171                        framingCtx->aFramingCtx = M4OSA_NULL;
3172                    }
3173                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
3174                    {
3175                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
3176                        {
3177                            free(framingCtx->aFramingCtx_last->\
3178                                FramingRgb->pac_data);
3179                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
3180                            free(framingCtx->aFramingCtx_last->\
3181                                FramingRgb);
3182                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
3183                        }
3184                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
3185                        {
3186                            free(framingCtx->aFramingCtx_last->\
3187                                FramingYuv[0].pac_data);
3188                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
3189                            free(framingCtx->aFramingCtx_last->FramingYuv);
3190                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
3191                        }
3192                        free(framingCtx->aFramingCtx_last);
3193                        framingCtx->aFramingCtx_last = M4OSA_NULL;
3194                    }
3195                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
3196                    {
3197                        free(framingCtx->pEffectFilePath);
3198                        framingCtx->pEffectFilePath = M4OSA_NULL;
3199                    }
3200                    /* In case it is still allocated */
3201                    if(framingCtx->pSPSContext != M4OSA_NULL)
3202                    {
3203                    //    M4SPS_destroy(framingCtx->pSPSContext);
3204                        framingCtx->pSPSContext = M4OSA_NULL;
3205                    }
3206                    /*Alpha blending structure*/
3207                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
3208                    {
3209                        free(framingCtx->alphaBlendingStruct);
3210                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
3211                    }
3212
3213                    free(framingCtx);
3214                    framingCtx = M4OSA_NULL;
3215                }
3216#else
3217                do
3218                {
3219                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3220                    non-existent pointer */
3221                    {
3222                        if(framingCtx->FramingRgb != M4OSA_NULL)
3223                        {
3224                            free(framingCtx->FramingRgb->pac_data);
3225                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
3226                            free(framingCtx->FramingRgb);
3227                            framingCtx->FramingRgb = M4OSA_NULL;
3228                        }
3229                        if(framingCtx->FramingYuv != M4OSA_NULL)
3230                        {
3231                            free(framingCtx->FramingYuv[0].pac_data);
3232                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3233                            free(framingCtx->FramingYuv);
3234                            framingCtx->FramingYuv = M4OSA_NULL;
3235                        }
3236                        framingCtx_save = framingCtx->pNext;
3237                        free(framingCtx);
3238                        framingCtx = M4OSA_NULL;
3239                        framingCtx = framingCtx_save;
3240                    }
3241                    else
3242                    {
3243                        /*FB: bug fix P4ME00003002*/
3244                        break;
3245                    }
3246                } while(framingCtx_first != framingCtx);
3247#endif
3248            }
3249            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
3250            {
3251                /* Free Fifties context */
3252                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3253
3254                if(FiftiesCtx != M4OSA_NULL)
3255                {
3256                    free(FiftiesCtx);
3257                    FiftiesCtx = M4OSA_NULL;
3258                }
3259
3260            }
3261            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
3262                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
3263                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
3264                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
3265                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
3266                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
3267                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
3268            {
3269                /* Free Color context */
3270                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3271
3272                if(ColorCtx != M4OSA_NULL)
3273                {
3274                    free(ColorCtx);
3275                    ColorCtx = M4OSA_NULL;
3276                }
3277            }
3278
3279            /* Free simple fields */
3280            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
3281            {
3282                free(pSettings->Effects[i].xVSS.pFramingFilePath);
3283                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
3284            }
3285            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
3286            {
3287                free(pSettings->Effects[i].xVSS.pFramingBuffer);
3288                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
3289            }
3290            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
3291            {
3292                free(pSettings->Effects[i].xVSS.pTextBuffer);
3293                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
3294            }
3295        }
3296        free(pSettings->Effects);
3297        pSettings->Effects = M4OSA_NULL;
3298    }
3299
3300    return M4NO_ERROR;
3301}
3302
3303M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
3304{
3305    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3306//    M4OSA_UInt8 i,j;
3307
3308    /* Free "local" BGM settings */
3309    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3310    {
3311        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3312        {
3313            free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
3314            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3315        }
3316        free(xVSS_context->pSettings->xVSS.pBGMtrack);
3317        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
3318    }
3319
3320    M4xVSS_freeSettings(xVSS_context->pSettings);
3321
3322    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
3323    {
3324        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
3325        M4xVSS_Pto3GPP_params* pParams_sauv;
3326
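        /* Walk the chained list of Pto3GPP (picture-to-3GP) parameters: delete the temporary
           files that were produced and free every node of the list. */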
3327        while(pParams != M4OSA_NULL)
3328        {
3329            if(pParams->pFileIn != M4OSA_NULL)
3330            {
3331                free(pParams->pFileIn);
3332                pParams->pFileIn = M4OSA_NULL;
3333            }
3334            if(pParams->pFileOut != M4OSA_NULL)
3335            {
3336                /* Delete temporary file */
3337                remove((const char *)pParams->pFileOut);
3338                free(pParams->pFileOut);
3339                pParams->pFileOut = M4OSA_NULL;
3340            }
3341            if(pParams->pFileTemp != M4OSA_NULL)
3342            {
3343                /* Delete temporary file */
3344#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3345                remove((const char *)pParams->pFileTemp);
3346                free(pParams->pFileTemp);
3347#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3348                pParams->pFileTemp = M4OSA_NULL;
3349            }
3350            pParams_sauv = pParams;
3351            pParams = pParams->pNext;
3352            free(pParams_sauv);
3353            pParams_sauv = M4OSA_NULL;
3354        }
3355    }
3356
3357    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
3358    {
3359        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
3360        M4xVSS_MCS_params* pParams_sauv;
3361
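        /* Same clean-up for the chained list of MCS (transcoder) parameters. */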
3362        while(pParams != M4OSA_NULL)
3363        {
3364            if(pParams->pFileIn != M4OSA_NULL)
3365            {
3366                free(pParams->pFileIn);
3367                pParams->pFileIn = M4OSA_NULL;
3368            }
3369            if(pParams->pFileOut != M4OSA_NULL)
3370            {
3371                /* Delete temporary file */
3372                remove((const char *)pParams->pFileOut);
3373                free(pParams->pFileOut);
3374                pParams->pFileOut = M4OSA_NULL;
3375            }
3376            if(pParams->pFileTemp != M4OSA_NULL)
3377            {
3378                /* Delete temporary file */
3379#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3380                remove((const char *)pParams->pFileTemp);
3381                free(pParams->pFileTemp);
3382#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3383                pParams->pFileTemp = M4OSA_NULL;
3384            }
3385            pParams_sauv = pParams;
3386            pParams = pParams->pNext;
3387            free(pParams_sauv);
3388            pParams_sauv = M4OSA_NULL;
3389        }
3390    }
3391
3392    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
3393    {
3394        free(xVSS_context->pcmPreviewFile);
3395        xVSS_context->pcmPreviewFile = M4OSA_NULL;
3396    }
3397    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
3398        && xVSS_context->pOutputFile != M4OSA_NULL)
3399    {
3400        free(xVSS_context->pSettings->pOutputFile);
3401        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3402        xVSS_context->pOutputFile = M4OSA_NULL;
3403    }
3404
3405    /* Reinit all context variables */
3406    xVSS_context->previousClipNumber = 0;
3407    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
3408    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
3409    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
3410    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
3411    xVSS_context->pMCSparamsList = M4OSA_NULL;
3412    xVSS_context->pMCScurrentParams = M4OSA_NULL;
3413    xVSS_context->tempFileIndex = 0;
3414    xVSS_context->targetedTimescale = 0;
3415
3416    return M4NO_ERROR;
3417}
3418
3419/**
3420 ******************************************************************************
3421 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
3422 *                                    M4OSA_Char* pFile,
3423 *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
3424 *
3425 * @brief    This function retrieves the properties of an input 3GP file using the MCS
3426 * @note
3427 * @param    pContext        (IN) The integrator own context
3428 * @param    pFile            (IN) 3GP file to analyse
3429 * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
3430 *                            the 3GP file properties
3431 *
3432 * @return    M4NO_ERROR:    No error
3433 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3434 ******************************************************************************
3435 */
3436M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
3437                                       M4VIDEOEDITING_ClipProperties *pFileProperties)
3438{
3439    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3440    M4OSA_ERR err;
3441    M4MCS_Context mcs_context;
3442
3443    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
3444    if(err != M4NO_ERROR)
3445    {
3446        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
3447        return err;
3448    }
3449
3450    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
3451    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
3452        M4OSA_NULL, M4OSA_NULL);
3453    if (err != M4NO_ERROR)
3454    {
3455        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
3456        M4MCS_abort(mcs_context);
3457        return err;
3458    }
3459
3460    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
3461    if(err != M4NO_ERROR)
3462    {
3463        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
3464        M4MCS_abort(mcs_context);
3465        return err;
3466    }
3467
3468    err = M4MCS_abort(mcs_context);
3469    if (err != M4NO_ERROR)
3470    {
3471        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
3472        return err;
3473    }
3474
3475    return M4NO_ERROR;
3476}
3477
3478
3479/**
3480 ******************************************************************************
3481 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3482 *                    M4VSS3GPP_EditSettings* pSettings, M4OSA_UInt32* pTargetedTimeScale)
3483 *
3484 * @brief    This function retrieves the targeted time scale
3485 * @note
3486 * @param    pContext            (IN)    The integrator own context
3487 * @param    pSettings            (IN)    Edit settings to inspect
 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
3488 *
3489 * @return    M4NO_ERROR:    No error
3490 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3491 ******************************************************************************
3492 */
3493M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3494                                                 M4VSS3GPP_EditSettings* pSettings,
3495                                                  M4OSA_UInt32* pTargetedTimeScale)
3496{
3497    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3498    M4OSA_ERR err;
3499    M4OSA_UInt32 totalDuration = 0;
3500    M4OSA_UInt8 i = 0;
3501    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
3502
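    /* Keep the video time scale of the MPEG-4 clip with the longest cut duration; an ARGB8888
       (picture) clip forces the Pto3GPP time scale of 30 and ends the search immediately. */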
3503    for(i=0;i<pSettings->uiClipNumber;i++)
3504    {
3505        /*search timescale only in mpeg4 case*/
3506        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
3507            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
3508            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
3509        {
3510            M4VIDEOEDITING_ClipProperties fileProperties;
3511
3512            /*UTF conversion support*/
3513            M4OSA_Char* pDecodedPath = M4OSA_NULL;
3514
3515            /**
3516            * UTF conversion: convert into the customer format, before being used*/
3517            pDecodedPath = pSettings->pClipList[i]->pFile;
3518
3519            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
3520                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
3521            {
3522                M4OSA_UInt32 length = 0;
3523                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
3524                     (M4OSA_Void*) pSettings->pClipList[i]->pFile,
3525                        (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
3526                             &length);
3527                if(err != M4NO_ERROR)
3528                {
3529                    M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3530                         M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
3531                    return err;
3532                }
3533                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
3534            }
3535
3536            /*End of the conversion: use the decoded path*/
3537            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
3538
3539            /*get input file properties*/
3540            /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
3541                pClipList[i]->pFile, &fileProperties);*/
3542            if(M4NO_ERROR != err)
3543            {
3544                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3545                     M4xVSS_internalGetProperties returned: 0x%x", err);
3546                return err;
3547            }
3548            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
3549            {
3550                if(pSettings->pClipList[i]->uiEndCutTime > 0)
3551                {
3552                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
3553                        - pSettings->pClipList[i]->uiBeginCutTime))
3554                    {
3555                        tempTimeScale = fileProperties.uiVideoTimeScale;
3556                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
3557                             - pSettings->pClipList[i]->uiBeginCutTime);
3558                    }
3559                }
3560                else
3561                {
3562                    if(tempDuration < (fileProperties.uiClipDuration\
3563                         - pSettings->pClipList[i]->uiBeginCutTime))
3564                    {
3565                        tempTimeScale = fileProperties.uiVideoTimeScale;
3566                        tempDuration = (fileProperties.uiClipDuration\
3567                             - pSettings->pClipList[i]->uiBeginCutTime);
3568                    }
3569                }
3570            }
3571        }
3572        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
3573        {
3574            /*the timescale is 30 for PTO3GP*/
3575            *pTargetedTimeScale = 30;
3576            return M4NO_ERROR;
3577
3578        }
3579    }
3580
3581    if(tempTimeScale >= 30)/*Define a minimum time scale, otherwise if the time scale is not
3582    high enough, there will be an infinite loop in the shell encoder*/
3583    {
3584        *pTargetedTimeScale = tempTimeScale;
3585    }
3586    else
3587    {
3588        *pTargetedTimeScale = 30;
3589    }
3590
3591    return M4NO_ERROR;
3592}
3593
3594
3595/**
3596 ******************************************************************************
3597 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3598 *                                                    M4VIFI_ImagePlane *PlaneIn,
3599 *                                                    M4VIFI_ImagePlane *PlaneOut,
3600 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3601 *                                                    M4OSA_UInt32 uiEffectKind)
3602 *
3603 * @brief    This function applies a color effect on an input YUV420 planar frame
3604 * @note
3605 * @param    pFunctionContext(IN) Color effect settings (M4xVSS_ColorStruct: effect type and RGB565 color)
3606 * @param    PlaneIn            (IN) Input YUV420 planar
3607 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3608 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3609 * @param    uiEffectKind    (IN) Unused
3610 *
3611 * @return    M4VIFI_OK:    No error
3612 ******************************************************************************
3613 */
3614M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3615                                             M4VIFI_ImagePlane *PlaneIn,
3616                                             M4VIFI_ImagePlane *PlaneOut,
3617                                             M4VSS3GPP_ExternalProgress *pProgress,
3618                                             M4OSA_UInt32 uiEffectKind)
3619{
3620    M4VIFI_Int32 plane_number;
3621    M4VIFI_UInt32 i,j;
3622    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
3623    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
3624
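    /* Process the three YUV420 planes line by line: the chroma planes (1 = U, 2 = V) are
       rewritten according to the selected effect (constant values, values derived from an RGB565
       color, or a plain copy), while the luma plane (0) is copied unchanged, except for the
       negative effect which inverts it. */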
3625    for (plane_number = 0; plane_number < 3; plane_number++)
3626    {
3627        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
3628        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
3629        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
3630        {
3631            /**
3632             * Chrominance */
3633            if(plane_number==1 || plane_number==2)
3634            {
3635                //switch ((M4OSA_UInt32)pFunctionContext)
3636                // commented out because a structure for the effects context exists
3637                switch (ColorContext->colorEffectType)
3638                {
3639                    case M4xVSS_kVideoEffectType_BlackAndWhite:
3640                        memset((void *)p_buf_dest,128,
3641                         PlaneIn[plane_number].u_width);
3642                        break;
3643                    case M4xVSS_kVideoEffectType_Pink:
3644                        memset((void *)p_buf_dest,255,
3645                         PlaneIn[plane_number].u_width);
3646                        break;
3647                    case M4xVSS_kVideoEffectType_Green:
3648                        memset((void *)p_buf_dest,0,
3649                         PlaneIn[plane_number].u_width);
3650                        break;
3651                    case M4xVSS_kVideoEffectType_Sepia:
3652                        if(plane_number==1)
3653                        {
3654                            memset((void *)p_buf_dest,117,
3655                             PlaneIn[plane_number].u_width);
3656                        }
3657                        else
3658                        {
3659                            memset((void *)p_buf_dest,139,
3660                             PlaneIn[plane_number].u_width);
3661                        }
3662                        break;
3663                    case M4xVSS_kVideoEffectType_Negative:
3664                        memcpy((void *)p_buf_dest,
3665                         (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3666                        break;
3667
3668                    case M4xVSS_kVideoEffectType_ColorRGB16:
3669                        {
3670                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3671
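                            /* rgb16ColorData is RGB565: bits 4..0 = B (5 bits),
                               bits 10..5 = G (6 bits), bits 15..11 = R (5 bits);
                               U16()/V16() then derive the chroma values from these components */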
3672                            /*first get the r, g, b*/
3673                            b = (ColorContext->rgb16ColorData &  0x001f);
3674                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3675                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3676
3677                            /*keep y, but replace u and v*/
3678                            if(plane_number==1)
3679                            {
3680                                /*then convert to u*/
3681                                u = U16(r, g, b);
3682                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3683                                 PlaneIn[plane_number].u_width);
3684                            }
3685                            if(plane_number==2)
3686                            {
3687                                /*then convert to v*/
3688                                v = V16(r, g, b);
3689                                memset((void *)p_buf_dest, (M4OSA_UInt8)v,
3690                                 PlaneIn[plane_number].u_width);
3691                            }
3692                        }
3693                        break;
3694                    case M4xVSS_kVideoEffectType_Gradient:
3695                        {
3696                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3697
3698                            /*first get the r, g, b*/
3699                            b = (ColorContext->rgb16ColorData &  0x001f);
3700                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3701                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3702
3703                            /*for color gradation*/
3704                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
3705                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
3706                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
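                            /* each component loses i/u_height of its value as the row index i
                               grows: full color on the first row of the plane, fading linearly
                               towards 0 on the last row */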
3707
3708                            /*keep y, but replace u and v*/
3709                            if(plane_number==1)
3710                            {
3711                                /*then convert to u*/
3712                                u = U16(r, g, b);
3713                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3714                                 PlaneIn[plane_number].u_width);
3715                            }
3716                            if(plane_number==2)
3717                            {
3718                                /*then convert to v*/
3719                                v = V16(r, g, b);
3720                                memset((void *)p_buf_dest,(M4OSA_UInt8)v,
3721                                 PlaneIn[plane_number].u_width);
3722                            }
3723                        }
3724                        break;
3725                    default:
3726                        break;
3727                }
3728            }
3729            /**
3730             * Luminance */
3731            else
3732            {
3733                //switch ((M4OSA_UInt32)pFunctionContext)
3734                // commented out because a structure for the effects context exists
3735                switch (ColorContext->colorEffectType)
3736                {
3737                case M4xVSS_kVideoEffectType_Negative:
3738                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
3739                    {
3740                            p_buf_dest[j] = 255 - p_buf_src[j];
3741                    }
3742                    break;
3743                default:
3744                    memcpy((void *)p_buf_dest,
3745                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3746                    break;
3747                }
3748            }
3749            p_buf_src += PlaneIn[plane_number].u_stride;
3750            p_buf_dest += PlaneOut[plane_number].u_stride;
3751        }
3752    }
3753
3754    return M4VIFI_OK;
3755}
3756
3757/**
3758 ******************************************************************************
3759 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
3760 *                                                    M4VIFI_ImagePlane *PlaneIn,
3761 *                                                    M4VIFI_ImagePlane *PlaneOut,
3762 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3763 *                                                    M4OSA_UInt32 uiEffectKind)
3764 *
3765 * @brief    This function adds a fixed or animated image on an input YUV420 planar frame
3766 * @note
3767 * @param    userData        (IN) Framing context (M4xVSS_FramingStruct, or M4xVSS_FramingContext when DECODE_GIF_ON_SAVING is defined)
3768 * @param    PlaneIn            (IN) Input YUV420 planar
3769 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3770 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
3771 * @param    uiEffectKind    (IN) Unused
3772 *
3773 * @return    M4VIFI_OK:    No error
3774 ******************************************************************************
3775 */
3776M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3777                                                M4VIFI_ImagePlane PlaneIn[3],
3778                                                M4VIFI_ImagePlane *PlaneOut,
3779                                                M4VSS3GPP_ExternalProgress *pProgress,
3780                                                M4OSA_UInt32 uiEffectKind )
3781{
3782    M4VIFI_UInt32 x,y;
3783
3784    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
3785    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
3786    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
3787
3788    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
3789    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
3790    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
3791
3792    M4VIFI_UInt8 *p_out0;
3793    M4VIFI_UInt8 *p_out1;
3794    M4VIFI_UInt8 *p_out2;
3795
3796    M4VIFI_UInt32 topleft[2];
3797
3798    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
3799    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
3800
3801#ifndef DECODE_GIF_ON_SAVING
3802    Framing = (M4xVSS_FramingStruct *)userData;
3803    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
3804    FramingRGB = Framing->FramingRgb->pac_data;
3805#endif /*DECODE_GIF_ON_SAVING*/
3806
3807    /*FB*/
3808#ifdef DECODE_GIF_ON_SAVING
3809    M4OSA_ERR err;
3810    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
3811    currentFraming = (M4xVSS_FramingStruct *)Framing;
3812    FramingRGB = Framing->FramingRgb->pac_data;
3813#endif /*DECODE_GIF_ON_SAVING*/
3814    /*end FB*/
3815
3816    /**
3817     * Initialize input / output plane pointers */
3818    p_in_Y += PlaneIn[0].u_topleft;
3819    p_in_U += PlaneIn[1].u_topleft;
3820    p_in_V += PlaneIn[2].u_topleft;
3821
3822    p_out0 = PlaneOut[0].pac_data;
3823    p_out1 = PlaneOut[1].pac_data;
3824    p_out2 = PlaneOut[2].pac_data;
3825
3826    /**
3827     * Depending on time, initialize Framing frame to use */
3828    if(Framing->previousClipTime == -1)
3829    {
3830        Framing->previousClipTime = pProgress->uiOutputTime;
3831    }
3832
3833    /**
3834     * If the current clip time has reached the duration of one frame of the framing picture,
3835     * we need to step to the next framing picture */
3836
3837    Framing->previousClipTime = pProgress->uiOutputTime;
3838    FramingRGB = currentFraming->FramingRgb->pac_data;
3839    topleft[0] = currentFraming->topleft_x;
3840    topleft[1] = currentFraming->topleft_y;
3841
3842    for( x=0 ;x < PlaneIn[0].u_height ; x++)
3843    {
3844        for( y=0 ;y < PlaneIn[0].u_width ; y++)
3845        {
3846            /**
3847             * To handle framing with input size != output size
3848             * Framing is applied if the coordinates match between framing/topleft and the input plane */
3849            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
3850                y >= topleft[0] &&
3851                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
3852                x >= topleft[1])
3853            {
3854                /*Alpha blending support*/
3855                M4OSA_Float alphaBlending = 1;
3856                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
3857                 (M4xVSS_internalEffectsAlphaBlending*)\
3858                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
3859
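                /* The framing alpha follows three phases of uiProgress (0-1000):
                   fade-in  : [0, m_fadeInTime*10)            -> ramp from m_start to m_middle
                   steady   : up to 1000 - m_fadeOutTime*10   -> constant m_middle
                   fade-out : the last m_fadeOutTime*10 units -> ramp from m_middle to m_end
                   m_start/m_middle/m_end are percentages, hence the final division by 100 */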
3860                if(alphaBlendingStruct != M4OSA_NULL)
3861                {
3862                    if(pProgress->uiProgress \
3863                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
3864                    {
3865                        if(alphaBlendingStruct->m_fadeInTime == 0) {
3866                            alphaBlending = alphaBlendingStruct->m_start / 100;
3867                        } else {
3868                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
3869                             - alphaBlendingStruct->m_start)\
3870                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
3871                            alphaBlending += alphaBlendingStruct->m_start;
3872                            alphaBlending /= 100;
3873                        }
3874                    }
3875                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
3876                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
3877                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
3878                    {
3879                        alphaBlending = (M4OSA_Float)\
3880                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
3881                    }
3882                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
3883                    (alphaBlendingStruct->m_fadeOutTime*10))
3884                    {
3885                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
3886                            alphaBlending = alphaBlendingStruct->m_end / 100;
3887                        } else {
3888                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
3889                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
3890                            /(alphaBlendingStruct->m_fadeOutTime*10);
3891                            alphaBlending += alphaBlendingStruct->m_end;
3892                            alphaBlending /= 100;
3893                        }
3894                    }
3895                }
3896                /**/
3897
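                /* The framing picture is scanned as RGB565 (2 bytes per pixel): a pixel equal to
                   TRANSPARENT_COLOR (0x7E0, pure green) lets the input video show through,
                   otherwise the framing pixel is alpha-blended over the video */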
3898                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
3899                {
3900                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
3901                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3902                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
3903                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3904                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
3905                }
3906                else
3907                {
3908                    *( p_out0+y+x*PlaneOut[0].u_stride)=
3909                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
3910                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
3911                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
3912                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
3913                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3914                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
3915                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
3916                                *alphaBlending;
3917                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
3918                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
3919                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3920                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
3921                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
3922                                *alphaBlending;
3923                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
3924                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
3925                }
3926                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
3927                    y == PlaneIn[0].u_width-1)
3928                {
3929                    FramingRGB = FramingRGB + 2 \
3930                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
3931                            - PlaneIn[0].u_width + 1);
3932                }
3933                else
3934                {
3935                    FramingRGB = FramingRGB + 2;
3936                }
3937            }
3938            /**
3939             * Just copy input plane to output plane */
3940            else
3941            {
3942                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
3943                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3944                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
3945                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3946                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
3947            }
3948        }
3949    }
3950
3951
3952    return M4VIFI_OK;
3953}
3954
3955
3956/**
3957 ******************************************************************************
3958 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
3959 *                                                    M4VIFI_ImagePlane *PlaneIn,
3960 *                                                    M4VIFI_ImagePlane *PlaneOut,
3961 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3962 *                                                    M4OSA_UInt32 uiEffectKind)
3963 *
3964 * @brief    This function makes a video look as if it was taken in the fifties
3965 * @note
3966 * @param    pUserData       (IN) Context
3967 * @param    pPlaneIn        (IN) Input YUV420 planar
3968 * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
3969 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3970 * @param    uiEffectKind    (IN) Unused
3971 *
3972 * @return    M4VIFI_OK:            No error
3973 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
3974 ******************************************************************************
3975 */
3976M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
3977                                                M4VIFI_ImagePlane *pPlaneIn,
3978                                                M4VIFI_ImagePlane *pPlaneOut,
3979                                                M4VSS3GPP_ExternalProgress *pProgress,
3980                                                M4OSA_UInt32 uiEffectKind )
3981{
3982    M4VIFI_UInt32 x, y, xShift;
3983    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
3984    M4VIFI_UInt8 *pOutY, *pInYbegin;
3985    M4VIFI_UInt8 *pInCr,* pOutCr;
3986    M4VIFI_Int32 plane_number;
3987
3988    /* Internal context*/
3989    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
3990
3991    /* Check the inputs (debug only) */
3992    M4OSA_DEBUG_IF2((p_FiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
3993         "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
3994    M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
3995         "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
3996    M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
3997        "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
3998
3999    /* Initialize input / output plane pointers */
4000    pInY += pPlaneIn[0].u_topleft;
4001    pOutY = pPlaneOut[0].pac_data;
4002    pInYbegin  = pInY;
4003
4004    /* Initialize the random generator and draw the first random values (first call only) */
4005    if(p_FiftiesData->previousClipTime < 0)
4006    {
4007        M4OSA_randInit();
4008        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4009        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4010        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4011    }
4012
4013    /* Choose random values if we have reached the duration of a partial effect */
4014    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
4015         > p_FiftiesData->fiftiesEffectDuration)
4016    {
4017        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4018        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4019        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4020    }
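    /* shiftRandomValue gives a random vertical offset (in lines) applied to the luma below, and
       stripeRandomValue picks a random column that is overwritten with a fixed luma value (a
       vertical scratch); both are refreshed once per fiftiesEffectDuration to produce the
       jittery, scratched-film look */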
4021
4022    /* Put in Sepia the chrominance */
4023    for (plane_number = 1; plane_number < 3; plane_number++)
4024    {
4025        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
4026        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
4027
4028        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
4029        {
4030            if (1 == plane_number)
4031                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
4032            else
4033                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
4034
4035            pInCr  += pPlaneIn[plane_number].u_stride;
4036            pOutCr += pPlaneOut[plane_number].u_stride;
4037        }
4038    }
4039
4040    /* Compute the new pixels values */
4041    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
4042    {
4043        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
4044
4045        /* Compute the xShift (random value) */
4046        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
4047            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
4048        else
4049            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
4050                % (pPlaneIn[0].u_height - 1);
4051
4052        /* Initialize the pointers */
4053        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
4054        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
4055
4056        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
4057        {
4058            /* Set Y value */
4059            if (xShift > (pPlaneIn[0].u_height - 4))
4060                *p_outYtmp = 40;        /* Add some horizontal black lines between the
4061                                        two parts of the image */
4062            else if ( y == p_FiftiesData->stripeRandomValue)
4063                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */
4064            else
4065                *p_outYtmp = *p_inYtmp;
4066
4067
4068            /* Go to the next pixel */
4069            p_outYtmp++;
4070            p_inYtmp++;
4071
4072            /* Restart at the beginning of the line for the last pixel*/
4073            if (y == (pPlaneIn[0].u_width - 2))
4074                p_outYtmp = pOutY;
4075        }
4076
4077        /* Go to the next line */
4078        pOutY += pPlaneOut[0].u_stride;
4079    }
4080
4081    return M4VIFI_OK;
4082}
4083
4084/**
4085 ******************************************************************************
4086 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
4087 * @brief    Zoom in/out video effect functions.
4088 * @note    The external video function is used only if VideoEffectType is set to
4089 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut.
4090 *
4091 * @param   pFunctionContext    (IN) The function context, previously set by the integrator
4092 * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
4093 *                                    image planes (Y, U and V)
4094 * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
4095 *                                        three valid image planes (Y, U and V)
4096 * @param    pProgress            (IN) Set of information about the video transition progress.
4097 * @return    M4NO_ERROR:            No error
4098 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
4099 ******************************************************************************
4100 */
4101
4102M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
4103    M4OSA_Void *pFunctionContext,
4104    M4VIFI_ImagePlane *pInputPlanes,
4105    M4VIFI_ImagePlane *pOutputPlanes,
4106    M4VSS3GPP_ExternalProgress *pProgress,
4107    M4OSA_UInt32 uiEffectKind
4108)
4109{
4110    M4OSA_UInt32 boxWidth;
4111    M4OSA_UInt32 boxHeight;
4112    M4OSA_UInt32 boxPosX;
4113    M4OSA_UInt32 boxPosY;
4114    M4OSA_UInt32 ratio = 0;
4115    /* consecutive entries of ratiotab differ by a factor of ~1.189207 (2^(1/4)) */
4116    /* zoom between x1 and x16 */
4117    M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
4118                                6889,8192,9742,11585,13777,16384};
4119    M4OSA_UInt32 ik;
4120
4121    M4VIFI_ImagePlane boxPlane[3];
4122
4123    if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
4124    {
4125        //ratio = 16 - (15 * pProgress->uiProgress)/1000;
4126        ratio = 16 - pProgress->uiProgress / 66 ;
4127    }
4128    else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
4129    {
4130        //ratio = 1 + (15 * pProgress->uiProgress)/1000;
4131        ratio = 1 + pProgress->uiProgress / 66 ;
4132    }
4133
4134    for(ik=0;ik<3;ik++){
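    /* The effect crops a centered box of the input whose dimensions are
       (input dimension << 10) / ratiotab[ratio], then stretches it back to the output size, so
       ratiotab[k] = 1024 * 2^(k/4) gives zoom factors from x1 (k=0) to x16 (k=16).
       For instance, for ZoomIn at uiProgress = 500: ratio = 1 + 500/66 = 8 (integer division),
       ratiotab[8] = 4096, so the box is a quarter of each input dimension and the frame appears
       zoomed in by a factor of 4. */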
4135
4136        boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
4137        boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
4138
4139        boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
4140        boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
4141        boxPlane[ik].u_height = (boxHeight)&(~1);
4142        boxPlane[ik].u_width = (boxWidth)&(~1);
4143
4144        boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
4145        boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
4146        boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
4147    }
4148
4149    M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
4150
4151    /**
4152     * Return */
4153    return(M4NO_ERROR);
4154}
4155
4156/**
4157 ******************************************************************************
4158 * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
4159 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4160 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4161 *                                    M4VIFI_ImagePlane *PlaneOut,
4162 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4163 *                                    M4OSA_UInt32 uiTransitionKind)
4164 *
4165 * @brief    This function applies an alpha magic transition between two input YUV420 planar frames
4166 * @note
4167 * @param    userData        (IN) Contains a pointer on a settings structure
4168 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4169 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4170 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4171 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4172 * @param    uiTransitionKind(IN) Unused
4173 *
4174 * @return    M4VIFI_OK:    No error
4175 ******************************************************************************
4176 */
4177M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4178                             M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4179                             M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
4180{
4181
4182    M4OSA_ERR err;
4183
4184    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4185    M4VIFI_Int32 alphaProgressLevel;
4186
4187    M4VIFI_ImagePlane* planeswap;
4188    M4VIFI_UInt32 x,y;
4189
4190    M4VIFI_UInt8 *p_out0;
4191    M4VIFI_UInt8 *p_out1;
4192    M4VIFI_UInt8 *p_out2;
4193    M4VIFI_UInt8 *alphaMask;
4194    /* "Old image" */
4195    M4VIFI_UInt8 *p_in1_Y;
4196    M4VIFI_UInt8 *p_in1_U;
4197    M4VIFI_UInt8 *p_in1_V;
4198    /* "New image" */
4199    M4VIFI_UInt8 *p_in2_Y;
4200    M4VIFI_UInt8 *p_in2_U;
4201    M4VIFI_UInt8 *p_in2_V;
4202
4203    err = M4NO_ERROR;
4204
4205    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4206
4207    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4208
4209    if( alphaContext->isreverse != M4OSA_FALSE)
4210    {
4211        alphaProgressLevel = 255 - alphaProgressLevel;
4212        planeswap = PlaneIn1;
4213        PlaneIn1 = PlaneIn2;
4214        PlaneIn2 = planeswap;
4215    }
4216
4217    p_out0 = PlaneOut[0].pac_data;
4218    p_out1 = PlaneOut[1].pac_data;
4219    p_out2 = PlaneOut[2].pac_data;
4220
4221    alphaMask = alphaContext->pPlane->pac_data;
4222
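    /* pPlane is a grayscale mask with the same dimensions as the output: pixels whose mask value
       is still above the normalized progress (0-255) keep the "old" clip, the others already
       show the "new" clip, so the gray levels of the mask define the order in which the new clip
       is revealed */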
4223    /* "Old image" */
4224    p_in1_Y = PlaneIn1[0].pac_data;
4225    p_in1_U = PlaneIn1[1].pac_data;
4226    p_in1_V = PlaneIn1[2].pac_data;
4227    /* "New image" */
4228    p_in2_Y = PlaneIn2[0].pac_data;
4229    p_in2_U = PlaneIn2[1].pac_data;
4230    p_in2_V = PlaneIn2[2].pac_data;
4231
4232     /**
4233     * For each row ... */
4234    for( y=0; y<PlaneOut->u_height; y++ )
4235    {
4236        /**
4237         * ... and each column of the alpha mask */
4238        for( x=0; x<PlaneOut->u_width; x++ )
4239        {
4240            /**
4241             * If the value of the current pixel of the alpha mask is greater than the current time
4242             * ( current time is normalized on [0-255] ) */
4243            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
4244            {
4245                /* We keep "old image" in output plane */
4246                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4247                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4248                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4249                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4250                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4251            }
4252            else
4253            {
4254                /* We take "new image" in output plane */
4255                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4256                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4257                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4258                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4259                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4260            }
4261        }
4262    }
4263
4264    return(err);
4265}
4266
4267/**
4268 ******************************************************************************
4269 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
4270 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4271 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4272 *                                    M4VIFI_ImagePlane *PlaneOut,
4273 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4274 *                                    M4OSA_UInt32 uiTransitionKind)
4275 *
4276 * @brief    This function applies an alpha magic transition, with blending, between two input YUV420 planar frames
4277 * @note
4278 * @param    userData        (IN) Contains a pointer on a settings structure
4279 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4280 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4281 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4282 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4283 * @param    uiTransitionKind(IN) Unused
4284 *
4285 * @return    M4VIFI_OK:    No error
4286 ******************************************************************************
4287 */
4288M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4289                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4290                                     M4VSS3GPP_ExternalProgress *pProgress,
4291                                     M4OSA_UInt32 uiTransitionKind)
4292{
4293    M4OSA_ERR err;
4294
4295    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4296    M4VIFI_Int32 alphaProgressLevel;
4297    M4VIFI_Int32 alphaBlendLevelMin;
4298    M4VIFI_Int32 alphaBlendLevelMax;
4299    M4VIFI_Int32 alphaBlendRange;
4300
4301    M4VIFI_ImagePlane* planeswap;
4302    M4VIFI_UInt32 x,y;
4303    M4VIFI_Int32 alphaMaskValue;
4304
4305    M4VIFI_UInt8 *p_out0;
4306    M4VIFI_UInt8 *p_out1;
4307    M4VIFI_UInt8 *p_out2;
4308    M4VIFI_UInt8 *alphaMask;
4309    /* "Old image" */
4310    M4VIFI_UInt8 *p_in1_Y;
4311    M4VIFI_UInt8 *p_in1_U;
4312    M4VIFI_UInt8 *p_in1_V;
4313    /* "New image" */
4314    M4VIFI_UInt8 *p_in2_Y;
4315    M4VIFI_UInt8 *p_in2_U;
4316    M4VIFI_UInt8 *p_in2_V;
4317
4318
4319    err = M4NO_ERROR;
4320
4321    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4322
4323    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4324
4325    if( alphaContext->isreverse != M4OSA_FALSE)
4326    {
4327        alphaProgressLevel = 255 - alphaProgressLevel;
4328        planeswap = PlaneIn1;
4329        PlaneIn1 = PlaneIn2;
4330        PlaneIn2 = planeswap;
4331    }
4332
4333    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
4334
4335    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
4336
4337    alphaBlendRange = (alphaContext->blendingthreshold)*2;
4338
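    /* blendingthreshold defines a band of mask values of width 2*blendingthreshold around the
       current progress level: above the band the old clip is kept, below it the new clip is
       shown, and inside it the two frames are linearly mixed, which softens the edge of the
       alpha magic wipe */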
4339    p_out0 = PlaneOut[0].pac_data;
4340    p_out1 = PlaneOut[1].pac_data;
4341    p_out2 = PlaneOut[2].pac_data;
4342
4343    alphaMask = alphaContext->pPlane->pac_data;
4344
4345    /* "Old image" */
4346    p_in1_Y = PlaneIn1[0].pac_data;
4347    p_in1_U = PlaneIn1[1].pac_data;
4348    p_in1_V = PlaneIn1[2].pac_data;
4349    /* "New image" */
4350    p_in2_Y = PlaneIn2[0].pac_data;
4351    p_in2_U = PlaneIn2[1].pac_data;
4352    p_in2_V = PlaneIn2[2].pac_data;
4353
4354    /* apply Alpha Magic on each pixel */
4355    for( y=0; y<PlaneOut->u_height; y++ )
4356    {
4357        for( x=0; x<PlaneOut->u_width; x++ )
4358        {
4359            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
4360            if( alphaBlendLevelMax < alphaMaskValue )
4361            {
4362                /* We keep "old image" in output plane */
4363                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4364                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4365                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4366                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4367                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4368            }
4369            else if( (alphaBlendLevelMin < alphaMaskValue)&&
4370                    (alphaMaskValue <= alphaBlendLevelMax ) )
4371            {
4372                /* We blend "old and new image" in output plane */
4373                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
4374                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
4375                        +(alphaBlendLevelMax-alphaMaskValue)\
4376                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
4377
4378                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
4379                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
4380                        *PlaneIn1[1].u_stride))
4381                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
4382                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );
4383
4384                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4385                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
4386                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
4387                                +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
4388                                    *PlaneIn2[2].u_stride)) )/alphaBlendRange );
4389
4390            }
4391            else
4392            {
4393                /* We take "new image" in output plane */
4394                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4395                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4396                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4397                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4398                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4399            }
4400        }
4401    }
4402
4403    return(err);
4404}
4405
4406#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
4407     * (plane).u_stride + (x) )
4408
4409static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
4410{
4411    M4OSA_UInt32    height, width, sourceStride, destStride, y;
4412    M4OSA_MemAddr8    sourceWalk, destWalk;
4413
4414    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4415     recomputed from memory. */
4416    height = dest->u_height;
4417    width = dest->u_width;
4418
4419    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
4420    sourceStride = source->u_stride;
4421
4422    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
4423    destStride = dest->u_stride;
4424
4425    for (y=0; y<height; y++)
4426    {
4427        memcpy((void *)destWalk, (void *)sourceWalk, width);
4428        destWalk += destStride;
4429        sourceWalk += sourceStride;
4430    }
4431}
4432
4433static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
4434                                                M4VIFI_ImagePlane* bottomPlane,
4435                                                M4VIFI_ImagePlane *PlaneOut,
4436                                                M4OSA_UInt32    shiftUV)
4437{
4438    M4OSA_UInt32 i;
4439
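    /* shiftUV is the slide offset expressed in chroma (U/V) lines; since YUV420 chroma planes
       are half the luma height, the Y plane is shifted by 2*shiftUV lines */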
4440    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4441    "hot" at the same time (better for cache). */
4442    for (i=0; i<3; i++)
4443    {
4444        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
4445        M4OSA_MemAddr8    sourceWalk, destWalk;
4446
4447        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4448         recomputed from memory. */
4449        if (0 == i) /* Y plane */
4450        {
4451            bottomPartHeight = 2*shiftUV;
4452        }
4453        else /* U and V planes */
4454        {
4455            bottomPartHeight = shiftUV;
4456        }
4457        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
4458        width = PlaneOut[i].u_width;
4459
4460        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
4461        sourceStride = topPlane[i].u_stride;
4462
4463        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4464        destStride = PlaneOut[i].u_stride;
4465
4466        /* First the part from the top source clip frame. */
4467        for (y=0; y<topPartHeight; y++)
4468        {
4469            memcpy((void *)destWalk, (void *)sourceWalk, width);
4470            destWalk += destStride;
4471            sourceWalk += sourceStride;
4472        }
4473
4474        /* and now change the vars to copy the part from the bottom source clip frame. */
4475        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
4476        sourceStride = bottomPlane[i].u_stride;
4477
4478        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
4479
4480        for (y=0; y<bottomPartHeight; y++)
4481        {
4482            memcpy((void *)destWalk, (void *)sourceWalk, width);
4483            destWalk += destStride;
4484            sourceWalk += sourceStride;
4485        }
4486    }
4487    return M4NO_ERROR;
4488}
4489
4490static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
4491                                                  M4VIFI_ImagePlane* rightPlane,
4492                                                  M4VIFI_ImagePlane *PlaneOut,
4493                                                  M4OSA_UInt32    shiftUV)
4494{
4495    M4OSA_UInt32 i, y;
4496    /* If we shifted by exactly 0, or by the width of the target image, then we would get the left
4497    frame or the right frame, respectively. These cases aren't handled too well by the general
4498    handling, since they result in 0-size memcopies, so might as well particularize them. */
4499
4500    if (0 == shiftUV)    /* output left frame */
4501    {
4502        for (i = 0; i<3; i++) /* for each YUV plane */
4503        {
4504            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
4505        }
4506
4507        return M4NO_ERROR;
4508    }
4509
4510    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
4511    {
4512        for (i = 0; i<3; i++) /* for each YUV plane */
4513        {
4514            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
4515        }
4516
4517        return M4NO_ERROR;
4518    }
4519
4520
4521    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4522    "hot" at the same time (better for cache). */
4523    for (i=0; i<3; i++)
4524    {
4525        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
4526        M4OSA_UInt32    leftStride,    rightStride,    destStride;
4527        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
4528
4529        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
4530        and recomputed from memory. */
4531        height = PlaneOut[i].u_height;
4532
4533        if (0 == i) /* Y plane */
4534        {
4535            rightPartWidth = 2*shiftUV;
4536        }
4537        else /* U and V planes */
4538        {
4539            rightPartWidth = shiftUV;
4540        }
4541        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
4542
4543        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
4544        leftStride = leftPlane[i].u_stride;
4545
4546        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
4547        rightStride = rightPlane[i].u_stride;
4548
4549        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4550        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
4551        destStride = PlaneOut[i].u_stride;
4552
4553        for (y=0; y<height; y++)
4554        {
4555            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
4556            leftWalk += leftStride;
4557
4558            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
4559            rightWalk += rightStride;
4560
4561            destWalkLeft += destStride;
4562            destWalkRight += destStride;
4563        }
4564    }
4565
4566    return M4NO_ERROR;
4567}
4568
4569
4570M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4571                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4572                                  M4VSS3GPP_ExternalProgress *pProgress,
4573                                  M4OSA_UInt32 uiTransitionKind)
4574{
4575    M4xVSS_internal_SlideTransitionSettings* settings =
4576         (M4xVSS_internal_SlideTransitionSettings*)userData;
4577    M4OSA_UInt32    shiftUV;
4578
4579    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
4580    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4581        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
4582    {
4583        /* horizontal slide */
4584        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
4585        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
4586            shiftUV,pProgress->uiProgress );
4587        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4588        {
4589            /* Put the previous clip frame on the right and the next clip frame on the left,
4590            and reverse shiftUV (since it is a shift from the left frame) so that we start
4591            out on the right frame, i.e. the one
4592            coming from the previous clip. */
4593            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4594                 (PlaneOut[1]).u_width - shiftUV);
4595        }
4596        else /* Left out, right in*/
4597        {
4598            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4599        }
4600    }
4601    else
4602    {
4603        /* vertical slide */
4604        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
4605        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
4606            pProgress->uiProgress );
4607        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
4608        {
4609            /* Put the previous clip frame top, the next clip frame bottom. */
4610            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4611        }
4612        else /* Bottom out, top in */
4613        {
4614            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4615                (PlaneOut[1]).u_height - shiftUV);
4616        }
4617    }
4618
4619    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
4620    at least in debug mode. */
4621}
4622
4623
4624/**
4625 ******************************************************************************
4626 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext,
4627 *                                                    M4VIFI_ImagePlane *PlaneIn,
4628 *                                                    M4VIFI_ImagePlane *PlaneOut,
4629 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4630 *                                                    M4OSA_UInt32 uiEffectKind)
4631 *
4632 * @brief    This function applies a fade to black and then a fade from black
4633 * @note
4634 * @param    userData        (IN) Unused
4635 * @param    PlaneIn1/PlaneIn2 (IN) Input YUV420 planar frames (outgoing / incoming clip)
4636 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4637 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4638 * @param    uiTransitionKind    (IN) Unused
4639 *
4640 * @return    M4VIFI_OK:    No error
4641 ******************************************************************************
4642 */
4643M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4644                                     M4VIFI_ImagePlane PlaneIn2[3],
4645                                     M4VIFI_ImagePlane *PlaneOut,
4646                                     M4VSS3GPP_ExternalProgress *pProgress,
4647                                     M4OSA_UInt32 uiTransitionKind)
4648{
4649    M4OSA_Int32 tmp = 0;
4650    M4OSA_ERR err = M4NO_ERROR;
4651
4652
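    /* The transition is split in two halves of uiProgress (0-1000): during the first half the
       outgoing clip (PlaneIn1) has its luma scaled from 1024 (unchanged) down to 0 (black);
       during the second half the incoming clip (PlaneIn2) has its luma scaled from 0 back up to
       1024, the scale being the 0..1024 factor passed to M4VFL_modifyLumaWithScale */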
4653    if((pProgress->uiProgress) < 500)
4654    {
4655        /**
4656         * Compute where we are in the effect (scale is 0->1024) */
4657        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
4658
4659        /**
4660         * Apply the darkening effect */
4661        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
4662             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4663        if (M4NO_ERROR != err)
4664        {
4665            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
4666                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4667            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4668        }
4669    }
4670    else
4671    {
4672        /**
4673         * Compute where we are in the effect (scale is 0->1024). */
4674        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
4675
4676        /**
4677         * Apply the darkening effect */
4678        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
4679             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4680        if (M4NO_ERROR != err)
4681        {
4682            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
4683                 M4VFL_modifyLumaWithScale returns error 0x%x,\
4684                     returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4685            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4686        }
4687    }
4688
4689
4690    return M4VIFI_OK;
4691}
4692
4693
4694/**
4695 ******************************************************************************
4696 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
4697 *                                                        M4OSA_Void* pBufferIn,
4698 *                                                        M4OSA_Void* pBufferOut,
4699 *                                                        M4OSA_UInt32* convertedSize)
4700 *
4701 * @brief    This function converts from the customer format to UTF8
4702 * @note
4703 * @param    pContext        (IN)    The integrator own context
4704 * @param    pBufferIn        (IN)    Buffer to convert
4705 * @param    pBufferOut        (OUT)    Converted buffer
4706 * @param    convertedSize    (OUT)    Size of the converted buffer
4707 *
4708 * @return    M4NO_ERROR:    No error
4709 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4710 ******************************************************************************
4711 */
4712M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4713                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4714{
4715    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4716    M4OSA_ERR err;
4717
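    /* Note: pBufferOut is a by-value pointer, so the assignments below only update the local
       copy; callers (e.g. M4xVSS_internalGetTargetedTimeScale above) read the converted string
       directly from UTFConversionContext.pTempOutConversionBuffer */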
4718    pBufferOut = pBufferIn;
4719    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
4720        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4721    {
4722        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4723
4724        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
4725            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
4726
4727        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4728            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4729                 (M4OSA_UInt32*)&ConvertedSize);
4730        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4731        {
4732            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4733
4734            /*free too small buffer*/
4735            free(xVSS_context->\
4736                UTFConversionContext.pTempOutConversionBuffer);
4737
4738            /*re-allocate the buffer*/
4739            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4740                 (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4741                     (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
4742            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4743            {
4744                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
4745                return M4ERR_ALLOC;
4746            }
4747            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4748
4749            memset((void *)xVSS_context->\
4750                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4751                    UTFConversionContext.m_TempOutConversionSize);
4752
4753            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4754                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4755                    (M4OSA_UInt32*)&ConvertedSize);
4756            if(err != M4NO_ERROR)
4757            {
4758                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4759                return err;
4760            }
4761        }
4762        else if(err != M4NO_ERROR)
4763        {
4764            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4765            return err;
4766        }
4767        /*decoded path*/
4768        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4769        (*convertedSize) = ConvertedSize;
4770    }
4771    return M4NO_ERROR;
4772}
4773
4774
4775/**
4776 ******************************************************************************
4777 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext)
4778 *
4779 * @brief    This function converts from UTF8 to the customer format
4780 * @note
4781 * @param    pContext    (IN) The integrator own context
4782 * @param    pBufferIn        (IN)    Buffer to convert
4783 * @param    pBufferOut        (OUT)    Converted buffer
4784 * @param    convertedSize    (OUT)    Size of the converted buffer
4785 *
4786 * @return    M4NO_ERROR:    No error
4787 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4788 ******************************************************************************
4789 */
4790M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4791                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4792{
4793    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4794    M4OSA_ERR err;
4795
4796    pBufferOut = pBufferIn;
4797    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
4798        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4799    {
4800        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4801
4802        memset((void *)xVSS_context->\
4803            UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4804                UTFConversionContext.m_TempOutConversionSize);
4805
4806        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
4807            ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
4808                UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
4809        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4810        {
4811            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4812
4813            /*free too small buffer*/
4814            free(xVSS_context->\
4815                UTFConversionContext.pTempOutConversionBuffer);
4816
4817            /*re-allocate the buffer*/
4818            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4819                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4820                     (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
4821            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4822            {
4823                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
4824                return M4ERR_ALLOC;
4825            }
4826            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4827
4828            memset((void *)xVSS_context->\
4829                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4830                    UTFConversionContext.m_TempOutConversionSize);
4831
4832            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
4833                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4834                     (M4OSA_UInt32*)&ConvertedSize);
4835            if(err != M4NO_ERROR)
4836            {
4837                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4838                return err;
4839            }
4840        }
4841        else if(err != M4NO_ERROR)
4842        {
4843            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4844            return err;
4845        }
4846        /*decoded path*/
4847        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4848        (*convertedSize) = ConvertedSize;
4849    }
4850
4851
4852    return M4NO_ERROR;
4853}
4854