M4xVSS_internal.c revision 6e779fda8a4f6258f9b910290b2f296d18fb2585
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4xVSS_internal.c
19 * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
20 * @note
21 ******************************************************************************
22 */
23#include "M4OSA_Debug.h"
24#include "M4OSA_CharStar.h"
25
26#include "NXPSW_CompilerSwitches.h"
27
28#include "M4VSS3GPP_API.h"
29#include "M4VSS3GPP_ErrorCodes.h"
30
31#include "M4xVSS_API.h"
32#include "M4xVSS_Internal.h"
33
34/*for rgb16 color effect*/
35#include "M4VIFI_Defines.h"
36#include "M4VIFI_Clip.h"
37
38/**
39 * component includes */
40#include "M4VFL_transition.h"            /**< video effects */
41
42/* Internal header file of VSS is included because of MMS use case */
43#include "M4VSS3GPP_InternalTypes.h"
44
45/*Exif header files to add image rendering support (cropping, black borders)*/
46#include "M4EXIFC_CommonAPI.h"
47// StageFright encoders require the output resolution to be a multiple of 16
48#include "M4ENCODER_common.h"
49
50#define TRANSPARENT_COLOR 0x7E0
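/* 0x7E0 is pure green in RGB565 (R = 0, G = 63, B = 0); it is used as the transparency
   key of framing effects, hence the dedicated RGB565-to-YUV420 conversion below. */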
51
52/* Prototype of the M4VIFI_xVSS_RGB565toYUV420 function (avoids the green cast caused by the transparency color) */
53M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
54                                        M4VIFI_ImagePlane *pPlaneOut);
55
56
57/* Special MCS function, used only in VideoArtist and VideoStudio, to open the media in normal
58   mode so that the media duration is accurate */
59extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
60                                         M4VIDEOEDITING_FileType InputFileType,
61                                         M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
62
63
64/**
65 ******************************************************************************
66 * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
67 * @brief        This function initializes MCS (3GP transcoder) with the given
68 *                parameters
69 * @note        The transcoding parameters are given by the internal xVSS context.
70 *                This context contains a pointer to the current element of the
71 *                chained list of MCS parameters.
72 *
73 * @param    pContext            (IN) Pointer to the xVSS edit context
74 * @return    M4NO_ERROR:            No error
75 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
76 * @return    M4ERR_ALLOC:        Memory allocation has failed
77 ******************************************************************************
78 */
79M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
80{
81    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
82    M4OSA_ERR err;
83    M4MCS_Context mcs_context;
84    M4MCS_OutputParams Params;
85    M4MCS_EncodingParams Rates;
86    M4OSA_UInt32 i;
87
88    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
89    if(err != M4NO_ERROR)
90    {
91        M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
92        return err;
93    }
94
95    err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
96         xVSS_context->pMCScurrentParams->InputFileType,
97             xVSS_context->pMCScurrentParams->pFileOut,
98             xVSS_context->pMCScurrentParams->pFileTemp);
99    if (err != M4NO_ERROR)
100    {
101        M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
102        M4MCS_abort(mcs_context);
103        return err;
104    }
105
106    /**
107     * Fill MCS parameters with the parameters contained in the current element of the
108     * MCS parameters chained list */
109    Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
110    Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
111    Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
112    Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
113    Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
114    Params.OutputAudioSamplingFrequency =
115         xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
116    Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
117    Params.pOutputPCMfile = M4OSA_NULL;
118    /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
119    switch(xVSS_context->pMCScurrentParams->MediaRendering)
120    {
121    case M4xVSS_kResizing:
122        Params.MediaRendering = M4MCS_kResizing;
123        break;
124    case M4xVSS_kCropping:
125        Params.MediaRendering = M4MCS_kCropping;
126        break;
127    case M4xVSS_kBlackBorders:
128        Params.MediaRendering = M4MCS_kBlackBorders;
129        break;
130    default:
131        break;
132    }
133    /**/
134    // new params after integrating MCS 2.0
135    // Set the number of audio effects; 0 for now.
136    Params.nbEffects = 0;
137
138    // Set the audio effect list; none for now.
139    Params.pEffects = NULL;
140
141    // Keep the EXIF metadata in the output file.
142    Params.bDiscardExif = M4OSA_FALSE;
143
144    // Do not adjust the orientation based on the EXIF data.
145    Params.bAdjustOrientation = M4OSA_FALSE;
146    // new params after integrating MCS 2.0
147
148    /**
149     * Set output parameters */
150    err = M4MCS_setOutputParams(mcs_context, &Params);
151    if (err != M4NO_ERROR)
152    {
153        M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
154        M4MCS_abort(mcs_context);
155        return err;
156    }
157
158    Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
159    Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
160    Rates.BeginCutTime = 0;
161    Rates.EndCutTime = 0;
162    Rates.OutputFileSize = 0;
163
164    /*FB: transcoding per parts*/
165    Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
166    Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
167    Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
168
169    err = M4MCS_setEncodingParams(mcs_context, &Rates);
170    if (err != M4NO_ERROR)
171    {
172        M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
173        M4MCS_abort(mcs_context);
174        return err;
175    }
176
177    err = M4MCS_checkParamsAndStart(mcs_context);
178    if (err != M4NO_ERROR)
179    {
180        M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
181        M4MCS_abort(mcs_context);
182        return err;
183    }
184
185    /**
186     * Save MCS context to be able to call MCS step function in M4xVSS_step function */
187    xVSS_context->pMCS_Ctxt = mcs_context;
188
189    return M4NO_ERROR;
190}
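
/* Illustrative usage sketch (not part of the original file): once
 * M4xVSS_internalStartTranscoding() has returned, the MCS context saved in
 * xVSS_context->pMCS_Ctxt is typically driven step by step from the xVSS state
 * machine until the transcoder reports completion, then released with
 * M4xVSS_internalStopTranscoding(). The loop below is a sketch only; the progress
 * variable name is illustrative.
 *
 *     M4OSA_UInt8 progress = 0;
 *     do {
 *         err = M4MCS_step(xVSS_context->pMCS_Ctxt, &progress);
 *     } while (M4NO_ERROR == err);
 *     if (M4MCS_WAR_TRANSCODING_DONE == err) {
 *         err = M4xVSS_internalStopTranscoding(pContext);
 *     }
 */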
191
192/**
193 ******************************************************************************
194 * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
195 * @brief        This function cleans up MCS (3GP transcoder)
196 * @note
197 *
198 * @param    pContext            (IN) Pointer to the xVSS edit context
199 * @return    M4NO_ERROR:            No error
200 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
201 * @return    M4ERR_ALLOC:        Memory allocation has failed
202 ******************************************************************************
203 */
204M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
205{
206    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
207    M4OSA_ERR err;
208
209    err = M4MCS_close(xVSS_context->pMCS_Ctxt);
210    if (err != M4NO_ERROR)
211    {
212        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
213        M4MCS_abort(xVSS_context->pMCS_Ctxt);
214        return err;
215    }
216
217    /**
218     * Free this MCS instance */
219    err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
220    if (err != M4NO_ERROR)
221    {
222        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
223        return err;
224    }
225
226    xVSS_context->pMCS_Ctxt = M4OSA_NULL;
227
228    return M4NO_ERROR;
229}
230
231/**
232 ******************************************************************************
233 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
234 *                                             M4OSA_FileReadPointer* pFileReadPtr,
235 *                                                M4VIFI_ImagePlane* pImagePlanes,
236 *                                                 M4OSA_UInt32 width,
237 *                                                M4OSA_UInt32 height);
238 * @brief    Converts and resizes an ARGB8888 image to YUV420
239 * @note
240 * @param    pFileIn            (IN) The image input file
241 * @param    pFileReadPtr    (IN) Pointer to the filesystem functions
242 * @param    pImagePlanes    (IN/OUT) Pointer to the YUV420 output planes allocated by the caller.
243 *                            The ARGB8888 image will be converted and resized to the output
244 *                            YUV420 plane size
245 * @param    width            (IN) width of the ARGB8888 image
246 * @param    height            (IN) height of the ARGB8888 image
247 * @return    M4NO_ERROR:    No error
248 * @return    M4ERR_ALLOC: memory error
249 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
250 ******************************************************************************
251 */
252
253M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
254                                                          M4OSA_FileReadPointer* pFileReadPtr,
255                                                          M4VIFI_ImagePlane* pImagePlanes,
256                                                          M4OSA_UInt32 width,M4OSA_UInt32 height)
257{
258    M4OSA_Context pARGBIn;
259    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
260    M4OSA_UInt32 frameSize_argb=(width * height * 4);
261    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
262    M4OSA_UInt32 i = 0,j= 0;
263    M4OSA_ERR err=M4NO_ERROR;
264
265
266    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
267         M4VS, (M4OSA_Char*)"Image argb data");
268        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
269    if(pTmpData == M4OSA_NULL) {
270        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
271            Failed to allocate memory for Image clip");
272        return M4ERR_ALLOC;
273    }
274
275    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
276        width ,height);
277    /* Get file size (mandatory for chunk decoding) */
278    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
279    if(err != M4NO_ERROR)
280    {
281        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
282            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
283        free(pTmpData);
284        pTmpData = M4OSA_NULL;
285        goto cleanup;
286    }
287
288    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
289    if(err != M4NO_ERROR)
290    {
291        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't read ARGB8888\
292             file %s, error: 0x%x\n",pFileIn, err);
293        pFileReadPtr->closeRead(pARGBIn);
294        free(pTmpData);
295        pTmpData = M4OSA_NULL;
296        goto cleanup;
297    }
298
299    err = pFileReadPtr->closeRead(pARGBIn);
300    if(err != M4NO_ERROR)
301    {
302        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
303             file %s, error: 0x%x\n",pFileIn, err);
304        free(pTmpData);
305        pTmpData = M4OSA_NULL;
306        goto cleanup;
307    }
308
309    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
310         (M4OSA_Char*)"Image clip RGB888 data");
311    if(rgbPlane1.pac_data == M4OSA_NULL)
312    {
313        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
314            Failed to allocate memory for Image clip");
315        free(pTmpData);
316        return M4ERR_ALLOC;
317    }
318
319    rgbPlane1.u_height = height;
320    rgbPlane1.u_width = width;
321    rgbPlane1.u_stride = width*3;
322    rgbPlane1.u_topleft = 0;
323
324
325    /** Strip the alpha channel: keep only the R, G and B bytes of each ARGB pixel */
326    for (i=0, j = 0; i < frameSize_argb; i++) {
327        if ((i % 4) == 0) continue;
328        rgbPlane1.pac_data[j] = pTmpData[i];
329        j++;
330    }
331        free(pTmpData);
332
333    /* Check whether resizing is required before the color conversion */
334    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
335    {
336        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
337        frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
338        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
339             (M4OSA_Char*)"Image clip RGB888 data");
340        if(rgbPlane2.pac_data == M4OSA_NULL)
341        {
342            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
343            free(rgbPlane1.pac_data); /* pTmpData has already been freed above */
344            return M4ERR_ALLOC;
345        }
346        rgbPlane2.u_height = pImagePlanes->u_height;
347        rgbPlane2.u_width = pImagePlanes->u_width;
348        rgbPlane2.u_stride = pImagePlanes->u_width*3;
349        rgbPlane2.u_topleft = 0;
350
351        /* Resizing RGB888 to RGB888 */
352        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
353        if(err != M4NO_ERROR)
354        {
355            M4OSA_TRACE1_1("error when resizing RGB888 to RGB888: 0x%x\n", err);
356            free(rgbPlane2.pac_data);
357            free(rgbPlane1.pac_data);
358            return err;
359        }
360        /*Converting Resized RGB888 to YUV420 */
361        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
362        if(err != M4NO_ERROR)
363        {
364            M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
365            free(rgbPlane2.pac_data);
366            free(rgbPlane1.pac_data);
367            return err;
368        }
369        free(rgbPlane2.pac_data);
370        free(rgbPlane1.pac_data);
371
372        M4OSA_TRACE1_0("RGB to YUV done");
373
374
375    }
376    else
377    {
378        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
379        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
380        if(err != M4NO_ERROR)
381        {
382            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
383        }
384        free(rgbPlane1.pac_data);
385
386        M4OSA_TRACE1_0("RGB to YUV done");
387    }
388cleanup:
389    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
390    return err;
391}
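
/* Illustrative usage sketch (not part of the original file): the caller owns the three
 * output planes and sizes them to the desired target resolution before calling; the
 * 320x240 target below and the buffer handling are examples only.
 *
 *     M4VIFI_ImagePlane yuv[3];
 *     M4OSA_UInt32 w = 320, h = 240;
 *     yuv[0].u_width = w;      yuv[0].u_height = h;      yuv[0].u_stride = w;
 *     yuv[1].u_width = w >> 1; yuv[1].u_height = h >> 1; yuv[1].u_stride = w >> 1;
 *     yuv[2].u_width = w >> 1; yuv[2].u_height = h >> 1; yuv[2].u_stride = w >> 1;
 *     yuv[0].u_topleft = yuv[1].u_topleft = yuv[2].u_topleft = 0;
 *     // allocate yuv[0..2].pac_data (w*h, w*h/4 and w*h/4 bytes) before the call
 *     err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pFileIn, pFileReadPtr,
 *                                                           yuv, srcWidth, srcHeight);
 */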
392
393/**
394 ******************************************************************************
395 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
396 *                                             M4OSA_FileReadPointer* pFileReadPtr,
397 *                                                M4VIFI_ImagePlane** pImagePlanes,
398 *                                                 M4OSA_UInt32 width,
399 *                                                M4OSA_UInt32 height);
400 * @brief    Converts an ARGB8888 image to YUV420
401 * @note
402 * @param    pFileIn            (IN) The image input file
403 * @param    pFileReadPtr    (IN) Pointer to the filesystem functions
404 * @param    pImagePlanes    (OUT) Address at which the pointer to the YUV420 planes,
405 *                            allocated by this function, is returned. The ARGB8888 image
406 *                            is converted to YUV420 at its original size.
407 * @param    width            (IN) width of the ARGB8888 image
408 * @param    height            (IN) height of the ARGB8888 image
409 * @return    M4NO_ERROR:    No error
410 * @return    M4ERR_ALLOC: memory error
411 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
412 ******************************************************************************
413 */
414
415M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
416                                                 M4OSA_FileReadPointer* pFileReadPtr,
417                                                 M4VIFI_ImagePlane** pImagePlanes,
418                                                 M4OSA_UInt32 width,M4OSA_UInt32 height)
419{
420    M4OSA_ERR err = M4NO_ERROR;
421    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
422
423    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
424                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
425    if(yuvPlane == M4OSA_NULL) {
426        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :\
427            Failed to allocate memory for Image clip");
428        return M4ERR_ALLOC;
429    }
430    yuvPlane[0].u_height = height;
431    yuvPlane[0].u_width = width;
432    yuvPlane[0].u_stride = width;
433    yuvPlane[0].u_topleft = 0;
434    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
435        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
    if(yuvPlane[0].pac_data == M4OSA_NULL) {
        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :\
            Failed to allocate memory for the YUV planes");
        free(yuvPlane);
        return M4ERR_ALLOC;
    }
436
437    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
438    yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
439    yuvPlane[1].u_stride = yuvPlane[1].u_width;
440    yuvPlane[1].u_topleft = 0;
441    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
442        * yuvPlane[0].u_width);
443
444    yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
445    yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
446    yuvPlane[2].u_stride = yuvPlane[2].u_width;
447    yuvPlane[2].u_topleft = 0;
448    yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
449        * yuvPlane[1].u_width);
450    err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
451                                                          yuvPlane, width, height);
452    if(err != M4NO_ERROR)
453    {
454        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 returned error: 0x%x\n", err);
455        free(yuvPlane[0].pac_data); /* also release the YUV buffer allocated above */
        free(yuvPlane);
456        return err;
457    }
458
459    *pImagePlanes = yuvPlane;
460
461    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
462    return err;
463
464}
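
/* Illustrative usage sketch (not part of the original file): here the planes are
 * allocated by the function itself in one contiguous buffer, so the caller releases
 * plane 0's data pointer and the plane array when done (the same pattern used by
 * M4xVSS_PictureCallbackFct below).
 *
 *     M4VIFI_ImagePlane *pPlanes = M4OSA_NULL;
 *     err = M4xVSS_internalConvertARGB8888toYUV420(pFileIn, pFileReadPtr, &pPlanes,
 *                                                  srcWidth, srcHeight);
 *     if (M4NO_ERROR == err) {
 *         // ... use pPlanes[0..2] ...
 *         free(pPlanes[0].pac_data);   // Y, U and V share a single allocation
 *         free(pPlanes);
 *     }
 */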
465
466/**
467 ******************************************************************************
468 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
469 *                                        M4VIFI_ImagePlane* pImagePlanes,
470 *                                        M4OSA_Double* pPictureDuration);
471 * @brief    It feeds the PTO3GPP with YUV420 pictures.
472 * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
473 * @param    pPictureCtxt    (IN) The integrator's own context
474 * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
475 * @param    pPictureDuration(OUT) Duration of the returned picture
476 *
477 * @return    M4NO_ERROR:    No error
478 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
479 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
480 ******************************************************************************
481 */
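/* Illustrative registration sketch (not part of the original file): this callback is
 * handed to the PTO3GPP converter through its parameter structure, together with an
 * M4xVSS_PictureCallbackCtxt. Field and variable names below are assumed from the
 * usual M4PTO3GPP_Params layout and should be checked against M4PTO3GPP_API.h.
 *
 *     M4PTO3GPP_Params pto3gppParams;
 *     pto3gppParams.pPictureCallbackFct  = M4xVSS_PictureCallbackFct;
 *     pto3gppParams.pPictureCallbackCtxt = (M4OSA_Void*)pCallBackCtxt;
 */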
482M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
483                                     M4OSA_Double* pPictureDuration)
484{
485    M4OSA_ERR err = M4NO_ERROR;
486    M4OSA_UInt8    last_frame_flag = 0;
487    M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
488
489    /*Used for pan&zoom*/
490    M4OSA_UInt8 tempPanzoomXa = 0;
491    M4OSA_UInt8 tempPanzoomXb = 0;
492    M4AIR_Params Params;
493    /**/
494
495    /*Used for cropping and black borders*/
496    M4OSA_Context    pPictureContext = M4OSA_NULL;
497    M4OSA_FilePosition    pictureSize = 0 ;
498    M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
499    //M4EXIFC_Context pExifContext = M4OSA_NULL;
500    M4EXIFC_BasicTags pBasicTags;
501    M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
502    M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
503    M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
504    /**/
505
506    /**
507     * Check input parameters */
508    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
509         "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
510    M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
511         "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
512    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
513         "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
514    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
515    /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
516    if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
517    {
518        pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
519    }
520
521    /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
522    if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
523    {
524        /**
525         * Convert and resize input ARGB8888 file to YUV420 */
526        /*To support ARGB8888 : */
527        M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and height %d %d",
528            pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
529        err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
530             pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
531                pC->m_pPto3GPPparams->height);
532        if(err != M4NO_ERROR)
533        {
534            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting the ARGB8888 input: 0x%x\n", err);
535            return err;
536        }
537    }
538    /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
539    else
540    {
541        /**
542         * Computes ratios */
543        if(pC->m_pDecodedPlane == M4OSA_NULL)
544        {
545            /**
546             * Convert input ARGB8888 file to YUV420 */
547             M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and height %d %d",
548                pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
549            err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
550                &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
551            if(err != M4NO_ERROR)
552            {
553                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting the ARGB8888 input: 0x%x\n", err);
554                if(pC->m_pDecodedPlane != M4OSA_NULL)
555                {
556                    /* YUV420 planar is returned but the allocation is made only once
557                        (contiguous planes in memory) */
558                    if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
559                    {
560                        free(pC->m_pDecodedPlane->pac_data);
561                    }
562                    free(pC->m_pDecodedPlane);
563                    pC->m_pDecodedPlane = M4OSA_NULL;
564                }
565                return err;
566            }
567        }
568
569        /*Initialize AIR Params*/
570        Params.m_inputCoord.m_x = 0;
571        Params.m_inputCoord.m_y = 0;
572        Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
573        Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
574        Params.m_outputSize.m_width = pImagePlanes->u_width;
575        Params.m_outputSize.m_height = pImagePlanes->u_height;
576        Params.m_bOutputStripe = M4OSA_FALSE;
577        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
578
579        /*Initialize Exif params structure*/
580        pBasicTags.orientation = M4COMMON_kOrientationUnknown;
581
582        /**
583        Pan&zoom params*/
584        if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
585        {
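            /* Pan & zoom coordinates and ratios are expressed in per-mille (0..1000) of
               the decoded picture dimensions, hence the comparisons against 1000 below. */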
586            /*Save ratio values, they can be reused if the new ratios are 0*/
587            tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
588            tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
589            /*Check that the ratio is not 0*/
590            /*Check (a) parameters*/
591            if(pC->m_pPto3GPPparams->PanZoomXa == 0)
592            {
593                M4OSA_UInt8 maxRatio = 0;
594                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
595                     pC->m_pPto3GPPparams->PanZoomTopleftYa)
596                {
597                    /*The ratio is 0, that means the area of the picture defined with (a)
598                    parameters is bigger than the image size*/
599                    if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
600                    {
601                        /*The oversize is maxRatio*/
602                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
603                    }
604                }
605                else
606                {
607                    /*The ratio is 0, that means the area of the picture defined with (a)
608                     parameters is bigger than the image size*/
609                    if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
610                    {
611                        /*The oversize is maxRatio*/
612                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
613                    }
614                }
615                /*Modify the (a) parameters:*/
616                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
617                {
618                    /*The (a) topleft parameters can be moved to keep the same area size*/
619                    pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
620                }
621                else
622                {
623                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
624                    modified to match the image size*/
625                    pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
626                }
627                if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
628                {
629                    /*The (a) topleft parameters can be moved to keep the same area size*/
630                    pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
631                }
632                else
633                {
634                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
635                     modified to match the image size*/
636                    pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
637                }
638                /*The new ratio is the original one*/
639                pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
640                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
641                {
642                    /*Change the ratio if the area of the picture defined with (a) parameters is
643                    bigger than the image size*/
644                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
645                }
646                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
647                {
648                    /*Change the ratio if the area of the picture defined with (a) parameters is
649                    bigger than the image size*/
650                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
651                }
652            }
653            /*Check (b) parameters*/
654            if(pC->m_pPto3GPPparams->PanZoomXb == 0)
655            {
656                M4OSA_UInt8 maxRatio = 0;
657                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
658                     pC->m_pPto3GPPparams->PanZoomTopleftYb)
659                {
660                    /*The ratio is 0, that means the area of the picture defined with (b)
661                     parameters is bigger than the image size*/
662                    if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
663                    {
664                        /*The oversize is maxRatio*/
665                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
666                    }
667                }
668                else
669                {
670                    /*The ratio is 0, that means the area of the picture defined with (b)
671                     parameters is bigger than the image size*/
672                    if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
673                    {
674                        /*The oversize is maxRatio*/
675                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
676                    }
677                }
678                /*Modify the (b) parameters:*/
679                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
680                {
681                    /*The (b) topleft parameters can be moved to keep the same area size*/
682                    pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
683                }
684                else
685                {
686                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
687                     modified to match the image size*/
688                    pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
689                }
690                if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
691                {
692                    /*The (b) topleft parameters can be moved to keep the same area size*/
693                    pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
694                }
695                else
696                {
697                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
698                    modified to match the image size*/
699                    pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
700                }
701                /*The new ratio is the original one*/
702                pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
703                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
704                {
705                    /*Change the ratio if the area of the picture defined with (b) parameters is
706                    bigger than the image size*/
707                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
708                }
709                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
710                {
711                    /*Change the ratio if the area of the picture defined with (b) parameters is
712                    bigger than the image size*/
713                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
714                }
715            }
716
717            /**
718             * Computes AIR parameters */
719/*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
720            (pC->m_pPto3GPPparams->PanZoomTopleftXa +
721            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
722                - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
723            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
724        Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
725            (pC->m_pPto3GPPparams->PanZoomTopleftYa +
726            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
727                 - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
728            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
729
730        Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
731            (pC->m_pPto3GPPparams->PanZoomXa +
732            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
733            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
734
735        Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
736            (pC->m_pPto3GPPparams->PanZoomXa +
737            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
738            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
739 */
740            // Use (pC->m_NbImage-1) rather than pC->m_NbImage, because pC->m_ImageCounter
741            // reaches at most x-1 for a sequence of x frames.
742            Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
743                (pC->m_pPto3GPPparams->PanZoomTopleftXa +
744                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
745                     - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
746                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
747            Params.m_inputCoord.m_y =
748                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
749                (pC->m_pPto3GPPparams->PanZoomTopleftYa +
750                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
751                     - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
752                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
753
754            Params.m_inputSize.m_width =
755                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
756                (pC->m_pPto3GPPparams->PanZoomXa +
757                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
758                     - pC->m_pPto3GPPparams->PanZoomXa) *
759                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
760
761            Params.m_inputSize.m_height =
762                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
763                (pC->m_pPto3GPPparams->PanZoomXa +
764                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
765                    - pC->m_pPto3GPPparams->PanZoomXa) *
766                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
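
            /* Worked example with illustrative numbers: for a decoded width of 1000 pixels,
               PanZoomTopleftXa = 0 and PanZoomTopleftXb = 200 (per-mille), roughly halfway
               through the sequence the crop origin becomes about
               1000 * (0 + 200 * 0.5) / 1000 = 100 pixels from the left edge. */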
767
768            if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
769                 > pC->m_pDecodedPlane->u_width)
770            {
771                Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
772                    - Params.m_inputCoord.m_x;
773            }
774
775            if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
776                 > pC->m_pDecodedPlane->u_height)
777            {
778                Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
779                     - Params.m_inputCoord.m_y;
780            }
781
782
783
784            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
785            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
786        }
787
788
789
790    /**
791        Picture rendering: Black borders*/
792
793        if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
794        {
795            memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
796                (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
797            memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
798                (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
799            memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
800                (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
801
802            /**
803            First without pan&zoom*/
804            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
805            {
806                switch(pBasicTags.orientation)
807                {
808                default:
809                case M4COMMON_kOrientationUnknown:
810                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
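                    /* no break: an unknown orientation falls through and is handled as top-left */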
811                case M4COMMON_kOrientationTopLeft:
812                case M4COMMON_kOrientationTopRight:
813                case M4COMMON_kOrientationBottomRight:
814                case M4COMMON_kOrientationBottomLeft:
815                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
816                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
817                         //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
818                    {
819                        /*it is height so black borders will be on the top and on the bottom side*/
820                        Params.m_outputSize.m_width = pImagePlanes->u_width;
821                        Params.m_outputSize.m_height =
822                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
823                                * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
824                        /*number of lines at the top*/
825                        pImagePlanes[0].u_topleft =
826                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
827                                -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
828                        pImagePlanes[0].u_height = Params.m_outputSize.m_height;
829                        pImagePlanes[1].u_topleft =
830                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
831                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
832                        pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
833                        pImagePlanes[2].u_topleft =
834                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
835                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
836                        pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
837                    }
838                    else
839                    {
840                        /*it is width so black borders will be on the left and right side*/
841                        Params.m_outputSize.m_height = pImagePlanes->u_height;
842                        Params.m_outputSize.m_width =
843                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
844                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
845
846                        pImagePlanes[0].u_topleft =
847                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
848                                -Params.m_outputSize.m_width)>>1));
849                        pImagePlanes[0].u_width = Params.m_outputSize.m_width;
850                        pImagePlanes[1].u_topleft =
851                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
852                                -(Params.m_outputSize.m_width>>1)))>>1);
853                        pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
854                        pImagePlanes[2].u_topleft =
855                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
856                                -(Params.m_outputSize.m_width>>1)))>>1);
857                        pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
858                    }
859                    break;
860                case M4COMMON_kOrientationLeftTop:
861                case M4COMMON_kOrientationLeftBottom:
862                case M4COMMON_kOrientationRightTop:
863                case M4COMMON_kOrientationRightBottom:
864                        if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
865                             /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
866                             //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
867                        {
868                            /*it is height so black borders will be on the top and on
869                             the bottom side*/
870                            Params.m_outputSize.m_height = pImagePlanes->u_width;
871                            Params.m_outputSize.m_width =
872                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
873                                    * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
874                            /*number of lines at the top*/
875                            pImagePlanes[0].u_topleft =
876                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
877                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
878                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
879                            pImagePlanes[1].u_topleft =
880                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
881                                    -(Params.m_outputSize.m_width>>1)))>>1)\
882                                        *pImagePlanes[1].u_stride)+1;
883                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
884                            pImagePlanes[2].u_topleft =
885                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
886                                    -(Params.m_outputSize.m_width>>1)))>>1)\
887                                        *pImagePlanes[2].u_stride)+1;
888                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
889                        }
890                        else
891                        {
892                            /*it is width so black borders will be on the left and right side*/
893                            Params.m_outputSize.m_width = pImagePlanes->u_height;
894                            Params.m_outputSize.m_height =
895                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
896                                     * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
897
898                            pImagePlanes[0].u_topleft =
899                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
900                                    -Params.m_outputSize.m_height))>>1))+1;
901                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
902                            pImagePlanes[1].u_topleft =
903                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
904                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
905                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
906                            pImagePlanes[2].u_topleft =
907                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
908                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
909                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
910                        }
911                    break;
912                }
913            }
914
915            /**
916            Secondly with pan&zoom*/
917            else
918            {
919                switch(pBasicTags.orientation)
920                {
921                default:
922                case M4COMMON_kOrientationUnknown:
923                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
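                    /* no break: an unknown orientation falls through and is handled as top-left */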
924                case M4COMMON_kOrientationTopLeft:
925                case M4COMMON_kOrientationTopRight:
926                case M4COMMON_kOrientationBottomRight:
927                case M4COMMON_kOrientationBottomLeft:
928                    /*NO ROTATION*/
929                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
930                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
931                            //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
932                    {
933                        /*Black borders will be on the top and bottom of the output video*/
934                        /*Maximum output height if the input image aspect ratio is kept and if
935                        the output width is the screen width*/
936                        M4OSA_UInt32 tempOutputSizeHeight =
937                            (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
938                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
939                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
940                        M4OSA_UInt32 tempFinalInputHeight = 0;
941                        /*The output width is the screen width*/
942                        Params.m_outputSize.m_width = pImagePlanes->u_width;
943                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
944
945                        /*Maximum input height according to the maximum output height
946                        (proportional to the maximum output height)*/
947                        tempInputSizeHeightMax = (pImagePlanes->u_height\
948                            *Params.m_inputSize.m_height)/tempOutputSizeHeight;
949                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
950
951                        /*Check if the maximum possible input height is contained into the
952                        input image height*/
953                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
954                        {
955                            /*The maximum possible input height is contained in the input
956                            image height,
957                            that means no black borders, the input pan zoom area will be extended
958                            so that the input AIR height will be the maximum possible*/
959                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
960                                 <= Params.m_inputCoord.m_y
961                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
962                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
963                                         + Params.m_inputSize.m_height))
964                            {
965                                /*The input pan zoom area can be extended symmetrically on the
966                                top and bottom side*/
967                                Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
968                                    - Params.m_inputSize.m_height)>>1);
969                            }
970                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
971                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
972                            {
973                                /*There is not enough place above the input pan zoom area to
974                                extend it symmetrically,
975                                so extend it to the maximum on the top*/
976                                Params.m_inputCoord.m_y = 0;
977                            }
978                            else
979                            {
980                                /*There is not enough place below the input pan zoom area to
981                                extend it symmetrically,
982                                so extend it to the maximum on the bottom*/
983                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
984                                    - tempInputSizeHeightMax;
985                            }
986                            /*The input height of the AIR is the maximum possible height*/
987                            Params.m_inputSize.m_height = tempInputSizeHeightMax;
988                        }
989                        else
990                        {
991                            /*The maximum possible input height is greater than the input
992                            image height,
993                            that means black borders are necessary to keep aspect ratio
994                            The input height of the AIR is all the input image height*/
995                            Params.m_outputSize.m_height =
996                                (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
997                                    /Params.m_inputSize.m_height;
998                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
999                            Params.m_inputCoord.m_y = 0;
1000                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1001                            pImagePlanes[0].u_topleft =
1002                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1003                                    -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
1004                            pImagePlanes[0].u_height = Params.m_outputSize.m_height;
1005                            pImagePlanes[1].u_topleft =
1006                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1007                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1008                                        *pImagePlanes[1].u_stride);
1009                            pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
1010                            pImagePlanes[2].u_topleft =
1011                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1012                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1013                                        *pImagePlanes[2].u_stride);
1014                            pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
1015                        }
1016                    }
1017                    else
1018                    {
1019                        /*Black borders will be on the left and right side of the output video*/
1020                        /*Maximum output width if the input image aspect ratio is kept and if the
1021                         output height is the screen height*/
1022                        M4OSA_UInt32 tempOutputSizeWidth =
1023                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
1024                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
1025                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1026                        M4OSA_UInt32 tempFinalInputWidth = 0;
1027                        /*The output height is the screen height*/
1028                        Params.m_outputSize.m_height = pImagePlanes->u_height;
1029                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1030
1031                        /*Maximum input width according to the maximum output width
1032                        (proportional to the maximum output width)*/
1033                        tempInputSizeWidthMax =
1034                             (pImagePlanes->u_width*Params.m_inputSize.m_width)\
1035                                /tempOutputSizeWidth;
1036                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1037
1038                        /*Check if the maximum possible input width is contained into the input
1039                         image width*/
1040                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
1041                        {
1042                            /*The maximum possible input width is contained in the input
1043                            image width,
1044                            that means no black borders, the input pan zoom area will be extended
1045                            so that the input AIR width will be the maximum possible*/
1046                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
1047                                <= Params.m_inputCoord.m_x
1048                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
1049                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1050                                        + Params.m_inputSize.m_width))
1051                            {
1052                                /*The input pan zoom area can be extended symmetrically on the
1053                                     right and left side*/
1054                                Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
1055                                     - Params.m_inputSize.m_width)>>1);
1056                            }
1057                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1058                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1059                            {
1060                                /*There is not enough room to the left of the input pan zoom
1061                                    area to extend it symmetrically,
1062                                so extend it to the maximum on the left*/
1063                                Params.m_inputCoord.m_x = 0;
1064                            }
1065                            else
1066                            {
1067                                /*There is not enough room to the right of the input pan zoom
1068                                    area to extend it symmetrically,
1069                                so extend it to the maximum on the right*/
1070                                Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
1071                                    - tempInputSizeWidthMax;
1072                            }
1073                            /*The input width of the AIR is the maximum possible width*/
1074                            Params.m_inputSize.m_width = tempInputSizeWidthMax;
1075                        }
1076                        else
1077                        {
1078                            /*The maximum possible input width is greater than the input
1079                            image width,
1080                            that means black borders are necessary to keep aspect ratio
1081                            The input width of the AIR is all the input image width*/
1082                            Params.m_outputSize.m_width =\
1083                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
1084                                    /Params.m_inputSize.m_width;
1085                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1086                            Params.m_inputCoord.m_x = 0;
1087                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1088                            pImagePlanes[0].u_topleft =
1089                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1090                                    -Params.m_outputSize.m_width)>>1));
1091                            pImagePlanes[0].u_width = Params.m_outputSize.m_width;
1092                            pImagePlanes[1].u_topleft =
1093                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1094                                    -(Params.m_outputSize.m_width>>1)))>>1);
1095                            pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
1096                            pImagePlanes[2].u_topleft =
1097                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1098                                    -(Params.m_outputSize.m_width>>1)))>>1);
1099                            pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
1100                        }
1101                    }
1102                    break;
1103                case M4COMMON_kOrientationLeftTop:
1104                case M4COMMON_kOrientationLeftBottom:
1105                case M4COMMON_kOrientationRightTop:
1106                case M4COMMON_kOrientationRightBottom:
1107                    /*ROTATION*/
1108                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1109                         /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
1110                         //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
1111                    {
1112                        /*Black borders will be on the left and right side of the output video*/
1113                        /*Maximum output height if the input image aspect ratio is kept and if
1114                        the output height is the screen width*/
1115                        M4OSA_UInt32 tempOutputSizeHeight =
1116                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1117                             /pC->m_pDecodedPlane->u_height);
1118                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1120                        /*The output width is the screen height*/
1121                        Params.m_outputSize.m_height = pImagePlanes->u_width;
1122                        Params.m_outputSize.m_width= pImagePlanes->u_height;
1123                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1124
1125                        /*Maximum input height according to the maximum output height
1126                             (proportional to the maximum output height)*/
1127                        tempInputSizeHeightMax =
1128                            (pImagePlanes->u_height*Params.m_inputSize.m_width)\
1129                                /tempOutputSizeHeight;
1130                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1131
1132                        /*Check if the maximum possible input height is contained into the
1133                             input image width (rotation included)*/
1134                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
1135                        {
1136                            /*The maximum possible input height is contained in the input
1137                            image width (rotation included),
1138                            that means no black borders, the input pan zoom area will be extended
1139                            so that the input AIR width will be the maximum possible*/
1140                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
1141                                <= Params.m_inputCoord.m_x
1142                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
1143                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1144                                        + Params.m_inputSize.m_width))
1145                            {
1146                                /*The input pan zoom area can be extended symmetrically on the
1147                                 right and left side*/
1148                                Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
1149                                    - Params.m_inputSize.m_width)>>1);
1150                            }
1151                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1152                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1153                            {
1154                                /*There is not enough place on the left of the input pan
1155                                zoom area to extend it symmetrically,
1156                                so extend it to the maximum on the left*/
1157                                Params.m_inputCoord.m_x = 0;
1158                            }
1159                            else
1160                            {
1161                                /*There is not enough place on the right of the input pan zoom
1162                                 area to extend it symmetrically,
1163                                so extend it to the maximum on the right*/
1164                                Params.m_inputCoord.m_x =
1165                                     pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
1166                            }
1167                            /*The input width of the AIR is the maximum possible width*/
1168                            Params.m_inputSize.m_width = tempInputSizeHeightMax;
1169                        }
1170                        else
1171                        {
1172                            /*The maximum possible input height is greater than the input
1173                            image width (rotation included),
1174                            that means black borders are necessary to keep aspect ratio
1175                            The input width of the AIR is all the input image width*/
1176                            Params.m_outputSize.m_width =
1177                            (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
1178                                /Params.m_inputSize.m_width;
1179                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1180                            Params.m_inputCoord.m_x = 0;
1181                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1182                            pImagePlanes[0].u_topleft =
1183                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1184                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
1185                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
1186                            pImagePlanes[1].u_topleft =
1187                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1188                                -(Params.m_outputSize.m_width>>1)))>>1)\
1189                                    *pImagePlanes[1].u_stride)+1;
1190                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
1191                            pImagePlanes[2].u_topleft =
1192                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1193                                -(Params.m_outputSize.m_width>>1)))>>1)\
1194                                    *pImagePlanes[2].u_stride)+1;
1195                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
1196                        }
1197                    }
1198                    else
1199                    {
1200                        /*Black borders will be on the top and bottom of the output video*/
1201                        /*Maximum output width if the input image aspect ratio is kept and if
1202                         the output width is the screen height*/
1203                        M4OSA_UInt32 tempOutputSizeWidth =
1204                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
1205                             /pC->m_pDecodedPlane->u_width);
1206                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1208                        /*The output height is the screen width*/
1209                        Params.m_outputSize.m_width = pImagePlanes->u_height;
1210                        Params.m_outputSize.m_height= pImagePlanes->u_width;
1211                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1212
1213                        /*Maximum input width according to the maximum output width
1214                         (proportional to the maximum output width)*/
1215                        tempInputSizeWidthMax =
1216                        (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
1217                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1218
1219                        /*Check if the maximum possible input width is contained into the input
1220                         image height (rotation included)*/
1221                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
1222                        {
1223                            /*The maximum possible input width is contained in the input
1224                             image height (rotation included),
1225                            that means no black borders, the input pan zoom area will be extended
1226                            so that the input AIR height will be the maximum possible*/
1227                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
1228                                <= Params.m_inputCoord.m_y
1229                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
1230                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
1231                                        + Params.m_inputSize.m_height))
1232                            {
1233                                /*The input pan zoom area can be extended symmetrically on
1234                                the right and left side*/
1235                                Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
1236                                    - Params.m_inputSize.m_height)>>1);
1237                            }
1238                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1239                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1240                            {
1241                                /*There is not enough place on the top of the input pan zoom
1242                                area to extend it symmetrically,
1243                                so extend it to the maximum on the top*/
1244                                Params.m_inputCoord.m_y = 0;
1245                            }
1246                            else
1247                            {
1248                                /*There is not enough place on the bottom of the input pan zoom
1249                                 area to extend it symmetrically,
1250                                so extend it to the maximum on the bottom*/
1251                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
1252                                     - tempInputSizeWidthMax;
1253                            }
1254                            /*The input height of the AIR is the maximum possible height*/
1255                            Params.m_inputSize.m_height = tempInputSizeWidthMax;
1256                        }
1257                        else
1258                        {
1259                            /*The maximum possible input width is greater than the input\
1260                             image height (rotation included),
1261                            that means black borders are necessary to keep aspect ratio
1262                            The input height of the AIR is all the input image height*/
1263                            Params.m_outputSize.m_height =
1264                                (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
1265                                    /Params.m_inputSize.m_height;
1266                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1267                            Params.m_inputCoord.m_y = 0;
1268                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1269                            pImagePlanes[0].u_topleft =
1270                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1271                                    -Params.m_outputSize.m_height))>>1))+1;
1272                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
1273                            pImagePlanes[1].u_topleft =
1274                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1275                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1276                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
1277                            pImagePlanes[2].u_topleft =
1278                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1279                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1280                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
1281                        }
1282                    }
1283                    break;
1284                }
1285            }
1286
1287            /*Width and height have to be even*/
1288            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1289            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1290            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
1291            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
1292            pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
1293            pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
1294            pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
1295            pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
1296            pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
1297            pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
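            /* Editor's note: (x>>1)<<1 simply clears the least-significant bit, rounding down
             * to an even value (e.g. 145 -> 144, 144 -> 144). Even dimensions are needed because
             * the YUV420 chroma planes are subsampled by 2 in each direction. */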
1298
1299            /*Check that values are coherent*/
1300            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
1301            {
1302                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
1303            }
1304            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
1305            {
1306                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
1307            }
1308        }
1309
1310        /**
1311        Picture rendering: Resizing and Cropping*/
1312        if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
1313        {
1314            switch(pBasicTags.orientation)
1315            {
1316            default:
1317            case M4COMMON_kOrientationUnknown:
1318                Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
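                /* intentional fall-through: an unknown orientation is handled like TopLeft */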
1319            case M4COMMON_kOrientationTopLeft:
1320            case M4COMMON_kOrientationTopRight:
1321            case M4COMMON_kOrientationBottomRight:
1322            case M4COMMON_kOrientationBottomLeft:
1323                Params.m_outputSize.m_height = pImagePlanes->u_height;
1324                Params.m_outputSize.m_width = pImagePlanes->u_width;
1325                break;
1326            case M4COMMON_kOrientationLeftTop:
1327            case M4COMMON_kOrientationLeftBottom:
1328            case M4COMMON_kOrientationRightTop:
1329            case M4COMMON_kOrientationRightBottom:
1330                Params.m_outputSize.m_height = pImagePlanes->u_width;
1331                Params.m_outputSize.m_width = pImagePlanes->u_height;
1332                break;
1333            }
1334        }
1335
1336        /**
1337        Picture rendering: Cropping*/
1338        if(pC->m_mediaRendering == M4xVSS_kCropping)
1339        {
1340            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
1341                 /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
1342            {
1343                M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
1344                /*height will be cropped*/
1345                Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
1346                    * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
1347                Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
1348                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1349                {
1350                    Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
1351                        ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
1352                }
1353                else
1354                {
1355                    Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
1356                        ((tempHeight - Params.m_inputSize.m_height))>>1);
1357                }
1358            }
1359            else
1360            {
1361                M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
1362                /*width will be cropped*/
1363                Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
1364                    * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
1365                Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
1366                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1367                {
1368                    Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
1369                        ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
1370                }
1371                else
1372                {
1373                    Params.m_inputCoord.m_x += (M4OSA_Int32)\
1374                        (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
1375                }
1376            }
1377        }
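        /* Illustrative example (hypothetical numbers, not taken from a real clip): with a
         * 640x480 input area and a 176x144 (QCIF) output, (144*640)/176 = 523, which is not
         * smaller than 480, so the width is cropped: m_inputSize.m_width = (176*480)/144 = 586
         * (already even), and without pan & zoom m_inputCoord.m_x = (640-586)>>1 = 27.
         * The input height is left untouched in that case. */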
1378
1379
1380
1381        /**
1382         * Call AIR functions */
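        /* AIR usage pattern as used in this callback: M4AIR_create() is called once per context
         * (guarded by the NULL check below), M4AIR_configure() is called with the crop/resize
         * parameters computed above, M4AIR_get() produces the output planes from
         * pC->m_pDecodedPlane, and M4AIR_cleanUp() is called on error or once the last picture
         * has been processed. */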
1383        if(M4OSA_NULL == pC->m_air_context)
1384        {
1385            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
1386            if(err != M4NO_ERROR)
1387            {
1388                free(pC->m_pDecodedPlane[0].pac_data);
1389                free(pC->m_pDecodedPlane);
1390                pC->m_pDecodedPlane = M4OSA_NULL;
1391                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1392                     Error when initializing AIR: 0x%x", err);
1393                return err;
1394            }
1395        }
1396
1397        err = M4AIR_configure(pC->m_air_context, &Params);
1398        if(err != M4NO_ERROR)
1399        {
1400            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1401                 Error when configuring AIR: 0x%x", err);
1402            M4AIR_cleanUp(pC->m_air_context);
1403            free(pC->m_pDecodedPlane[0].pac_data);
1404            free(pC->m_pDecodedPlane);
1405            pC->m_pDecodedPlane = M4OSA_NULL;
1406            return err;
1407        }
1408
1409        err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
1410        if(err != M4NO_ERROR)
1411        {
1412            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
1413            M4AIR_cleanUp(pC->m_air_context);
1414            free(pC->m_pDecodedPlane[0].pac_data);
1415            free(pC->m_pDecodedPlane);
1416            pC->m_pDecodedPlane = M4OSA_NULL;
1417            return err;
1418        }
1419        pImagePlanes[0] = pImagePlanes1;
1420        pImagePlanes[1] = pImagePlanes2;
1421        pImagePlanes[2] = pImagePlanes3;
1422    }
1423
1424
1425    /**
1426     * Increment the image counter */
1427    pC->m_ImageCounter++;
1428
1429    /**
1430     * Check end of sequence */
1431    last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
1432
1433    /**
1434     * Keep the picture duration */
1435    *pPictureDuration = pC->m_timeDuration;
1436
1437    if (1 == last_frame_flag)
1438    {
1439        if(M4OSA_NULL != pC->m_air_context)
1440        {
1441            err = M4AIR_cleanUp(pC->m_air_context);
1442            if(err != M4NO_ERROR)
1443            {
1444                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
1445                return err;
1446            }
1447        }
1448        if(M4OSA_NULL != pC->m_pDecodedPlane)
1449        {
1450            free(pC->m_pDecodedPlane[0].pac_data);
1451            free(pC->m_pDecodedPlane);
1452            pC->m_pDecodedPlane = M4OSA_NULL;
1453        }
1454        return M4PTO3GPP_WAR_LAST_PICTURE;
1455    }
1456
1457    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
1458    return M4NO_ERROR;
1459}
1460
1461/**
1462 ******************************************************************************
1463 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1464 * @brief    This function initializes Pto3GPP with the given parameters
1465 * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
1466 *            context. This context contains a pointer on the current element
1467 *            of the chained list of Pto3GPP parameters.
1468 * @param    pContext    (IN) The integrator own context
1469 *
1470 * @return    M4NO_ERROR:    No error
1471 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
1472 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1473 ******************************************************************************
1474 */
1475M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1476{
1477    /************************************************************************/
1478    /* Definitions to generate dummy AMR file used to add AMR silence in files generated
1479     by Pto3GPP */
1480    #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
1481    /* This constant is defined in M4VSS3GPP_InternalConfig.h */
1482    extern const M4OSA_UInt8\
1483         M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
1484
1485    /* AMR file header ("#!AMR\n") written at the start of the dummy AMR silence file */
1486    #define M4VSS3GPP_AMR_HEADER_SIZE 6
1487    const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_HEADER_SIZE] =
1488    { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
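    /* Resulting dummy file layout (for reference): the 6-byte ASCII magic "#!AMR\n"
     * (0x23 0x21 0x41 0x4d 0x52 0x0a) followed by one 13-byte silence frame, i.e. 19 bytes
     * in total; 13 bytes is the size of an AMR-NB 4.75 kbps frame (1 TOC byte + 12 payload
     * bytes), which is what M4VSS3GPP_AMR_AU_SILENCE_FRAME_048 appears to contain. */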
1489    /************************************************************************/
1490
1491    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1492    M4OSA_ERR err;
1493    M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
1494    M4PTO3GPP_Params Params;
1495     M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
1496    M4OSA_Bool cmpResult=M4OSA_FALSE;
1497    M4OSA_Context pDummyAMRFile;
1498    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1499    /*UTF conversion support*/
1500    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1501    M4OSA_UInt32 i;
1502
1503    /**
1504     * Create a M4PTO3GPP instance */
1505    err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
1506         xVSS_context->pFileWritePtr);
1507    if (err != M4NO_ERROR)
1508    {
1509        M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err);
1510        return err;
1511    }
1512
1513    pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt),
1514         M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
1515    if(pCallBackCtxt == M4OSA_NULL)
1516    {
1517        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
1518        return M4ERR_ALLOC;
1519    }
1520
1521    Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
1522    Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
1523
1524    /**
1525     * Generate "dummy" amr file containing silence in temporary folder */
1526    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1527    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1528
1529    /**
1530     * UTF conversion: convert the temporary path into the customer format*/
1531    pDecodedPath = out_amr;
1532
1533    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1534            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1535    {
1536        M4OSA_UInt32 length = 0;
1537        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1538             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1539        if(err != M4NO_ERROR)
1540        {
1541            M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1542                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1543            return err;
1544        }
1545        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1546    }
1547
1548    /**
1549    * End of the conversion, now use the converted path*/
1550
1551    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
1552
1553    /*Commented because of the use of the UTF conversion see above*/
1554/*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
1555 */
1556    if(err != M4NO_ERROR)
1557    {
1558        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
1559             error: 0x%x\n",out_amr, err);
1560        return err;
1561    }
1562
1563    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1564        (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
1565    if(err != M4NO_ERROR)
1566    {
1567        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
1568             error: 0x%x\n",out_amr, err);
1569        return err;
1570    }
1571
1572    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1573         (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
1574    if(err != M4NO_ERROR)
1575    {
1576        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1577            Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
1578        return err;
1579    }
1580
1581    err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
1582    if(err != M4NO_ERROR)
1583    {
1584        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1585            Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
1586        return err;
1587    }
1588
1589    /**
1590     * Fill parameters for Pto3GPP with the parameters contained in the current element of the
1591     * Pto3GPP parameters chained list and with default parameters */
1592/*+ New Encoder bitrates */
1593    if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
1594        Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
1595    }
1596    else {
1597          Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
1598    }
1599    M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
1600        Params.OutputVideoBitrate);
1601/*- New Encoder bitrates */
1602    Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
1603    Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
1604    Params.pPictureCallbackCtxt    = pCallBackCtxt;
1605    /*FB: change to use the converted path (UTF conversion) see the conversion above*/
1606    /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */
1607    Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
1608    Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
1609    Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
1610    Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
1611    Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
1612    /*+PR No:  blrnxpsw#223*/
1613    /*Increasing the frame frequency: number of frames = duration / per-picture duration */
1614    /*Other changes are made in M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/
1615    /*If case check for PanZoom removed */
1616    Params.NbVideoFrames            = (M4OSA_UInt32)
1617        (xVSS_context->pPTo3GPPcurrentParams->duration \
1618            / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
1619    pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
1620    /*-PR No:  blrnxpsw#223*/
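    /* Illustrative example (hypothetical values, not from the original code): with
     * duration = 5000 and framerate = 100 (both in ms), NbVideoFrames = 50 and each picture
     * is kept on screen for m_timeDuration = 100 ms. Note that 'framerate' is used here as a
     * per-picture duration rather than frames per second, since it is both the divisor and
     * the value copied into m_timeDuration. */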
1621    pCallBackCtxt->m_ImageCounter    = 0;
1622    pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
1623    pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
1624    pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
1625    pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
1626    pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
1627    pCallBackCtxt->m_air_context    = M4OSA_NULL;
1628    pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
1629
1630    /**
1631     * Set the input and output files */
1632    err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
1633    if (err != M4NO_ERROR)
1634    {
1635        M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
1636        if(pCallBackCtxt != M4OSA_NULL)
1637        {
1638            free(pCallBackCtxt);
1639            pCallBackCtxt = M4OSA_NULL;
1640        }
1641        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1642        return err;
1643    }
1644
1645    /**
1646     * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
1647    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
1648    xVSS_context->pCallBackCtxt = pCallBackCtxt;
1649
1650    return M4NO_ERROR;
1651}
1652
1653/**
1654 ******************************************************************************
1655 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1656 * @brief    This function cleans up Pto3GPP
1657 * @note
1658 * @param    pContext    (IN) The integrator own context
1659 *
1660 * @return    M4NO_ERROR:    No error
1661 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1662 ******************************************************************************
1663 */
1664M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1665{
1666    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1667    M4OSA_ERR err;
1668    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1669    /*UTF conversion support*/
1670    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1671
1672    /**
1673    * Free the PTO3GPP callback context */
1674    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
1675    {
1676        free(xVSS_context->pCallBackCtxt);
1677        xVSS_context->pCallBackCtxt = M4OSA_NULL;
1678    }
1679
1680    /**
1681     * Finalize the output file */
1682    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
1683    if (err != M4NO_ERROR)
1684    {
1685        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
1686        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1687        return err;
1688    }
1689
1690    /**
1691     * Free this M4PTO3GPP instance */
1692    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1693    if (err != M4NO_ERROR)
1694    {
1695        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
1696        return err;
1697    }
1698
1699    /**
1700     * Remove dummy.amr file */
1701    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1702    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1703
1704    /**
1705     * UTF conversion: convert the temporary path into the customer format*/
1706    pDecodedPath = out_amr;
1707
1708    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1709            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1710    {
1711        M4OSA_UInt32 length = 0;
1712        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1713             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1714        if(err != M4NO_ERROR)
1715        {
1716            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
1717                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1718            return err;
1719        }
1720        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1721    }
1722    /**
1723    * End of the conversion, now use the decoded path*/
1724    remove((const char *)pDecodedPath);
1725
1726    /*Commented because of the use of the UTF conversion*/
1727/*    remove(out_amr);
1728 */
1729
1730    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
1731    xVSS_context->pCallBackCtxt = M4OSA_NULL;
1732
1733    return M4NO_ERROR;
1734}
1735
1736/**
1737 ******************************************************************************
1738 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1739 * @brief    This function converts an RGB565 plane to YUV420 planar
1740 * @note    It is used only for framing effect
1741 *            It allocates output YUV planes
1742 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
1743 *
1744 * @return    M4NO_ERROR:    No error
1745 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1746 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1747 ******************************************************************************
1748 */
1749M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1750{
1751    M4OSA_ERR err;
1752
1753    /**
1754     * Allocate output YUV planes */
1755    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
1756         M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
1757    if(framingCtx->FramingYuv == M4OSA_NULL)
1758    {
1759        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1760        return M4ERR_ALLOC;
1761    }
1762    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
1763    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
1764    framingCtx->FramingYuv[0].u_topleft = 0;
1765    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
1766    framingCtx->FramingYuv[0].pac_data =
1767         (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
1768            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
1769                "Alloc for the conversion output YUV");
1770    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
1771    {
1772        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1773        return M4ERR_ALLOC;
1774    }
1775    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
1776    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
1777    framingCtx->FramingYuv[1].u_topleft = 0;
1778    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1779    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
1780        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
1781    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
1782    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
1783    framingCtx->FramingYuv[2].u_topleft = 0;
1784    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1785    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
1786        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
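    /*
     * Resulting single-buffer layout (sketch) for a WxH input:
     *   [ Y : W*H bytes ][ U : (W/2)*(H/2) bytes ][ V : (W/2)*(H/2) bytes ]
     * i.e. (W*H*3)>>1 bytes in total, matching the single allocation above; the U and V
     * planes simply point inside the Y allocation.
     */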
1787
1788    /**
1789     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
1790      effect */
1791    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
1792    if(err != M4NO_ERROR)
1793    {
1794        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
1795             error when converting from RGB to YUV: 0x%x\n", err);
1796    }
1797
1798    framingCtx->duration = 0;
1799    framingCtx->previousClipTime = -1;
1800    framingCtx->previewOffsetClipTime = -1;
1801
1802    /**
1803     * Only one element in the chained list (no animated image with RGB buffer...) */
1804    framingCtx->pCurrent = framingCtx;
1805    framingCtx->pNext = framingCtx;
1806
1807    return M4NO_ERROR;
1808}
1809
1810M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
1811{
1812    M4OSA_UInt32 i;
1813    M4OSA_UInt8* plane = planeIn;
1814    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1815    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1816
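    /* TRANSPARENT_COLOR (0x7E0, pure green in RGB565) is written as two bytes per pixel,
     * 0x07 then 0xE0; e.g. for size = 8 the plane ends up as 07 E0 07 E0 07 E0 07 E0. */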
1817    for(i=0; i<(size>>1); i++)
1818    {
1819        *plane++ = transparent1;
1820        *plane++ = transparent2;
1821    }
1822
1823    return M4NO_ERROR;
1824}
1825
1826
1827/**
1828 ******************************************************************************
1829 * prototype M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1830 *                                                M4VSS3GPP_EffectSettings* pEffect,
1831 *                                                M4xVSS_FramingStruct* framingCtx,
1832                                                  M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
1833 *
1834 * @brief    This function converts an ARGB8888 input file to YUV420 when used for the framing effect
1835 * @note    The input ARGB8888 file path is contained in the pEffect structure
1836 *            If the ARGB8888 must be resized to fit output video size, this function
1837 *            will do it.
1838 * @param    pContext    (IN) The integrator own context
1839 * @param    pEffect        (IN) The effect structure containing all information on
1840 *                        the file to decode, resizing ...
1841 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
1842 *
1843 * @return    M4NO_ERROR:    No error
1844 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1845 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1846 * @return    M4ERR_FILE_NOT_FOUND: File not found.
1847 ******************************************************************************
1848 */
1849
1850
1851M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1852                                                               M4VSS3GPP_EffectSettings* pEffect,
1853                                                               M4xVSS_FramingStruct* framingCtx,
1854                                                               M4VIDEOEDITING_VideoFrameSize\
1855                                                               OutputVideoResolution)
1856{
1857    M4OSA_ERR err = M4NO_ERROR;
1858    M4OSA_Context pARGBIn;
1859    M4OSA_UInt32 file_size;
1860    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1861    M4OSA_UInt32 width, height, width_out, height_out;
1862    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
1863    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1864    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1865    /*UTF conversion support*/
1866    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1867    M4OSA_UInt32 i = 0,j = 0;
1868    M4VIFI_ImagePlane rgbPlane;
1869    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
1870    M4OSA_UInt32 frameSize;
1871    M4OSA_UInt32 tempAlphaPercent = 0;
1872    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
1873    M4OSA_UInt16 *ptr = M4OSA_NULL;
1874    M4OSA_UInt32 z = 0;
1875
1876    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
1877
1878    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
1879        framingCtx->width,framingCtx->height);
1880
1881    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
1882        "Image argb data");
1883    if(pTmpData == M4OSA_NULL) {
1884        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1885        return M4ERR_ALLOC;
1886    }
1887    /**
1888     * UTF conversion: convert the file path into the customer format*/
1889    pDecodedPath = pFile;
1890
1891    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1892            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1893    {
1894        M4OSA_UInt32 length = 0;
1895        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
1896             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1897        if(err != M4NO_ERROR)
1898        {
1899            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1900                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1901            free(pTmpData);
1902            pTmpData = M4OSA_NULL;
1903            return err;
1904        }
1905        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1906    }
1907
1908    /**
1909    * End of the conversion, now use the decoded path*/
1910
1911     /* Open input ARGB8888 file and store it into memory */
1912    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
1913
1914    if(err != M4NO_ERROR)
1915    {
1916        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1917        free(pTmpData);
1918        pTmpData = M4OSA_NULL;
1919        return err;
1920    }
1921
1922    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
1923    if(err != M4NO_ERROR)
1924    {
1925        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1926        free(pTmpData);
1927        pTmpData = M4OSA_NULL;
1928        return err;
1929    }
1930
1931
1932    err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1933    if(err != M4NO_ERROR)
1934    {
1935        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1936        free(pTmpData);
1937        pTmpData = M4OSA_NULL;
1938        return err;
1939    }
1940
1941
1942    rgbPlane.u_height = framingCtx->height;
1943    rgbPlane.u_width = framingCtx->width;
1944    rgbPlane.u_stride = rgbPlane.u_width*3;
1945    rgbPlane.u_topleft = 0;
1946
1947    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
1948    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
1949         M4VS, (M4OSA_Char*)"Image clip RGB888 data");
1950    if(rgbPlane.pac_data == M4OSA_NULL)
1951    {
1952        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1953        free(pTmpData);
1954        return M4ERR_ALLOC;
1955    }
1956
1957    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1958          Remove the alpha channel  ");
1959
1960    /* Strip the alpha channel: copy R, G and B; where alpha is 0, force GREEN to 255 as a transparency marker */
1961    for (i=0, j = 0; i < frameSize_argb; i += 4) {
1962        /* this is alpha value */
1963        if ((i % 4) == 0)
1964        {
1965            tempAlphaPercent = pTmpData[i];
1966        }
1967
1968        /* R */
1969        rgbPlane.pac_data[j] = pTmpData[i+1];
1970        j++;
1971
1972        /* G */
1973        if (tempAlphaPercent > 0) {
1974            rgbPlane.pac_data[j] = pTmpData[i+2];
1975            j++;
1976        } else { /* In case of alpha value 0, set GREEN to 255 */
1977            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
1978            j++;
1979        }
1980
1981        /* B */
1982        rgbPlane.pac_data[j] = pTmpData[i+3];
1983        j++;
1984    }
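    /* Input layout is 4 bytes per pixel (pTmpData[i] = A, [i+1] = R, [i+2] = G, [i+3] = B),
     * output is 3 bytes per pixel, so at this point j equals 3 * (number of pixels). */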
1985
1986    free(pTmpData);
1987    pTmpData = M4OSA_NULL;
1988
1989    /* convert RGB888 to RGB565 */
1990
1991    /* allocate temp RGB 565 buffer */
1992    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
1993                       (4 * (framingCtx->width + framingCtx->height + 1)),
1994                        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
1995    if (TempPacData == M4OSA_NULL) {
1996        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
1997        free(rgbPlane.pac_data);
1998        return M4ERR_ALLOC;
1999    }
2000
2001    ptr = (M4OSA_UInt16 *)TempPacData;
2002    z = 0;
2003
2004    for (i = 0; i < j ; i += 3)
2005    {
2006        ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
2007                                    rgbPlane.pac_data[i+1],
2008                                    rgbPlane.pac_data[i+2]);
2009    }
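    /* PACK_RGB565 packs the 8-bit R, G and B samples into one 16-bit 5-6-5 value; with the
     * usual 5-6-5 layout, pure green (0, 255, 0) maps to 0x07E0, i.e. TRANSPARENT_COLOR,
     * so pixels marked transparent above (whose R and B are typically 0 in the source asset)
     * keep the transparency color after the conversion. */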
2010
2011    /* free the RGB888 buffer and keep the RGB565 one */
2012    free(rgbPlane.pac_data);
2013    rgbPlane.pac_data = TempPacData;
2014
2015    /**
2016     * Check if output sizes are odd */
2017    if(rgbPlane.u_height % 2 != 0)
2018    {
2019        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
2020        M4OSA_UInt32 i;
2021        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2022             output height is odd  ");
2023        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
2024
2025        for(i=0;i<rgbPlane.u_width;i++)
2026        {
2027            *output_pac_data++ = transparent1;
2028            *output_pac_data++ = transparent2;
2029        }
2030
2031        /**
2032         * We just add an extra line, filled with the transparency color, so the height becomes even */
2033        rgbPlane.u_height++;
2034    }
2035    if(rgbPlane.u_width % 2 != 0)
2036    {
2037        /**
2038         * We add a new column filled with the transparency color, but we need to parse all RGB lines ... */
2039        M4OSA_UInt32 i;
2040        M4VIFI_UInt8* newRGBpac_data;
2041        M4VIFI_UInt8* output_pac_data, *input_pac_data;
2042
2043        rgbPlane.u_width++;
2044        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2045             output width is odd  ");
2046        /**
2047         * We need to allocate a new RGB output buffer in which all decoded data
2048          + white line will be copied */
2049        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
2050            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");
2051
2052        if(newRGBpac_data == M4OSA_NULL)
2053        {
2054            M4OSA_TRACE1_0("Allocation error in \
2055                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2056            free(rgbPlane.pac_data);
2057            return M4ERR_ALLOC;
2058        }
2059
2060        output_pac_data= newRGBpac_data;
2061        input_pac_data = rgbPlane.pac_data;
2062
2063        for(i=0;i<rgbPlane.u_height;i++)
2064        {
2065            memcpy((void *)output_pac_data, (void *)input_pac_data,
2066                 (rgbPlane.u_width-1)*2);
2067
2068            output_pac_data += ((rgbPlane.u_width-1)*2);
2069            /* Put the pixel to transparency color */
2070            *output_pac_data++ = transparent1;
2071            *output_pac_data++ = transparent2;
2072
2073            input_pac_data += ((rgbPlane.u_width-1)*2);
2074        }
2075        free(rgbPlane.pac_data);
2076        rgbPlane.pac_data = newRGBpac_data;
2077    }
2078
2079    /* reset stride */
2080    rgbPlane.u_stride = rgbPlane.u_width*2;
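    /* The parity fix-ups above matter because this RGB565 plane is later converted to YUV420,
     * where chroma is subsampled by 2 in each direction and therefore needs even luma
     * dimensions; the padded row/column is filled with the transparency color so that it can
     * be treated as transparent when the framing is blended over the video. */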
2081
2082    /**
2083     * Initialize chained list parameters */
2084    framingCtx->duration = 0;
2085    framingCtx->previousClipTime = -1;
2086    framingCtx->previewOffsetClipTime = -1;
2087
2088    /**
2089     * Only one element in the chained list (no animated image ...) */
2090    framingCtx->pCurrent = framingCtx;
2091    framingCtx->pNext = framingCtx;
2092
2093    /**
2094     * Get output width/height */
2095     switch(OutputVideoResolution)
2096    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
2097    {
2098    case M4VIDEOEDITING_kSQCIF:
2099        width_out = 128;
2100        height_out = 96;
2101        break;
2102    case M4VIDEOEDITING_kQQVGA:
2103        width_out = 160;
2104        height_out = 120;
2105        break;
2106    case M4VIDEOEDITING_kQCIF:
2107        width_out = 176;
2108        height_out = 144;
2109        break;
2110    case M4VIDEOEDITING_kQVGA:
2111        width_out = 320;
2112        height_out = 240;
2113        break;
2114    case M4VIDEOEDITING_kCIF:
2115        width_out = 352;
2116        height_out = 288;
2117        break;
2118    case M4VIDEOEDITING_kVGA:
2119        width_out = 640;
2120        height_out = 480;
2121        break;
2122    case M4VIDEOEDITING_kWVGA:
2123        width_out = 800;
2124        height_out = 480;
2125        break;
2126    case M4VIDEOEDITING_kNTSC:
2127        width_out = 720;
2128        height_out = 480;
2129        break;
2130    case M4VIDEOEDITING_k640_360:
2131        width_out = 640;
2132        height_out = 360;
2133        break;
2134    case M4VIDEOEDITING_k854_480:
2135        // StageFright encoders require %16 resolution
2136        width_out = M4ENCODER_854_480_Width;
2137        height_out = 480;
2138        break;
2139    case M4VIDEOEDITING_k1280_720:
2140        width_out = 1280;
2141        height_out = 720;
2142        break;
2143    case M4VIDEOEDITING_k1080_720:
2144        // StageFright encoders require %16 resolution
2145        width_out = M4ENCODER_1080_720_Width;
2146        height_out = 720;
2147        break;
2148    case M4VIDEOEDITING_k960_720:
2149        width_out = 960;
2150        height_out = 720;
2151        break;
2152    case M4VIDEOEDITING_k1920_1080:
2153        width_out = 1920;
2154        height_out = M4ENCODER_1920_1080_Height;
2155        break;
2156    /**
2157     * If output video size is not given, we take QCIF size,
2158     * should not happen, because already done in M4xVSS_sendCommand */
2159    default:
2160        width_out = 176;
2161        height_out = 144;
2162        break;
2163    }
2164
2165    /**
2166     * Allocate output planes structures */
2167    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
2168         (M4OSA_Char *)"Framing Output plane RGB");
2169    if(framingCtx->FramingRgb == M4OSA_NULL)
2170    {
2171        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2172        return M4ERR_ALLOC;
2173    }
2174    /**
2175     * Resize RGB if needed */
2176    if((pEffect->xVSS.bResize) &&
2177         (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
2178    {
2179        width = width_out;
2180        height = height_out;
2181
2182        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2183             New Width and height %d %d  ",width,height);
2184
2185        framingCtx->FramingRgb->u_height = height_out;
2186        framingCtx->FramingRgb->u_width = width_out;
2187        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2188        framingCtx->FramingRgb->u_topleft = 0;
2189
2190        framingCtx->FramingRgb->pac_data =
2191             (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
2192                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
2193                  (M4OSA_Char *)"Framing Output pac_data RGB");
2194
2195        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
2196        {
2197            M4OSA_TRACE1_0("Allocation error in \
2198                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2199            free(framingCtx->FramingRgb);
2200            free(rgbPlane.pac_data);
2201            return M4ERR_ALLOC;
2202        }
2203
2204        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
2205        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2206              rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
2207
2208        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2209        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2210
2211        if(err != M4NO_ERROR)
2212        {
2213            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2214                 error when resizing RGB plane: 0x%x\n", err);
2215            return err;
2216        }
2217
2218        if(rgbPlane.pac_data != M4OSA_NULL)
2219        {
2220            free(rgbPlane.pac_data);
2221            rgbPlane.pac_data = M4OSA_NULL;
2222        }
2223    }
2224    else
2225    {
2226
2227        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2228              Resizing Not Needed ");
2229
2230        width = rgbPlane.u_width;
2231        height = rgbPlane.u_height;
2232        framingCtx->FramingRgb->u_height = height;
2233        framingCtx->FramingRgb->u_width = width;
2234        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2235        framingCtx->FramingRgb->u_topleft = 0;
2236        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
2237    }
2238
2239
2240    if(pEffect->xVSS.bResize)
2241    {
2242        /**
2243         * Force topleft to 0 for pure framing effect */
2244        framingCtx->topleft_x = 0;
2245        framingCtx->topleft_y = 0;
2246    }
2247
2248
2249    /**
2250     * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
2251     effect */
2252    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
2253         (M4OSA_Char *)"Framing Output plane YUV");
2254    if(framingCtx->FramingYuv == M4OSA_NULL)
2255    {
2256        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2257        free(framingCtx->FramingRgb->pac_data);
2258        return M4ERR_ALLOC;
2259    }
2260
2261    // Alloc for Y, U and V planes
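    // Note: ((x+1)>>1)<<1 rounds x up to the next even value (e.g. 175 -> 176, 176 -> 176),
    // so the Y plane is always even-sized and the U/V planes below are exactly half of it.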
2262    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
2263    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
2264    framingCtx->FramingYuv[0].u_topleft = 0;
2265    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
2266    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
2267        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
2268            (M4OSA_Char *)"Alloc for the output Y");
2269    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
2270    {
2271        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2272        free(framingCtx->FramingYuv);
2273        free(framingCtx->FramingRgb->pac_data);
2274        return M4ERR_ALLOC;
2275    }
2276    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
2277    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
2278    framingCtx->FramingYuv[1].u_topleft = 0;
2279    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
2280
2281
2282    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2283        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
2284        (M4OSA_Char *)"Alloc for the output U");
2285    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
2286        free(framingCtx->FramingYuv[0].pac_data);
2287        free(framingCtx->FramingYuv);
2288        free(framingCtx->FramingRgb->pac_data);
2289        return M4ERR_ALLOC;
2290    }
2291
2292    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
2293    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
2294    framingCtx->FramingYuv[2].u_topleft = 0;
2295    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
2296
2297
2298    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2299        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[2].u_height, M4VS,
2300        (M4OSA_Char *)"Alloc for the output V");
2301    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
2302        free(framingCtx->FramingYuv[1].pac_data);
2303        free(framingCtx->FramingYuv[0].pac_data);
2304        free(framingCtx->FramingYuv);
2305        free(framingCtx->FramingRgb->pac_data);
2306        return M4ERR_ALLOC;
2307    }
2308
2309    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2310        convert RGB to YUV ");
2311
2312    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2313    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2314
2315    if (err != M4NO_ERROR)
2316    {
2317        M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
2318    }
2319    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
2320    return err;
2321}
2322
2323/**
2324 ******************************************************************************
2325 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2326 *
2327 * @brief    This function prepares VSS for editing
2328 * @note    It also sets the special xVSS effects as external effects for the VSS
2329 * @param    pContext    (IN) The integrator own context
2330 *
2331 * @return    M4NO_ERROR:    No error
2332 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2333 * @return    M4ERR_ALLOC: Allocation error (no more memory)
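 *
 * Illustrative call sequence (editor's sketch only, assuming the standard
 * M4VSS3GPP stepping API; error handling and progress reporting are omitted,
 * the real stepping is driven from M4xVSS_step()):
 *
 *     M4OSA_UInt8 progress = 0;
 *     err = M4xVSS_internalGenerateEditedFile(xVSS_context);
 *     do {    /* step until the VSS signals completion (e.g. M4VSS3GPP_WAR_EDITING_DONE) */
 *         err = M4VSS3GPP_editStep(xVSS_context->pCurrentEditContext, &progress);
 *     } while (M4NO_ERROR == err);
 *     err = M4xVSS_internalCloseEditedFile(xVSS_context);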
2334 ******************************************************************************
2335 */
2336M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2337{
2338    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2339    M4VSS3GPP_EditContext pVssCtxt;
2340    M4OSA_UInt32 i,j;
2341    M4OSA_ERR err;
2342
2343    /**
2344     * Create a VSS 3GPP edition instance */
2345    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2346    if (err != M4NO_ERROR)
2347    {
2348        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
2349            err);
2350        M4VSS3GPP_editCleanUp(pVssCtxt);
2351        /**
2352         * Set the VSS context to NULL */
2353        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2354        return err;
2355    }
2356
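    /**
     * Propagate the xVSS output parameters (video/audio formats, output video size,
     * audio sampling frequency, bitrates and the mono flag) into the internal VSS
     * edit context */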
2357    M4VSS3GPP_InternalEditContext* pVSSContext =
2358        (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2359    pVSSContext->xVSS.outputVideoFormat =
2360        xVSS_context->pSettings->xVSS.outputVideoFormat;
2361    pVSSContext->xVSS.outputVideoSize =
2362        xVSS_context->pSettings->xVSS.outputVideoSize;
2363    pVSSContext->xVSS.outputAudioFormat =
2364        xVSS_context->pSettings->xVSS.outputAudioFormat;
2365    pVSSContext->xVSS.outputAudioSamplFreq =
2366        xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2367    pVSSContext->xVSS.outputVideoBitrate =
2368        xVSS_context->pSettings->xVSS.outputVideoBitrate;
2369    pVSSContext->xVSS.outputAudioBitrate =
2370        xVSS_context->pSettings->xVSS.outputAudioBitrate;
2371    pVSSContext->xVSS.bAudioMono =
2372        xVSS_context->pSettings->xVSS.bAudioMono;
2373    /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
2374    if(xVSS_context->targetedBitrate != 0)
2375    {
2376        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2377
2378        pVSSContext->bIsMMS = M4OSA_TRUE;
2379        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
2380        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
2381    }
2382
2383    /*Warning: since the addition of the UTF conversion, pSettings has been replaced in the
2384    following part by pCurrentEditSettings (there is a dedicated current edit settings structure
2385     for the saving, as for the preview)*/
2386
2387    /**
2388     * Set the external video effect functions, for saving mode (to be moved to
2389      M4xVSS_saveStart() ?)*/
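    /**
     * Note: all colour-type effects (black & white, pink, green, sepia, negative,
     * RGB16 colouring, gradient) share M4VSS3GPP_externalVideoEffectColor, while
     * framing and fifties use their dedicated external functions. Only the zoom
     * effects need an effect function context here; the other contexts were
     * already created during the sendCommand step */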
2390    for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2391    {
2392        for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
2393        {
2394            if (M4xVSS_kVideoEffectType_BlackAndWhite ==
2395            xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2396            {
2397                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2398                 M4VSS3GPP_externalVideoEffectColor;
2399                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2400                // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
2401                /*commented FB*/
2402                /**
2403                 * We do not need to set the color context, it is already set
2404                 during sendCommand function */
2405            }
2406            if (M4xVSS_kVideoEffectType_Pink ==
2407                xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2408            {
2409                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2410                 M4VSS3GPP_externalVideoEffectColor;
2411                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2412                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
2413                // use any function context */
2414                /*commented FB*/
2415                /**
2416                 * We do not need to set the color context,
2417                  it is already set during sendCommand function */
2418            }
2419            if (M4xVSS_kVideoEffectType_Green ==
2420                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2421            {
2422                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2423                    M4VSS3GPP_externalVideoEffectColor;
2424                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2425                    // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
2426                     /**< we don't use any function context */
2427                /*commented FB*/
2428                /**
2429                 * We do not need to set the color context, it is already set during
2430                  sendCommand function */
2431            }
2432            if (M4xVSS_kVideoEffectType_Sepia ==
2433                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2434            {
2435                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2436                 M4VSS3GPP_externalVideoEffectColor;
2437                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2438                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
2439                /**< we don't use any function context */
2440                /*commented FB*/
2441                /**
2442                 * We do not need to set the color context, it is already set during
2443                 sendCommand function */
2444            }
2445            if (M4xVSS_kVideoEffectType_Fifties ==
2446             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2447            {
2448                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2449                 M4VSS3GPP_externalVideoEffectFifties;
2450                /**
2451                 * We do not need to set the framing context, it is already set during
2452                 sendCommand function */
2453            }
2454            if (M4xVSS_kVideoEffectType_Negative ==
2455             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2456            {
2457                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2458                 M4VSS3GPP_externalVideoEffectColor;
2459                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2460                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
2461                 /**< we don't use any function context */
2462                /*commented FB*/
2463                /**
2464                 * We do not need to set the color context, it is already set during
2465                  sendCommand function */
2466            }
2467            if (M4xVSS_kVideoEffectType_Framing ==
2468             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2469            {
2470                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2471                 M4VSS3GPP_externalVideoEffectFraming;
2472                /**
2473                 * We do not need to set the framing context, it is already set during
2474                 sendCommand function */
2475            }
2476            if (M4xVSS_kVideoEffectType_ZoomIn ==
2477             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2478            {
2479                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2480                 M4VSS3GPP_externalVideoEffectZoom;
2481                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2482                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
2483                 function context */
2484            }
2485            if (M4xVSS_kVideoEffectType_ZoomOut ==
2486             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2487            {
2488                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2489                 M4VSS3GPP_externalVideoEffectZoom;
2490                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2491                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
2492                 function context */
2493            }
2494            if (M4xVSS_kVideoEffectType_ColorRGB16 ==
2495             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2496            {
2497                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2498                 M4VSS3GPP_externalVideoEffectColor;
2499                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2500                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2501                /**< we don't use any function context */
2502                /**
2503                 * We do not need to set the color context, it is already set during
2504                 sendCommand function */
2505            }
2506            if (M4xVSS_kVideoEffectType_Gradient ==
2507             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2508            {
2509                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2510                 M4VSS3GPP_externalVideoEffectColor;
2511                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2512                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2513                /**< we don't use any function context */
2514                /**
2515                 * We do not need to set the color context, it is already set during
2516                 sendCommand function */
2517            }
2518
2519        }
2520    }
2521
2522    /**
2523     * Open the VSS 3GPP */
2524    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
2525    if (err != M4NO_ERROR)
2526    {
2527        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2528             M4VSS3GPP_editOpen returned 0x%x\n",err);
2529        M4VSS3GPP_editCleanUp(pVssCtxt);
2530        /**
2531         * Set the VSS context to NULL */
2532        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2533        return err;
2534    }
2535
2536    /**
2537     * Save VSS context to be able to close / free VSS later */
2538    xVSS_context->pCurrentEditContext = pVssCtxt;
2539
2540    return M4NO_ERROR;
2541}
2542
2543/**
2544 ******************************************************************************
2545 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2546 *
2547 * @brief    This function cleans up VSS
2548 * @note
2549 * @param    pContext    (IN) The integrator own context
2550 *
2551 * @return    M4NO_ERROR:    No error
2552 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2553 ******************************************************************************
2554 */
2555M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2556{
2557    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2558    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
2559    M4OSA_ERR err;
2560
2561    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
2562    {
2563        /**
2564         * Close the VSS 3GPP */
2565        err = M4VSS3GPP_editClose(pVssCtxt);
2566        if (err != M4NO_ERROR)
2567        {
2568            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
2569                 M4VSS3GPP_editClose returned 0x%x\n",err);
2570            M4VSS3GPP_editCleanUp(pVssCtxt);
2571            /**
2572             * Set the VSS context to NULL */
2573            xVSS_context->pCurrentEditContext = M4OSA_NULL;
2574            return err;
2575        }
2576
2577        /**
2578         * Free this VSS3GPP edition instance */
2579        err = M4VSS3GPP_editCleanUp(pVssCtxt);
2580        /**
2581         * Set the VSS context to NULL */
2582        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2583        if (err != M4NO_ERROR)
2584        {
2585            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
2586                M4VSS3GPP_editCleanUp returned 0x%x\n",err);
2587            return err;
2588        }
2589    }
2590
2591    return M4NO_ERROR;
2592}
2593
2594/**
2595 ******************************************************************************
2596 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2597 *
2598 * @brief    This function prepares VSS for audio mixing
2599 * @note    It takes its parameters from the BGM settings in the xVSS internal context
2600 * @param    pContext    (IN) The integrator own context
2601 *
2602 * @return    M4NO_ERROR:    No error
2603 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2604 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2605 ******************************************************************************
2606 */
2607/***
2608 * FB: the function has been modified so that the structure used for the saving is now
2609 *  pCurrentEditSettings and no longer pSettings
2610 * This change has been added for the UTF support
2611 * The "xVSS_context->pSettings" references have been replaced by "xVSS_context->pCurrentEditSettings"
2612 ***/
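/* Note (editor's summary): the audio mixing context created below is stepped with
   M4VSS3GPP_audioMixingStep() from M4xVSS_step(), and both the context and the
   settings allocated here are released in M4xVSS_internalCloseAudioMixedFile() */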
2613M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2614{
2615    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2616    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
2617    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
2618    M4OSA_ERR err;
2619    M4VIDEOEDITING_ClipProperties fileProperties;
2620
2621    /**
2622     * Allocate audio mixing settings structure and fill it with BGM parameters */
2623    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
2624        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
2625    if(pAudioMixSettings == M4OSA_NULL)
2626    {
2627        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
2628        return M4ERR_ALLOC;
2629    }
2630
2631    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
2632         M4VIDEOEDITING_kFileType_3GPP)
2633    {
2634        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
2635             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
2636                 &fileProperties);
2637        if(err != M4NO_ERROR)
2638        {
2639            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2640                 cannot retrieve audio BGM properties (err 0x%x) ->\
2641                     re-encoding audio background music", err);
2642            fileProperties.AudioStreamType =
2643                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
2644                  /* To force BGM encoding */
2645        }
2646    }
2647
2648    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
2649    pAudioMixSettings->AddedAudioFileType =
2650     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
2651    pAudioMixSettings->pAddedAudioTrackFile =
2652     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
2653    pAudioMixSettings->uiAddVolume =
2654     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
2655
2656    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
2657    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2658    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
2659    pAudioMixSettings->uiSamplingFrequency =
2660     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
2661    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
2662
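    /* The audio mixer expects the ducking and volume levels as fractional (0..1)
       values, whereas the xVSS BGM settings carry them as percentages, hence the
       divisions by 100 below */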
2663    pAudioMixSettings->b_DuckingNeedeed =
2664     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
2665    pAudioMixSettings->fBTVolLevel =
2666     (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
2667    pAudioMixSettings->InDucking_threshold =
2668     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
2669    pAudioMixSettings->InDucking_lowVolume =
2670     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
2671    pAudioMixSettings->fPTVolLevel =
2672     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
2673    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
2674
2675    if(xVSS_context->pSettings->xVSS.bAudioMono)
2676    {
2677        pAudioMixSettings->outputNBChannels = 1;
2678    }
2679    else
2680    {
2681        pAudioMixSettings->outputNBChannels = 2;
2682    }
2683
2684    /**
2685     * Fill audio mix settings with BGM parameters */
2686    pAudioMixSettings->uiBeginLoop =
2687     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
2688    pAudioMixSettings->uiEndLoop =
2689     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
2690    pAudioMixSettings->uiAddCts =
2691     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
2692
2693    /**
2694     * Output file of the audio mixer will be final file (audio mixing is the last step) */
2695    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
2696    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
2697
2698    /**
2699     * Input file of the audio mixer is a temporary file containing all audio/video editions */
2700    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
2701
2702    /**
2703     * Save audio mixing settings pointer to be able to free it in
2704     M4xVSS_internalCloseAudioMixedFile function */
2705    xVSS_context->pAudioMixSettings = pAudioMixSettings;
2706
2707    /**
2708     * Create a VSS 3GPP audio mixing instance */
2709    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
2710         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2711
2712    /**
2713     * Save audio mixing context to be able to call audio mixing step function in
2714      M4xVSS_step function */
2715    xVSS_context->pAudioMixContext = pAudioMixingCtxt;
2716
2717    if (err != M4NO_ERROR)
2718    {
2719        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2720             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
2721        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
2722        return err;
2723    }
2724
2725    return M4NO_ERROR;
2726}
2727
2728/**
2729 ******************************************************************************
2730 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2731 *
2732 * @brief    This function cleans up VSS for audio mixing
2733 * @note
2734 * @param    pContext    (IN) The integrator own context
2735 *
2736 * @return    M4NO_ERROR:    No error
2737 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2738 ******************************************************************************
2739 */
2740M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2741{
2742    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2743    M4OSA_ERR err;
2744
2745    /**
2746     * Free this VSS3GPP audio mixing instance */
2747    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
2748    {
2749        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
2750        if (err != M4NO_ERROR)
2751        {
2752            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
2753                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
2754            return err;
2755        }
2756    }
2757
2758    /**
2759     * Free VSS audio mixing settings */
2760    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
2761    {
2762        free(xVSS_context->pAudioMixSettings);
2763        xVSS_context->pAudioMixSettings = M4OSA_NULL;
2764    }
2765
2766    return M4NO_ERROR;
2767}
2768
2769/**
2770 ******************************************************************************
2771 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2772 *
2773 * @brief    This function cleans up preview edition structure used to generate
2774 *            preview.3gp file given to the VPS
2775 * @note    It also frees the preview structure given to the VPS
2776 * @param    pContext    (IN) The integrator own context
2777 *
2778 * @return    M4NO_ERROR:    No error
2779 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2780 ******************************************************************************
2781 */
2782M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2783{
2784    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2785    M4OSA_UInt8 i;
2786
2787    /**
2788     * Free clip/transition settings */
2789    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2790    {
2791        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2792
2793        free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2794        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2795
2796        /**
2797         * Because there is one transition fewer than the number of clips */
2798        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2799        {
2800            free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2801            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2802        }
2803    }
2804
2805    /**
2806     * Free clip/transition list */
2807    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2808    {
2809        free((xVSS_context->pCurrentEditSettings->pClipList));
2810        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2811    }
2812    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2813    {
2814        free((xVSS_context->pCurrentEditSettings->pTransitionList));
2815        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2816    }
2817
2818    /**
2819     * Free output preview file path */
2820    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2821    {
2822        free(xVSS_context->pCurrentEditSettings->pOutputFile);
2823        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2824    }
2825
2826    /**
2827     * Free temporary preview file path */
2828    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2829    {
2830        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2831        free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2832        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2833    }
2834
2835    /**
2836     * Free "local" BGM settings */
2837    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2838    {
2839        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2840        {
2841            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2842            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2843        }
2844        free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2845        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2846    }
2847
2848    /**
2849     * Free current edit settings structure */
2850    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2851    {
2852        free(xVSS_context->pCurrentEditSettings);
2853        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2854    }
2855
2856    /**
2857     * Free preview effects given to application */
2858    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
2859    {
2860        free(xVSS_context->pPreviewSettings->Effects);
2861        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
2862        xVSS_context->pPreviewSettings->nbEffects = 0;
2863    }
2864
2865    return M4NO_ERROR;
2866}
2867
2868
2869/**
2870 ******************************************************************************
2871 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2872 *
2873 * @brief    This function cleans up saving edition structure used to generate
2874 *            output.3gp file given to the VPS
2875 * @note
2876 * @param    pContext    (IN) The integrator own context
2877 *
2878 * @return    M4NO_ERROR:    No error
2879 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2880 ******************************************************************************
2881 */
2882M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2883{
2884    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2885    M4OSA_UInt8 i;
2886
2887    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2888    {
2889        /**
2890         * Free clip/transition settings */
2891        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2892        {
2893            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2894
2895            free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2896            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2897
2898            /**
2899             * Because there is one transition fewer than the number of clips */
2900            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2901            {
2902                free(\
2903                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2904                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2905            }
2906        }
2907
2908        /**
2909         * Free clip/transition list */
2910        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2911        {
2912            free((xVSS_context->pCurrentEditSettings->pClipList));
2913            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2914        }
2915        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2916        {
2917            free((xVSS_context->pCurrentEditSettings->pTransitionList));
2918            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2919        }
2920
2921        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
2922        {
2923            free((xVSS_context->pCurrentEditSettings->Effects));
2924            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
2925            xVSS_context->pCurrentEditSettings->nbEffects = 0;
2926        }
2927
2928        /**
2929         * Free output saving file path */
2930        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2931        {
2932            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2933            {
2934                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
2935                free(xVSS_context->pCurrentEditSettings->pOutputFile);
2936            }
2937            if(xVSS_context->pOutputFile != M4OSA_NULL)
2938            {
2939                free(xVSS_context->pOutputFile);
2940                xVSS_context->pOutputFile = M4OSA_NULL;
2941            }
2942            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
2943            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2944        }
2945
2946        /**
2947         * Free temporary saving file path */
2948        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2949        {
2950            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2951            free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2952            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2953        }
2954
2955        /**
2956         * Free "local" BGM settings */
2957        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2958        {
2959            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2960            {
2961                free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2962                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2963            }
2964            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2965            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2966        }
2967
2968        /**
2969         * Free current edit settings structure */
2970        free(xVSS_context->pCurrentEditSettings);
2971        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2972    }
2973
2974    return M4NO_ERROR;
2975}
2976
2977
2978/**
2979 ******************************************************************************
2980 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2981 *
2982 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
2983 * @note
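 *            Only the clip list, transition list and effects list (with their
 *            xVSS specific contexts) are released; the M4VSS3GPP_EditSettings
 *            structure itself is left to the caller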
2984 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
2985 *
2986 * @return    M4NO_ERROR:    No error
2987 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2988 ******************************************************************************
2989 */
2990M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2991{
2992    M4OSA_UInt8 i,j;
2993
2994    /**
2995     * For each clip ... */
2996    for(i=0; i<pSettings->uiClipNumber; i++)
2997    {
2998        /**
2999         * ... free clip settings */
3000        if(pSettings->pClipList[i] != M4OSA_NULL)
3001        {
3002            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
3003
3004            free((pSettings->pClipList[i]));
3005            pSettings->pClipList[i] = M4OSA_NULL;
3006        }
3007
3008        /**
3009         * ... free transition settings */
3010        if(i < pSettings->uiClipNumber-1) /* Because there is one transition fewer than clips */
3011        {
3012            if(pSettings->pTransitionList[i] != M4OSA_NULL)
3013            {
3014                switch (pSettings->pTransitionList[i]->VideoTransitionType)
3015                {
3016                    case M4xVSS_kVideoTransitionType_AlphaMagic:
3017
3018                        /**
3019                         * In case of Alpha Magic transition,
3020                          some extra parameters need to be freed */
3021                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
3022                             != M4OSA_NULL)
3023                        {
3024                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3025                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3026                                    pPlane->pac_data));
3027                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
3028                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
3029
3030                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3031                                pSettings->pTransitionList[i]->\
3032                                    pExtVideoTransitionFctCtxt)->pPlane));
3033                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
3034                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
3035
3036                            free((pSettings->pTransitionList[i]->\
3037                                pExtVideoTransitionFctCtxt));
3038                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3039
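                            /* Transitions that reuse the same alpha file share one
                               context: clear the pointer in the remaining transitions
                               so that it is not freed a second time */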
3040                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
3041                            {
3042                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
3043                                {
3044                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
3045                                     M4xVSS_kVideoTransitionType_AlphaMagic)
3046                                    {
3047                                        M4OSA_UInt32 pCmpResult=0;
3048                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
3049                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3050                                                pAlphaFilePath,
3051                                                (const char *)pSettings->pTransitionList[j]->\
3052                                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3053                                                pAlphaFilePath);
3054                                        if(pCmpResult == 0)
3055                                        {
3056                                            /* Free extra internal alpha magic structure and put
3057                                            it to NULL to avoid refreeing it */
3058                                            free((pSettings->\
3059                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
3060                                            pSettings->pTransitionList[j]->\
3061                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
3062                                        }
3063                                    }
3064                                }
3065                            }
3066                        }
3067
3068                        if(pSettings->pTransitionList[i]->\
3069                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
3070                        {
3071                            if(pSettings->pTransitionList[i]->\
3072                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3073                                    pAlphaFilePath != M4OSA_NULL)
3074                            {
3075                                free(pSettings->\
3076                                    pTransitionList[i]->\
3077                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3078                                            pAlphaFilePath);
3079                                pSettings->pTransitionList[i]->\
3080                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
3081                                        pAlphaFilePath = M4OSA_NULL;
3082                            }
3083                            free(pSettings->pTransitionList[i]->\
3084                                xVSS.transitionSpecific.pAlphaMagicSettings);
3085                            pSettings->pTransitionList[i]->\
3086                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
3087
3088                        }
3089
3090                    break;
3091
3092
3093                    case M4xVSS_kVideoTransitionType_SlideTransition:
3094                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
3095                            xVSS.transitionSpecific.pSlideTransitionSettings)
3096                        {
3097                            free(pSettings->pTransitionList[i]->\
3098                                xVSS.transitionSpecific.pSlideTransitionSettings);
3099                            pSettings->pTransitionList[i]->\
3100                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
3101                        }
3102                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
3103                        {
3104                            free((pSettings->pTransitionList[i]->\
3105                                pExtVideoTransitionFctCtxt));
3106                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3107                        }
3108                    break;
3109                    default:
3110                    break;
3111
3112                }
3113                /**
3114                 * Free transition settings structure */
3115                free((pSettings->pTransitionList[i]));
3116                pSettings->pTransitionList[i] = M4OSA_NULL;
3117            }
3118        }
3119    }
3120
3121    /**
3122     * Free clip list */
3123    if(pSettings->pClipList != M4OSA_NULL)
3124    {
3125        free((pSettings->pClipList));
3126        pSettings->pClipList = M4OSA_NULL;
3127    }
3128
3129    /**
3130     * Free transition list */
3131    if(pSettings->pTransitionList != M4OSA_NULL)
3132    {
3133        free((pSettings->pTransitionList));
3134        pSettings->pTransitionList = M4OSA_NULL;
3135    }
3136
3137    /**
3138     * RC: Free effects list */
3139    if(pSettings->Effects != M4OSA_NULL)
3140    {
3141        for(i=0; i<pSettings->nbEffects; i++)
3142        {
3143            /**
3144             * For each clip, free framing structure if needed */
3145            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
3146                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
3147            {
3148#ifdef DECODE_GIF_ON_SAVING
3149                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3150#else
3151                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3152                M4xVSS_FramingStruct* framingCtx_save;
3153                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
3154#endif
3155
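                /* Free the framing context. With DECODE_GIF_ON_SAVING it is a single
                   M4xVSS_FramingContext holding the first and last decoded frames plus
                   the SPS and alpha blending helpers; otherwise it is a circular list
                   of M4xVSS_FramingStruct nodes walked through pNext until it loops
                   back to the first element (or a NULL link is found) */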
3156#ifdef DECODE_GIF_ON_SAVING
3157                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3158                 non-existent pointer */
3159                {
3160                    if(framingCtx->aFramingCtx != M4OSA_NULL)
3161                    {
3163                        if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
3164                        {
3165                            free(framingCtx->aFramingCtx->\
3166                                FramingRgb->pac_data);
3167                            framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
3168                            free(framingCtx->aFramingCtx->FramingRgb);
3169                            framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
3170                        }
3172                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
3173                        {
3174                            free(framingCtx->aFramingCtx->\
3175                                FramingYuv[0].pac_data);
3176                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3177                            free(framingCtx->aFramingCtx->\
3178                                FramingYuv[1].pac_data);
3179                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
3180                            free(framingCtx->aFramingCtx->\
3181                                FramingYuv[2].pac_data);
3182                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
3183                            free(framingCtx->aFramingCtx->FramingYuv);
3184                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
3185                        }
3186                        free(framingCtx->aFramingCtx);
3187                        framingCtx->aFramingCtx = M4OSA_NULL;
3188                    }
3189                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
3190                    {
3191                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
3192                        {
3193                            free(framingCtx->aFramingCtx_last->\
3194                                FramingRgb->pac_data);
3195                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
3196                            free(framingCtx->aFramingCtx_last->\
3197                                FramingRgb);
3198                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
3199                        }
3200                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
3201                        {
3202                            free(framingCtx->aFramingCtx_last->\
3203                                FramingYuv[0].pac_data);
3204                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
3205                            free(framingCtx->aFramingCtx_last->FramingYuv);
3206                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
3207                        }
3208                        free(framingCtx->aFramingCtx_last);
3209                        framingCtx->aFramingCtx_last = M4OSA_NULL;
3210                    }
3211                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
3212                    {
3213                        free(framingCtx->pEffectFilePath);
3214                        framingCtx->pEffectFilePath = M4OSA_NULL;
3215                    }
3216                    /*In case it is still allocated*/
3217                    if(framingCtx->pSPSContext != M4OSA_NULL)
3218                    {
3219                    //    M4SPS_destroy(framingCtx->pSPSContext);
3220                        framingCtx->pSPSContext = M4OSA_NULL;
3221                    }
3222                    /*Alpha blending structure*/
3223                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
3224                    {
3225                        free(framingCtx->alphaBlendingStruct);
3226                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
3227                    }
3228
3229                    free(framingCtx);
3230                    framingCtx = M4OSA_NULL;
3231                }
3232#else
3233                do
3234                {
3235                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3236                    non-existent pointer */
3237                    {
3238                        if(framingCtx->FramingRgb != M4OSA_NULL)
3239                        {
3240                            free(framingCtx->FramingRgb->pac_data);
3241                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
3242                            free(framingCtx->FramingRgb);
3243                            framingCtx->FramingRgb = M4OSA_NULL;
3244                        }
3245                        if(framingCtx->FramingYuv != M4OSA_NULL)
3246                        {
3247                            free(framingCtx->FramingYuv[0].pac_data);
3248                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3249                            free(framingCtx->FramingYuv);
3250                            framingCtx->FramingYuv = M4OSA_NULL;
3251                        }
3252                        framingCtx_save = framingCtx->pNext;
3253                        free(framingCtx);
3254                        framingCtx = M4OSA_NULL;
3255                        framingCtx = framingCtx_save;
3256                    }
3257                    else
3258                    {
3259                        /*FB: bug fix P4ME00003002*/
3260                        break;
3261                    }
3262                } while(framingCtx_first != framingCtx);
3263#endif
3264            }
3265            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
3266            {
3267                /* Free Fifties context */
3268                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3269
3270                if(FiftiesCtx != M4OSA_NULL)
3271                {
3272                    free(FiftiesCtx);
3273                    FiftiesCtx = M4OSA_NULL;
3274                }
3275
3276            }
3277            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
3278                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
3279                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
3280                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
3281                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
3282                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
3283                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
3284            {
3285                /* Free Color context */
3286                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3287
3288                if(ColorCtx != M4OSA_NULL)
3289                {
3290                    free(ColorCtx);
3291                    ColorCtx = M4OSA_NULL;
3292                }
3293            }
3294
3295            /* Free simple fields */
3296            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
3297            {
3298                free(pSettings->Effects[i].xVSS.pFramingFilePath);
3299                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
3300            }
3301            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
3302            {
3303                free(pSettings->Effects[i].xVSS.pFramingBuffer);
3304                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
3305            }
3306            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
3307            {
3308                free(pSettings->Effects[i].xVSS.pTextBuffer);
3309                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
3310            }
3311        }
3312        free(pSettings->Effects);
3313        pSettings->Effects = M4OSA_NULL;
3314    }
3315
3316    return M4NO_ERROR;
3317}
3318
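/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
 *
 * @brief    This function frees the structures left by the sendCommand step
 * @note    It releases the BGM settings, the clip/transition/effect settings,
 *            the Pto3GPP and MCS parameter lists (removing their temporary files),
 *            the PCM preview file path and the output file path, then resets the
 *            xVSS state variables
 * @param    pContext    (IN) The integrator own context
 *
 * @return    M4NO_ERROR:    No error
 ******************************************************************************
 */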
3319M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
3320{
3321    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3322//    M4OSA_UInt8 i,j;
3323
3324    /* Free "local" BGM settings */
3325    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3326    {
3327        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3328        {
3329            free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
3330            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3331        }
3332        free(xVSS_context->pSettings->xVSS.pBGMtrack);
3333        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
3334    }
3335
3336    M4xVSS_freeSettings(xVSS_context->pSettings);
3337
3338    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
3339    {
3340        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
3341        M4xVSS_Pto3GPP_params* pParams_sauv;
3342
3343        while(pParams != M4OSA_NULL)
3344        {
3345            if(pParams->pFileIn != M4OSA_NULL)
3346            {
3347                free(pParams->pFileIn);
3348                pParams->pFileIn = M4OSA_NULL;
3349            }
3350            if(pParams->pFileOut != M4OSA_NULL)
3351            {
3352                /* Delete temporary file */
3353                remove((const char *)pParams->pFileOut);
3354                free(pParams->pFileOut);
3355                pParams->pFileOut = M4OSA_NULL;
3356            }
3357            if(pParams->pFileTemp != M4OSA_NULL)
3358            {
3359                /* Delete temporary file */
3360#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3361                remove((const char *)pParams->pFileTemp);
3362                free(pParams->pFileTemp);
3363#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3364                pParams->pFileTemp = M4OSA_NULL;
3365            }
3366            pParams_sauv = pParams;
3367            pParams = pParams->pNext;
3368            free(pParams_sauv);
3369            pParams_sauv = M4OSA_NULL;
3370        }
3371    }
3372
3373    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
3374    {
3375        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
3376        M4xVSS_MCS_params* pParams_sauv;
3377
3378        while(pParams != M4OSA_NULL)
3379        {
3380            if(pParams->pFileIn != M4OSA_NULL)
3381            {
3382                free(pParams->pFileIn);
3383                pParams->pFileIn = M4OSA_NULL;
3384            }
3385            if(pParams->pFileOut != M4OSA_NULL)
3386            {
3387                /* Delete temporary file */
3388                remove((const char *)pParams->pFileOut);
3389                free(pParams->pFileOut);
3390                pParams->pFileOut = M4OSA_NULL;
3391            }
3392            if(pParams->pFileTemp != M4OSA_NULL)
3393            {
3394                /* Delete temporary file */
3395#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3396                remove((const char *)pParams->pFileTemp);
3397                free(pParams->pFileTemp);
3398#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3399                pParams->pFileTemp = M4OSA_NULL;
3400            }
3401            pParams_sauv = pParams;
3402            pParams = pParams->pNext;
3403            free(pParams_sauv);
3404            pParams_sauv = M4OSA_NULL;
3405        }
3406    }
3407
3408    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
3409    {
3410        free(xVSS_context->pcmPreviewFile);
3411        xVSS_context->pcmPreviewFile = M4OSA_NULL;
3412    }
3413    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
3414        && xVSS_context->pOutputFile != M4OSA_NULL)
3415    {
3416        free(xVSS_context->pSettings->pOutputFile);
3417        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3418        xVSS_context->pOutputFile = M4OSA_NULL;
3419    }
3420
3421    /* Reinit all context variables */
3422    xVSS_context->previousClipNumber = 0;
3423    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
3424    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
3425    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
3426    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
3427    xVSS_context->pMCSparamsList = M4OSA_NULL;
3428    xVSS_context->pMCScurrentParams = M4OSA_NULL;
3429    xVSS_context->tempFileIndex = 0;
3430    xVSS_context->targetedTimescale = 0;
3431
3432    return M4NO_ERROR;
3433}
3434
3435/**
3436 ******************************************************************************
3437 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
3438 *                                    M4OSA_Char* pFile,
3439 *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
3440 *
3441 * @brief    This function retrieves the properties of an input 3GP file using the MCS
3442 * @note
3443 * @param    pContext        (IN) The integrator own context
3444 * @param    pFile            (IN) 3GP file to analyse
3445 * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
3446 *                            the 3GP file properties
3447 *
3448 * @return    M4NO_ERROR:    No error
3449 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3450 ******************************************************************************
3451 */
3452M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
3453                                       M4VIDEOEDITING_ClipProperties *pFileProperties)
3454{
3455    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3456    M4OSA_ERR err;
3457    M4MCS_Context mcs_context;
3458
3459    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
3460    if(err != M4NO_ERROR)
3461    {
3462        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
3463        return err;
3464    }
3465
3466    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
3467    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
3468        M4OSA_NULL, M4OSA_NULL);
3469    if (err != M4NO_ERROR)
3470    {
3471        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
3472        M4MCS_abort(mcs_context);
3473        return err;
3474    }
3475
3476    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
3477    if(err != M4NO_ERROR)
3478    {
3479        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
3480        M4MCS_abort(mcs_context);
3481        return err;
3482    }
3483
3484    err = M4MCS_abort(mcs_context);
3485    if (err != M4NO_ERROR)
3486    {
3487        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
3488        return err;
3489    }
3490
3491    return M4NO_ERROR;
3492}
3493
3494
3495/**
3496 ******************************************************************************
3497 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3498 *                     M4VSS3GPP_EditSettings* pSettings, M4OSA_UInt32* pTargetedTimeScale)
3499 *
3500 * @brief    This function retrieves the targeted time scale
3501 * @note
3502 * @param    pContext            (IN)    The integrator own context
3503 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
3504 *
3505 * @return    M4NO_ERROR:    No error
3506 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3507 ******************************************************************************
3508 */
3509M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3510                                                 M4VSS3GPP_EditSettings* pSettings,
3511                                                  M4OSA_UInt32* pTargetedTimeScale)
3512{
3513    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3514    M4OSA_ERR err;
3515    M4OSA_UInt32 totalDuration = 0;
3516    M4OSA_UInt8 i = 0;
3517    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
3518
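    /* The targeted time scale is the video time scale of the longest MPEG-4 clip
       (duration measured after the begin/end cut times). Still images (ARGB8888,
       converted through Pto3GPP, which uses a time scale of 30) force a value of
       30, and any result below 30 is raised to 30 to keep the shell encoder from
       looping */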
3519    for(i=0;i<pSettings->uiClipNumber;i++)
3520    {
3521        /*search timescale only in mpeg4 case*/
3522        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
3523            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
3524            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
3525        {
3526            M4VIDEOEDITING_ClipProperties fileProperties;
3527
3528            /*UTF conversion support*/
3529            M4OSA_Char* pDecodedPath = M4OSA_NULL;
3530
3531            /**
3532            * UTF conversion: convert into the customer format, before being used*/
3533            pDecodedPath = pSettings->pClipList[i]->pFile;
3534
3535            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
3536                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
3537            {
3538                M4OSA_UInt32 length = 0;
3539                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
3540                     (M4OSA_Void*) pSettings->pClipList[i]->pFile,
3541                        (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
3542                             &length);
3543                if(err != M4NO_ERROR)
3544                {
3545                    M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3546                         M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
3547                    return err;
3548                }
3549                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
3550            }
3551
3552            /*End of the conversion: use the decoded path*/
3553            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
3554
3555            /*get input file properties*/
3556            /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
3557                pClipList[i]->pFile, &fileProperties);*/
3558            if(M4NO_ERROR != err)
3559            {
3560                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3561                     M4xVSS_internalGetProperties returned: 0x%x", err);
3562                return err;
3563            }
3564            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
3565            {
3566                if(pSettings->pClipList[i]->uiEndCutTime > 0)
3567                {
3568                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
3569                        - pSettings->pClipList[i]->uiBeginCutTime))
3570                    {
3571                        tempTimeScale = fileProperties.uiVideoTimeScale;
3572                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
3573                             - pSettings->pClipList[i]->uiBeginCutTime);
3574                    }
3575                }
3576                else
3577                {
3578                    if(tempDuration < (fileProperties.uiClipDuration\
3579                         - pSettings->pClipList[i]->uiBeginCutTime))
3580                    {
3581                        tempTimeScale = fileProperties.uiVideoTimeScale;
3582                        tempDuration = (fileProperties.uiClipDuration\
3583                             - pSettings->pClipList[i]->uiBeginCutTime);
3584                    }
3585                }
3586            }
3587        }
3588        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
3589        {
3590            /*the timescale is 30 for PTO3GP*/
3591            *pTargetedTimeScale = 30;
3592            return M4NO_ERROR;
3593
3594        }
3595    }
3596
3597    if(tempTimeScale >= 30)/*Enforce a minimum time scale of 30: too small a timescale
3598    can cause an infinite loop in the shell encoder*/
3599    {
3600        *pTargetedTimeScale = tempTimeScale;
3601    }
3602    else
3603    {
3604        *pTargetedTimeScale = 30;
3605    }
3606
3607    return M4NO_ERROR;
3608}
3609
3610
3611/**
3612 ******************************************************************************
3613 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3614 *                                                    M4VIFI_ImagePlane *PlaneIn,
3615 *                                                    M4VIFI_ImagePlane *PlaneOut,
3616 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3617 *                                                    M4OSA_UInt32 uiEffectKind)
3618 *
3619 * @brief    This function applies a color effect on an input YUV420 planar frame
3620 * @note
3621 * @param    pFunctionContext(IN) Pointer to a M4xVSS_ColorStruct giving the color effect to apply
3622 * @param    PlaneIn            (IN) Input YUV420 planar
3623 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3624 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
3625 * @param    uiEffectKind    (IN) Unused
3626 *
3627 * @return    M4VIFI_OK:    No error
3628 ******************************************************************************
3629 */
3630M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3631                                             M4VIFI_ImagePlane *PlaneIn,
3632                                             M4VIFI_ImagePlane *PlaneOut,
3633                                             M4VSS3GPP_ExternalProgress *pProgress,
3634                                             M4OSA_UInt32 uiEffectKind)
3635{
3636    M4VIFI_Int32 plane_number;
3637    M4VIFI_UInt32 i,j;
3638    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
3639    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
3640
3641    for (plane_number = 0; plane_number < 3; plane_number++)
3642    {
3643        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
3644        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
3645        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
3646        {
3647            /**
3648             * Chrominance */
3649            if(plane_number==1 || plane_number==2)
3650            {
3651                //switch ((M4OSA_UInt32)pFunctionContext)
3652                // commented out: the effect context is now passed in a structure (ColorContext)
3653                switch (ColorContext->colorEffectType)
3654                {
3655                    case M4xVSS_kVideoEffectType_BlackAndWhite:
3656                        memset((void *)p_buf_dest,128,
3657                         PlaneIn[plane_number].u_width);
3658                        break;
3659                    case M4xVSS_kVideoEffectType_Pink:
3660                        memset((void *)p_buf_dest,255,
3661                         PlaneIn[plane_number].u_width);
3662                        break;
3663                    case M4xVSS_kVideoEffectType_Green:
3664                        memset((void *)p_buf_dest,0,
3665                         PlaneIn[plane_number].u_width);
3666                        break;
3667                    case M4xVSS_kVideoEffectType_Sepia:
3668                        if(plane_number==1)
3669                        {
3670                            memset((void *)p_buf_dest,117,
3671                             PlaneIn[plane_number].u_width);
3672                        }
3673                        else
3674                        {
3675                            memset((void *)p_buf_dest,139,
3676                             PlaneIn[plane_number].u_width);
3677                        }
3678                        break;
3679                    case M4xVSS_kVideoEffectType_Negative:
3680                        memcpy((void *)p_buf_dest,
3681                         (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3682                        break;
3683
3684                    case M4xVSS_kVideoEffectType_ColorRGB16:
3685                        {
3686                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3687
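                            /* RGB565 layout: bits 15..11 = red (5 bits), bits 10..5 =
                               green (6 bits), bits 4..0 = blue (5 bits). The U16()/V16()
                               helpers (from the VIFI defines included for the RGB16
                               effect) convert that color to its chroma components; the
                               whole U (resp. V) plane is filled with that constant while
                               the luma plane is copied unchanged. */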
3688                            /*first get the r, g, b*/
3689                            b = (ColorContext->rgb16ColorData &  0x001f);
3690                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3691                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3692
3693                            /*keep y, but replace u and v*/
3694                            if(plane_number==1)
3695                            {
3696                                /*then convert to u*/
3697                                u = U16(r, g, b);
3698                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3699                                 PlaneIn[plane_number].u_width);
3700                            }
3701                            if(plane_number==2)
3702                            {
3703                                /*then convert to v*/
3704                                v = V16(r, g, b);
3705                                memset((void *)p_buf_dest, (M4OSA_UInt8)v,
3706                                 PlaneIn[plane_number].u_width);
3707                            }
3708                        }
3709                        break;
3710                    case M4xVSS_kVideoEffectType_Gradient:
3711                        {
3712                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3713
3714                            /*first get the r, g, b*/
3715                            b = (ColorContext->rgb16ColorData &  0x001f);
3716                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3717                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3718
3719                            /* color gradation: attenuate r, g, b linearly with the row index i */
3720                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
3721                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
3722                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
3723
3724                            /*keep y, but replace u and v*/
3725                            if(plane_number==1)
3726                            {
3727                                /*then convert to u*/
3728                                u = U16(r, g, b);
3729                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3730                                 PlaneIn[plane_number].u_width);
3731                            }
3732                            if(plane_number==2)
3733                            {
3734                                /*then convert to v*/
3735                                v = V16(r, g, b);
3736                                memset((void *)p_buf_dest,(M4OSA_UInt8)v,
3737                                 PlaneIn[plane_number].u_width);
3738                            }
3739                        }
3740                        break;
3741                    default:
3742                        break;
3743                }
3744            }
3745            /**
3746             * Luminance */
3747            else
3748            {
3749                //switch ((M4OSA_UInt32)pFunctionContext)
3750                // commented out: the effect context is now passed in a structure (ColorContext)
3751                switch (ColorContext->colorEffectType)
3752                {
3753                case M4xVSS_kVideoEffectType_Negative:
3754                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
3755                    {
3756                            p_buf_dest[j] = 255 - p_buf_src[j];
3757                    }
3758                    break;
3759                default:
3760                    memcpy((void *)p_buf_dest,
3761                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3762                    break;
3763                }
3764            }
3765            p_buf_src += PlaneIn[plane_number].u_stride;
3766            p_buf_dest += PlaneOut[plane_number].u_stride;
3767        }
3768    }
3769
3770    return M4VIFI_OK;
3771}
3772
3773/**
3774 ******************************************************************************
3775 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *userData,
3776 *                                                    M4VIFI_ImagePlane *PlaneIn,
3777 *                                                    M4VIFI_ImagePlane *PlaneOut,
3778 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3779 *                                                    M4OSA_UInt32 uiEffectKind)
3780 *
3781 * @brief    This function adds a fixed or animated image (framing overlay) on an input YUV420 planar frame
3782 * @note
3783 * @param    userData        (IN) Framing context (M4xVSS_FramingContext / M4xVSS_FramingStruct)
3784 * @param    PlaneIn            (IN) Input YUV420 planar
3785 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3786 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
3787 * @param    uiEffectKind    (IN) Unused
3788 *
3789 * @return    M4VIFI_OK:    No error
3790 ******************************************************************************
3791 */
3792M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3793                                                M4VIFI_ImagePlane PlaneIn[3],
3794                                                M4VIFI_ImagePlane *PlaneOut,
3795                                                M4VSS3GPP_ExternalProgress *pProgress,
3796                                                M4OSA_UInt32 uiEffectKind )
3797{
3798    M4VIFI_UInt32 x,y;
3799
3800    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
3801    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
3802    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
3803
3804    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
3805    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
3806    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
3807
3808    M4VIFI_UInt8 *p_out0;
3809    M4VIFI_UInt8 *p_out1;
3810    M4VIFI_UInt8 *p_out2;
3811
3812    M4VIFI_UInt32 topleft[2];
3813
3814    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
3815    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
3816
3817#ifndef DECODE_GIF_ON_SAVING
3818    Framing = (M4xVSS_FramingStruct *)userData;
3819    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
3820    FramingRGB = Framing->FramingRgb->pac_data;
3821#endif /*DECODE_GIF_ON_SAVING*/
3822
3823    /*FB*/
3824#ifdef DECODE_GIF_ON_SAVING
3825    M4OSA_ERR err;
3826    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
3827    currentFraming = (M4xVSS_FramingStruct *)Framing;
3828    FramingRGB = Framing->FramingRgb->pac_data;
3829#endif /*DECODE_GIF_ON_SAVING*/
3830    /*end FB*/
3831
3832    /**
3833     * Initialize input / output plane pointers */
3834    p_in_Y += PlaneIn[0].u_topleft;
3835    p_in_U += PlaneIn[1].u_topleft;
3836    p_in_V += PlaneIn[2].u_topleft;
3837
3838    p_out0 = PlaneOut[0].pac_data;
3839    p_out1 = PlaneOut[1].pac_data;
3840    p_out2 = PlaneOut[2].pac_data;
3841
3842    /**
3843     * Depending on time, initialize Framing frame to use */
3844    if(Framing->previousClipTime == -1)
3845    {
3846        Framing->previousClipTime = pProgress->uiOutputTime;
3847    }
3848
3849    /**
3850     * If the current clip time has reached the duration of one frame of the framing picture,
3851     * we need to step to the next framing picture */
3852
3853    Framing->previousClipTime = pProgress->uiOutputTime;
3854    FramingRGB = currentFraming->FramingRgb->pac_data;
3855    topleft[0] = currentFraming->topleft_x;
3856    topleft[1] = currentFraming->topleft_y;
3857
3858    for( x=0 ;x < PlaneIn[0].u_height ; x++)
3859    {
3860        for( y=0 ;y < PlaneIn[0].u_width ; y++)
3861        {
3862            /**
3863             * To handle framing with input size != output size:
3864             * the framing is applied only where the coordinates fall inside the framing rectangle */
3865            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
3866                y >= topleft[0] &&
3867                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
3868                x >= topleft[1])
3869            {
3870                /*Alpha blending support*/
3871                M4OSA_Float alphaBlending = 1;
3872                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
3873                 (M4xVSS_internalEffectsAlphaBlending*)\
3874                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
3875
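                /* Piecewise alpha profile driven by uiProgress (0..1000):
                   - fade-in  [0, m_fadeInTime*10[                      : ramp from m_start to m_middle
                   - plateau  [m_fadeInTime*10, 1000 - m_fadeOutTime*10[: constant m_middle
                   - fade-out [1000 - m_fadeOutTime*10, 1000]           : ramp from m_middle to m_end
                   m_start, m_middle and m_end are percentages, hence the final division by 100. */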
3876                if(alphaBlendingStruct != M4OSA_NULL)
3877                {
3878                    if(pProgress->uiProgress \
3879                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
3880                    {
3881                        if(alphaBlendingStruct->m_fadeInTime == 0) {
3882                            alphaBlending = alphaBlendingStruct->m_start / 100;
3883                        } else {
3884                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
3885                             - alphaBlendingStruct->m_start)\
3886                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
3887                            alphaBlending += alphaBlendingStruct->m_start;
3888                            alphaBlending /= 100;
3889                        }
3890                    }
3891                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
3892                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
3893                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
3894                    {
3895                        alphaBlending = (M4OSA_Float)\
3896                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
3897                    }
3898                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
3899                    (alphaBlendingStruct->m_fadeOutTime*10))
3900                    {
3901                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
3902                            alphaBlending = alphaBlendingStruct->m_end / 100;
3903                        } else {
3904                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
3905                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
3906                            /(alphaBlendingStruct->m_fadeOutTime*10);
3907                            alphaBlending += alphaBlendingStruct->m_end;
3908                            alphaBlending /= 100;
3909                        }
3910                    }
3911                }
3912                /**/
3913
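                /* A framing pixel equal to TRANSPARENT_COLOR (0x7E0, split into the two
                   bytes compared below) is fully transparent: the input video pixel is
                   copied through. Any other framing pixel is alpha-blended over the video
                   (out = framing*alpha + video*(1-alpha)). Chroma planes are 2x2
                   subsampled, hence the (y>>1, x>>1) addressing. */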
3914                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
3915                {
3916                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
3917                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3918                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
3919                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3920                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
3921                }
3922                else
3923                {
3924                    *( p_out0+y+x*PlaneOut[0].u_stride)=
3925                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
3926                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
3927                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
3928                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
3929                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3930                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
3931                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
3932                                *alphaBlending;
3933                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
3934                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
3935                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3936                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
3937                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
3938                                *alphaBlending;
3939                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
3940                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
3941                }
3942                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
3943                    y == PlaneIn[0].u_width-1)
3944                {
3945                    FramingRGB = FramingRGB + 2 \
3946                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
3947                            - PlaneIn[0].u_width + 1);
3948                }
3949                else
3950                {
3951                    FramingRGB = FramingRGB + 2;
3952                }
3953            }
3954            /**
3955             * Just copy input plane to output plane */
3956            else
3957            {
3958                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
3959                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3960                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
3961                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3962                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
3963            }
3964        }
3965    }
3966
3967
3968    return M4VIFI_OK;
3969}
3970
3971
3972/**
3973 ******************************************************************************
3974 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
3975 *                                                    M4VIFI_ImagePlane *PlaneIn,
3976 *                                                    M4VIFI_ImagePlane *PlaneOut,
3977 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3978 *                                                    M4OSA_UInt32 uiEffectKind)
3979 *
3980 * @brief    This function makes a video look as if it had been shot in the fifties
3981 * @note
3982 * @param    pUserData       (IN) Context
3983 * @param    pPlaneIn        (IN) Input YUV420 planar
3984 * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
3985 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3986 * @param    uiEffectKind    (IN) Unused
3987 *
3988 * @return    M4VIFI_OK:            No error
3989 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
3990 ******************************************************************************
3991 */
3992M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
3993                                                M4VIFI_ImagePlane *pPlaneIn,
3994                                                M4VIFI_ImagePlane *pPlaneOut,
3995                                                M4VSS3GPP_ExternalProgress *pProgress,
3996                                                M4OSA_UInt32 uiEffectKind )
3997{
3998    M4VIFI_UInt32 x, y, xShift;
3999    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
4000    M4VIFI_UInt8 *pOutY, *pInYbegin;
4001    M4VIFI_UInt8 *pInCr,* pOutCr;
4002    M4VIFI_Int32 plane_number;
4003
4004    /* Internal context*/
4005    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
4006
4007    /* Check the inputs (debug only) */
4008    M4OSA_DEBUG_IF2((p_FiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
4009         "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4010    M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
4011         "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4012    M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
4013        "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4014
4015    /* Initialize input / output plane pointers */
4016    pInY += pPlaneIn[0].u_topleft;
4017    pOutY = pPlaneOut[0].pac_data;
4018    pInYbegin  = pInY;
4019
4020    /* Initialize the random */
4021    if(p_FiftiesData->previousClipTime < 0)
4022    {
4023        M4OSA_randInit();
4024        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4025        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4026        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4027    }
4028
4029    /* Choose random values if we have reached the duration of a partial effect */
4030    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
4031         > p_FiftiesData->fiftiesEffectDuration)
4032    {
4033        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4034        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4035        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4036    }
4037
4038    /* Put in Sepia the chrominance */
4039    for (plane_number = 1; plane_number < 3; plane_number++)
4040    {
4041        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
4042        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
4043
4044        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
4045        {
4046            if (1 == plane_number)
4047                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
4048            else
4049                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
4050
4051            pInCr  += pPlaneIn[plane_number].u_stride;
4052            pOutCr += pPlaneOut[plane_number].u_stride;
4053        }
4054    }
4055
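    /* The "fifties" look is produced by:
       - forcing a sepia chrominance (constant U = 117, V = 139, as set above),
       - shifting each output line by a pseudo-random vertical offset (xShift),
       - drawing dark horizontal lines (Y = 40) where the shifted source wraps around,
         plus a gray vertical stripe (Y = 90) at a random column,
       - picking new random values once fiftiesEffectDuration has elapsed. */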
4056    /* Compute the new pixels values */
4057    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
4058    {
4059        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
4060
4061        /* Compute the xShift (random value) */
4062        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
4063            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
4064        else
4065            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
4066                % (pPlaneIn[0].u_height - 1);
4067
4068        /* Initialize the pointers */
4069        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
4070        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
4071
4072        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
4073        {
4074            /* Set Y value */
4075            if (xShift > (pPlaneIn[0].u_height - 4))
4076                *p_outYtmp = 40;        /* Add some horizontal black lines between the
4077                                        two parts of the image */
4078            else if ( y == p_FiftiesData->stripeRandomValue)
4079                *p_outYtmp = 90;        /* Add a random vertical stripe */
4080            else
4081                *p_outYtmp = *p_inYtmp;
4082
4083
4084            /* Go to the next pixel */
4085            p_outYtmp++;
4086            p_inYtmp++;
4087
4088            /* Restart at the beginning of the line for the last pixel*/
4089            if (y == (pPlaneIn[0].u_width - 2))
4090                p_outYtmp = pOutY;
4091        }
4092
4093        /* Go to the next line */
4094        pOutY += pPlaneOut[0].u_stride;
4095    }
4096
4097    return M4VIFI_OK;
4098}
4099
4100/**
4101 ******************************************************************************
4102 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
4103 * @brief    Zoom in/out video effect functions.
4104 * @note    The external video function is used only if VideoEffectType is set to
4105 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut.
4106 *
4107 * @param   pFunctionContext    (IN) The function context, previously set by the integrator
4108 * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
4109 *                                    image planes (Y, U and V)
4110 * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
4111 *                                        three valid image planes (Y, U and V)
4112 * @param    pProgress            (IN) Set of information about the video transition progress.
4113 * @return    M4NO_ERROR:            No error
4114 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
4115 ******************************************************************************
4116 */
4117
4118M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
4119    M4OSA_Void *pFunctionContext,
4120    M4VIFI_ImagePlane *pInputPlanes,
4121    M4VIFI_ImagePlane *pOutputPlanes,
4122    M4VSS3GPP_ExternalProgress *pProgress,
4123    M4OSA_UInt32 uiEffectKind
4124)
4125{
4126    M4OSA_UInt32 boxWidth;
4127    M4OSA_UInt32 boxHeight;
4128    M4OSA_UInt32 boxPosX;
4129    M4OSA_UInt32 boxPosY;
4130    M4OSA_UInt32 ratio = 0;
4131    /* consecutive ratios differ by a factor of ~1.189207 (2^(1/4)) */
4132    /* zoom between x1 and x16 */
4133    M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
4134                                6889,8192,9742,11585,13777,16384};
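    /* ratiotab[k] ~= 1024 * 2^(k/4): a Q10 fixed-point table (1024 == x1.0) used below
       to derive the size of the crop box from the current zoom step. */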
4135    M4OSA_UInt32 ik;
4136
4137    M4VIFI_ImagePlane boxPlane[3];
4138
4139    if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
4140    {
4141        //ratio = 16 - (15 * pProgress->uiProgress)/1000;
4142        ratio = 16 - pProgress->uiProgress / 66 ;
4143    }
4144    else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
4145    {
4146        //ratio = 1 + (15 * pProgress->uiProgress)/1000;
4147        ratio = 1 + pProgress->uiProgress / 66 ;
4148    }
4149
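    /* uiProgress (0..1000) is divided by 66, so the effect walks through the zoom table
       roughly one step per 6.6% of progress: zoom-out starts at index 16 (x16) and
       decreases, zoom-in starts at index 1 and grows up to 16. For each plane a centered
       crop box of size (input size / zoom factor) is built; u_topleft points inside the
       original buffers, and the bilinear resize below scales that box back up to the
       full output size. */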
4150    for(ik=0;ik<3;ik++){
4151
4152        boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
4153        boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
4154
4155        boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
4156        boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
4157        boxPlane[ik].u_height = (boxHeight)&(~1);
4158        boxPlane[ik].u_width = (boxWidth)&(~1);
4159
4160        boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
4161        boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
4162        boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
4163    }
4164
4165    M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
4166
4167    /**
4168     * Return */
4169    return(M4NO_ERROR);
4170}
4171
4172/**
4173 ******************************************************************************
4174 * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
4175 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4176 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4177 *                                    M4VIFI_ImagePlane *PlaneOut,
4178 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4179 *                                    M4OSA_UInt32 uiTransitionKind)
4180 *
4181 * @brief    This function applies an alpha magic transition between two input YUV420 planar frames
4182 * @note
4183 * @param    userData        (IN) Contains a pointer on a settings structure
4184 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4185 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4186 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4187 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4188 * @param    uiTransitionKind(IN) Unused
4189 *
4190 * @return    M4VIFI_OK:    No error
4191 ******************************************************************************
4192 */
4193M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4194                             M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4195                             M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
4196{
4197
4198    M4OSA_ERR err;
4199
4200    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4201    M4VIFI_Int32 alphaProgressLevel;
4202
4203    M4VIFI_ImagePlane* planeswap;
4204    M4VIFI_UInt32 x,y;
4205
4206    M4VIFI_UInt8 *p_out0;
4207    M4VIFI_UInt8 *p_out1;
4208    M4VIFI_UInt8 *p_out2;
4209    M4VIFI_UInt8 *alphaMask;
4210    /* "Old image" */
4211    M4VIFI_UInt8 *p_in1_Y;
4212    M4VIFI_UInt8 *p_in1_U;
4213    M4VIFI_UInt8 *p_in1_V;
4214    /* "New image" */
4215    M4VIFI_UInt8 *p_in2_Y;
4216    M4VIFI_UInt8 *p_in2_U;
4217    M4VIFI_UInt8 *p_in2_V;
4218
4219    err = M4NO_ERROR;
4220
4221    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4222
4223    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4224
4225    if( alphaContext->isreverse != M4OSA_FALSE)
4226    {
4227        alphaProgressLevel = 255 - alphaProgressLevel;
4228        planeswap = PlaneIn1;
4229        PlaneIn1 = PlaneIn2;
4230        PlaneIn2 = planeswap;
4231    }
4232
4233    p_out0 = PlaneOut[0].pac_data;
4234    p_out1 = PlaneOut[1].pac_data;
4235    p_out2 = PlaneOut[2].pac_data;
4236
4237    alphaMask = alphaContext->pPlane->pac_data;
4238
4239    /* "Old image" */
4240    p_in1_Y = PlaneIn1[0].pac_data;
4241    p_in1_U = PlaneIn1[1].pac_data;
4242    p_in1_V = PlaneIn1[2].pac_data;
4243    /* "New image" */
4244    p_in2_Y = PlaneIn2[0].pac_data;
4245    p_in2_U = PlaneIn2[1].pac_data;
4246    p_in2_V = PlaneIn2[2].pac_data;
4247
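    /* The alpha mask is a full-size 8-bit plane: pixels whose mask value is above the
       current progress (rescaled to 0..255) still show the outgoing clip, the others
       already show the incoming clip, so the transition follows the shape encoded in
       the mask. The isreverse flag inverts both the progress level and the clip order. */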
4248     /**
4249     * For each row ... */
4250    for( y=0; y<PlaneOut->u_height; y++ )
4251    {
4252        /**
4253         * ... and each column of the alpha mask */
4254        for( x=0; x<PlaneOut->u_width; x++ )
4255        {
4256            /**
4257             * If the value of the current pixel of the alpha mask is greater than the
4258             * current progress level ( the progress is normalized on [0-255] ) */
4259            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
4260            {
4261                /* We keep "old image" in output plane */
4262                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4263                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4264                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4265                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4266                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4267            }
4268            else
4269            {
4270                /* We take "new image" in output plane */
4271                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4272                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4273                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4274                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4275                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4276            }
4277        }
4278    }
4279
4280    return(err);
4281}
4282
4283/**
4284 ******************************************************************************
4285 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
4286 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4287 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4288 *                                    M4VIFI_ImagePlane *PlaneOut,
4289 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4290 *                                    M4OSA_UInt32 uiTransitionKind)
4291 *
4292 * @brief    This function applies an alpha magic transition with blending between two input YUV420 planar frames
4293 * @note
4294 * @param    userData        (IN) Contains a pointer on a settings structure
4295 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4296 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4297 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4298 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4299 * @param    uiTransitionKind(IN) Unused
4300 *
4301 * @return    M4VIFI_OK:    No error
4302 ******************************************************************************
4303 */
4304M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4305                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4306                                     M4VSS3GPP_ExternalProgress *pProgress,
4307                                     M4OSA_UInt32 uiTransitionKind)
4308{
4309    M4OSA_ERR err;
4310
4311    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4312    M4VIFI_Int32 alphaProgressLevel;
4313    M4VIFI_Int32 alphaBlendLevelMin;
4314    M4VIFI_Int32 alphaBlendLevelMax;
4315    M4VIFI_Int32 alphaBlendRange;
4316
4317    M4VIFI_ImagePlane* planeswap;
4318    M4VIFI_UInt32 x,y;
4319    M4VIFI_Int32 alphaMaskValue;
4320
4321    M4VIFI_UInt8 *p_out0;
4322    M4VIFI_UInt8 *p_out1;
4323    M4VIFI_UInt8 *p_out2;
4324    M4VIFI_UInt8 *alphaMask;
4325    /* "Old image" */
4326    M4VIFI_UInt8 *p_in1_Y;
4327    M4VIFI_UInt8 *p_in1_U;
4328    M4VIFI_UInt8 *p_in1_V;
4329    /* "New image" */
4330    M4VIFI_UInt8 *p_in2_Y;
4331    M4VIFI_UInt8 *p_in2_U;
4332    M4VIFI_UInt8 *p_in2_V;
4333
4334
4335    err = M4NO_ERROR;
4336
4337    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4338
4339    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4340
4341    if( alphaContext->isreverse != M4OSA_FALSE)
4342    {
4343        alphaProgressLevel = 255 - alphaProgressLevel;
4344        planeswap = PlaneIn1;
4345        PlaneIn1 = PlaneIn2;
4346        PlaneIn2 = planeswap;
4347    }
4348
4349    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
4350
4351    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
4352
4353    alphaBlendRange = (alphaContext->blendingthreshold)*2;
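    /* Same principle as M4xVSS_AlphaMagic, but mask values falling within
       +/- blendingthreshold of the current progress are linearly interpolated between
       the two clips instead of being switched abruptly, which smooths the edge of the
       transition. */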
4354
4355    p_out0 = PlaneOut[0].pac_data;
4356    p_out1 = PlaneOut[1].pac_data;
4357    p_out2 = PlaneOut[2].pac_data;
4358
4359    alphaMask = alphaContext->pPlane->pac_data;
4360
4361    /* "Old image" */
4362    p_in1_Y = PlaneIn1[0].pac_data;
4363    p_in1_U = PlaneIn1[1].pac_data;
4364    p_in1_V = PlaneIn1[2].pac_data;
4365    /* "New image" */
4366    p_in2_Y = PlaneIn2[0].pac_data;
4367    p_in2_U = PlaneIn2[1].pac_data;
4368    p_in2_V = PlaneIn2[2].pac_data;
4369
4370    /* apply Alpha Magic on each pixel */
4371       for( y=0; y<PlaneOut->u_height; y++ )
4372    {
4373        for( x=0; x<PlaneOut->u_width; x++ )
4374        {
4375            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
4376            if( alphaBlendLevelMax < alphaMaskValue )
4377            {
4378                /* We keep "old image" in output plane */
4379                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4380                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4381                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4382                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4383                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4384            }
4385            else if( (alphaBlendLevelMin < alphaMaskValue)&&
4386                    (alphaMaskValue <= alphaBlendLevelMax ) )
4387            {
4388                /* We blend "old and new image" in output plane */
4389                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
4390                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
4391                        +(alphaBlendLevelMax-alphaMaskValue)\
4392                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
4393
4394                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
4395                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
4396                        *PlaneIn1[1].u_stride))
4397                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
4398                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );
4399
4400                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4401                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
4402                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
4403                                +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
4404                                    *PlaneIn2[2].u_stride)) )/alphaBlendRange );
4405
4406            }
4407            else
4408            {
4409                /* We take "new image" in output plane */
4410                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4411                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4412                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4413                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4414                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4415            }
4416        }
4417    }
4418
4419    return(err);
4420}
4421
4422#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
4423     * (plane).u_stride + (x) )
4424
4425static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
4426{
4427    M4OSA_UInt32    height, width, sourceStride, destStride, y;
4428    M4OSA_MemAddr8    sourceWalk, destWalk;
4429
4430    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4431     recomputed from memory. */
4432    height = dest->u_height;
4433    width = dest->u_width;
4434
4435    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
4436    sourceStride = source->u_stride;
4437
4438    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
4439    destStride = dest->u_stride;
4440
4441    for (y=0; y<height; y++)
4442    {
4443        memcpy((void *)destWalk, (void *)sourceWalk, width);
4444        destWalk += destStride;
4445        sourceWalk += sourceStride;
4446    }
4447}
4448
4449static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
4450                                                M4VIFI_ImagePlane* bottomPlane,
4451                                                M4VIFI_ImagePlane *PlaneOut,
4452                                                M4OSA_UInt32    shiftUV)
4453{
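    /* shiftUV is the slide offset expressed in chroma lines (the luma plane uses
       2*shiftUV). The top part of the output comes from the lower part of topPlane and
       the remaining bottom part comes from the top of bottomPlane. */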
4454    M4OSA_UInt32 i;
4455
4456    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4457    "hot" at the same time (better for cache). */
4458    for (i=0; i<3; i++)
4459    {
4460        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
4461        M4OSA_MemAddr8    sourceWalk, destWalk;
4462
4463        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4464         recomputed from memory. */
4465        if (0 == i) /* Y plane */
4466        {
4467            bottomPartHeight = 2*shiftUV;
4468        }
4469        else /* U and V planes */
4470        {
4471            bottomPartHeight = shiftUV;
4472        }
4473        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
4474        width = PlaneOut[i].u_width;
4475
4476        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
4477        sourceStride = topPlane[i].u_stride;
4478
4479        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4480        destStride = PlaneOut[i].u_stride;
4481
4482        /* First the part from the top source clip frame. */
4483        for (y=0; y<topPartHeight; y++)
4484        {
4485            memcpy((void *)destWalk, (void *)sourceWalk, width);
4486            destWalk += destStride;
4487            sourceWalk += sourceStride;
4488        }
4489
4490        /* and now change the vars to copy the part from the bottom source clip frame. */
4491        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
4492        sourceStride = bottomPlane[i].u_stride;
4493
4494        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
4495
4496        for (y=0; y<bottomPartHeight; y++)
4497        {
4498            memcpy((void *)destWalk, (void *)sourceWalk, width);
4499            destWalk += destStride;
4500            sourceWalk += sourceStride;
4501        }
4502    }
4503    return M4NO_ERROR;
4504}
4505
4506static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
4507                                                  M4VIFI_ImagePlane* rightPlane,
4508                                                  M4VIFI_ImagePlane *PlaneOut,
4509                                                  M4OSA_UInt32    shiftUV)
4510{
4511    M4OSA_UInt32 i, y;
4512    /* If we shifted by exactly 0, or by the width of the target image, we would get the left
4513    frame or the right frame, respectively. The general code path does not handle these cases
4514    well (it would perform 0-size memcopies), so we special-case them. */
4515
4516    if (0 == shiftUV)    /* output left frame */
4517    {
4518        for (i = 0; i<3; i++) /* for each YUV plane */
4519        {
4520            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
4521        }
4522
4523        return M4NO_ERROR;
4524    }
4525
4526    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
4527    {
4528        for (i = 0; i<3; i++) /* for each YUV plane */
4529        {
4530            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
4531        }
4532
4533        return M4NO_ERROR;
4534    }
4535
4536
4537    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4538    "hot" at the same time (better for cache). */
4539    for (i=0; i<3; i++)
4540    {
4541        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
4542        M4OSA_UInt32    leftStride,    rightStride,    destStride;
4543        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
4544
4545        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
4546        and recomputed from memory. */
4547        height = PlaneOut[i].u_height;
4548
4549        if (0 == i) /* Y plane */
4550        {
4551            rightPartWidth = 2*shiftUV;
4552        }
4553        else /* U and V planes */
4554        {
4555            rightPartWidth = shiftUV;
4556        }
4557        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
4558
4559        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
4560        leftStride = leftPlane[i].u_stride;
4561
4562        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
4563        rightStride = rightPlane[i].u_stride;
4564
4565        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4566        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
4567        destStride = PlaneOut[i].u_stride;
4568
4569        for (y=0; y<height; y++)
4570        {
4571            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
4572            leftWalk += leftStride;
4573
4574            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
4575            rightWalk += rightStride;
4576
4577            destWalkLeft += destStride;
4578            destWalkRight += destStride;
4579        }
4580    }
4581
4582    return M4NO_ERROR;
4583}
4584
4585
4586M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4587                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4588                                  M4VSS3GPP_ExternalProgress *pProgress,
4589                                  M4OSA_UInt32 uiTransitionKind)
4590{
4591    M4xVSS_internal_SlideTransitionSettings* settings =
4592         (M4xVSS_internal_SlideTransitionSettings*)userData;
4593    M4OSA_UInt32    shiftUV;
4594
4595    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
4596    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4597        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
4598    {
4599        /* horizontal slide */
4600        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
4601        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
4602            shiftUV,pProgress->uiProgress );
4603        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4604        {
4605            /* Put the previous clip frame on the right and the next clip frame on the left,
4606            and reverse shiftUV (since the shift is measured from the left frame) so that
4607            the transition starts on the right frame, i.e. on the frame coming from the
4608            previous clip. */
4609            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4610                 (PlaneOut[1]).u_width - shiftUV);
4611        }
4612        else /* Left out, right in*/
4613        {
4614            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4615        }
4616    }
4617    else
4618    {
4619        /* vertical slide */
4620        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
4621        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
4622            pProgress->uiProgress );
4623        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
4624        {
4625            /* Put the previous clip frame top, the next clip frame bottom. */
4626            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4627        }
4628        else /* Bottom out, top in */
4629        {
4630            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4631                (PlaneOut[1]).u_height - shiftUV);
4632        }
4633    }
4634
4635    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
4636    at least in debug mode. */
4637}
4638
4639
4640/**
4641 ******************************************************************************
4642 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *userData,
4643 *                                                    M4VIFI_ImagePlane PlaneIn1[3],
4644 *                                                    M4VIFI_ImagePlane PlaneIn2[3],
4645 *                                                    M4VIFI_ImagePlane *PlaneOut,
4646 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4647 *                                                    M4OSA_UInt32 uiTransitionKind)
4648 * @brief    This function applies a fade to black, then a fade from black, between two clips
4649 * @param    userData        (IN) Unused
4650 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1 (outgoing clip)
4651 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2 (incoming clip)
4652 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4653 * @param    pProgress        (IN/OUT) Progress indication (0-1000)
4654 * @param    uiTransitionKind(IN) Unused
4655 *
4656 * @return    M4VIFI_OK:    No error
4657 ******************************************************************************
4658 */
4659M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4660                                     M4VIFI_ImagePlane PlaneIn2[3],
4661                                     M4VIFI_ImagePlane *PlaneOut,
4662                                     M4VSS3GPP_ExternalProgress *pProgress,
4663                                     M4OSA_UInt32 uiTransitionKind)
4664{
4665    M4OSA_Int32 tmp = 0;
4666    M4OSA_ERR err = M4NO_ERROR;
4667
4668
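    /* First half of the transition (uiProgress < 500): fade the outgoing clip to black
       by scaling its luma from 1024 down to 0. Second half: fade the incoming clip in
       from black by scaling its luma from 0 up to 1024. The scale factor is Q10
       fixed point (1024 corresponds to unchanged luma). */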
4669    if((pProgress->uiProgress) < 500)
4670    {
4671        /**
4672         * Compute where we are in the effect (scale is 0->1024) */
4673        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
4674
4675        /**
4676         * Apply the darkening effect (fade the outgoing clip to black) */
4677        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
4678             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4679        if (M4NO_ERROR != err)
4680        {
4681            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
4682                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4683            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4684        }
4685    }
4686    else
4687    {
4688        /**
4689         * Compute where we are in the effect (scale is 0->1024). */
4690        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
4691
4692        /**
4693         * Apply the luma scaling (fade the incoming clip in from black) */
4694        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
4695             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4696        if (M4NO_ERROR != err)
4697        {
4698            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
4699                 M4VFL_modifyLumaWithScale returns error 0x%x,\
4700                     returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4701            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4702        }
4703    }
4704
4705
4706    return M4VIFI_OK;
4707}
4708
4709
4710/**
4711 ******************************************************************************
4712 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
4713 *                                                        M4OSA_Void* pBufferIn,
4714 *                                                        M4OSA_Void* pBufferOut,
4715 *                                                        M4OSA_UInt32* convertedSize)
4716 *
4717 * @brief    This function converts from the customer format to UTF8
4718 * @note
4719 * @param    pContext        (IN)    The integrator own context
4720 * @param    pBufferIn        (IN)    Buffer to convert
4721 * @param    pBufferOut        (OUT)    Converted buffer
4722 * @param    convertedSize    (OUT)    Size of the converted buffer
4723 *
4724 * @return    M4NO_ERROR:    No error
4725 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4726 ******************************************************************************
4727 */
4728M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4729                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4730{
4731    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4732    M4OSA_ERR err;
4733
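    /* When a conversion function and a temporary output buffer have been registered,
       the text is converted into that shared buffer (callers then read
       UTFConversionContext.pTempOutConversionBuffer). If the buffer turns out to be too
       small, it is reallocated to the size reported by the converter and the conversion
       is retried once. */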
4734    pBufferOut = pBufferIn;
4735    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
4736        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4737    {
4738        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4739
4740        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
4741            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
4742
4743        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4744            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4745                 (M4OSA_UInt32*)&ConvertedSize);
4746        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4747        {
4748            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4749
4750            /*free too small buffer*/
4751            free(xVSS_context->\
4752                UTFConversionContext.pTempOutConversionBuffer);
4753
4754            /*re-allocate the buffer*/
4755            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4756                 (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4757                     (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
4758            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4759            {
4760                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
4761                return M4ERR_ALLOC;
4762            }
4763            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4764
4765            memset((void *)xVSS_context->\
4766                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4767                    UTFConversionContext.m_TempOutConversionSize);
4768
4769            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4770                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4771                    (M4OSA_UInt32*)&ConvertedSize);
4772            if(err != M4NO_ERROR)
4773            {
4774                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4775                return err;
4776            }
4777        }
4778        else if(err != M4NO_ERROR)
4779        {
4780            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4781            return err;
4782        }
4783        /*decoded path*/
4784        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4785        (*convertedSize) = ConvertedSize;
4786    }
4787    return M4NO_ERROR;
4788}
4789
4790
4791/**
4792 ******************************************************************************
4793 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext)
4794 *
4795 * @brief    This function converts from UTF8 to the customer format
4796 * @note
4797 * @param    pContext    (IN) The integrator own context
4798 * @param    pBufferIn        (IN)    Buffer to convert
4799 * @param    pBufferOut        (OUT)    Converted buffer
4800 * @param    convertedSize    (OUT)    Size of the converted buffer
4801 *
4802 * @return    M4NO_ERROR:    No error
4803 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4804 ******************************************************************************
4805 */
4806M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4807                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4808{
4809    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4810    M4OSA_ERR err;
4811
4812    pBufferOut = pBufferIn;
4813    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
4814        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4815    {
4816        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4817
4818        memset((void *)xVSS_context->\
4819            UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4820                UTFConversionContext.m_TempOutConversionSize);
4821
4822        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
4823            ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
4824                UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
4825        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4826        {
4827            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4828
4829            /*free too small buffer*/
4830            free(xVSS_context->\
4831                UTFConversionContext.pTempOutConversionBuffer);
4832
4833            /*re-allocate the buffer*/
4834            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4835                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4836                     (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
4837            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4838            {
4839                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
4840                return M4ERR_ALLOC;
4841            }
4842            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4843
4844            memset((void *)xVSS_context->\
4845                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4846                    UTFConversionContext.m_TempOutConversionSize);
4847
4848            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
4849                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4850                     (M4OSA_UInt32*)&ConvertedSize);
4851            if(err != M4NO_ERROR)
4852            {
4853                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4854                return err;
4855            }
4856        }
4857        else if(err != M4NO_ERROR)
4858        {
4859            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4860            return err;
4861        }
4862        /*decoded path*/
4863        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4864        (*convertedSize) = ConvertedSize;
4865    }
4866
4867
4868    return M4NO_ERROR;
4869}
4870