M4xVSS_internal.c revision 0a25bd5bacf5408ca205b15e53cfb6dc2fc1d171
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4xVSS_internal.c
19 * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
20 * @note
21 ******************************************************************************
22 */
23#include "M4OSA_Debug.h"
24#include "M4OSA_CharStar.h"
25
26#include "NXPSW_CompilerSwitches.h"
27
28#include "M4VSS3GPP_API.h"
29#include "M4VSS3GPP_ErrorCodes.h"
30
31#include "M4xVSS_API.h"
32#include "M4xVSS_Internal.h"
33
34/*for rgb16 color effect*/
35#include "M4VIFI_Defines.h"
36#include "M4VIFI_Clip.h"
37
38/**
39 * component includes */
40#include "M4VFL_transition.h"            /**< video effects */
41
42/* Internal header file of VSS is included because of MMS use case */
43#include "M4VSS3GPP_InternalTypes.h"
44
45/*Exif header files to add image rendering support (cropping, black borders)*/
46#include "M4EXIFC_CommonAPI.h"
47// StageFright encoders require resolutions that are a multiple of 16
48#include "M4ENCODER_common.h"
49
50#define TRANSPARENT_COLOR 0x7E0
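/* Editorial note (addition): 0x7E0 is pure green in RGB565 (R = 0, G = 0x3F, B = 0); it is the
   transparency key whose conversion is handled by the dedicated M4VIFI_xVSS_RGB565toYUV420
   routine declared below. */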
51
52/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */
53M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
54                                        M4VIFI_ImagePlane *pPlaneOut);
55
56
57/* Special MCS function, used only in VideoArtist and VideoStudio, to open the media in normal
58 mode so that the media duration is accurate */
59extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
60                                         M4VIDEOEDITING_FileType InputFileType,
61                                         M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
62
63
64/**
65 ******************************************************************************
66 * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
67 * @brief        This function initializes MCS (3GP transcoder) with the given
68 *                parameters
69 * @note        The transcoding parameters are given by the internal xVSS context.
70 *                This context contains a pointer on the current element of the
71 *                chained list of MCS parameters.
72 *
73 * @param    pContext            (IN) Pointer on the xVSS edit context
74 * @return    M4NO_ERROR:            No error
75 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
76 * @return    M4ERR_ALLOC:        Memory allocation has failed
77 ******************************************************************************
78 */
79M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
80{
81    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
82    M4OSA_ERR err;
83    M4MCS_Context mcs_context;
84    M4MCS_OutputParams Params;
85    M4MCS_EncodingParams Rates;
86    M4OSA_UInt32 i;
87
88    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
89    if(err != M4NO_ERROR)
90    {
91        M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
92        return err;
93    }
94
95    err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
96         xVSS_context->pMCScurrentParams->InputFileType,
97             xVSS_context->pMCScurrentParams->pFileOut,
98             xVSS_context->pMCScurrentParams->pFileTemp);
99    if (err != M4NO_ERROR)
100    {
101        M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
102        M4MCS_abort(mcs_context);
103        return err;
104    }
105
106    /**
107     * Fill MCS parameters with the parameters contained in the current element of the
108       MCS parameters chained list */
109    Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
110    Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
111    Params.outputVideoProfile = xVSS_context->pMCScurrentParams->outputVideoProfile;
112    Params.outputVideoLevel = xVSS_context->pMCScurrentParams->outputVideoLevel;
113    Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
114    Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
115    Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
116    Params.OutputAudioSamplingFrequency =
117         xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
118    Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
119    Params.pOutputPCMfile = M4OSA_NULL;
120    /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
121    switch(xVSS_context->pMCScurrentParams->MediaRendering)
122    {
123    case M4xVSS_kResizing:
124        Params.MediaRendering = M4MCS_kResizing;
125        break;
126    case M4xVSS_kCropping:
127        Params.MediaRendering = M4MCS_kCropping;
128        break;
129    case M4xVSS_kBlackBorders:
130        Params.MediaRendering = M4MCS_kBlackBorders;
131        break;
132    default:
133        break;
134    }
135    /**/
136    // new params after integrating MCS 2.0
137    // Set the number of audio effects; 0 for now.
138    Params.nbEffects = 0;
139
140    // Set the audio effect; null for now.
141    Params.pEffects = NULL;
142
143    // Keep the EXIF data; do not discard it.
144    Params.bDiscardExif = M4OSA_FALSE;
145
146    // Do not adjust the picture orientation.
147    Params.bAdjustOrientation = M4OSA_FALSE;
148    // new params after integrating MCS 2.0
149
150    /**
151     * Set output parameters */
152    err = M4MCS_setOutputParams(mcs_context, &Params);
153    if (err != M4NO_ERROR)
154    {
155        M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
156        M4MCS_abort(mcs_context);
157        return err;
158    }
159
160    Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
161    Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
162    Rates.BeginCutTime = 0;
163    Rates.EndCutTime = 0;
164    Rates.OutputFileSize = 0;
165
166    /*FB: transcoding per parts*/
167    Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
168    Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
169    Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
170
171    err = M4MCS_setEncodingParams(mcs_context, &Rates);
172    if (err != M4NO_ERROR)
173    {
174        M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
175        M4MCS_abort(mcs_context);
176        return err;
177    }
178
179    err = M4MCS_checkParamsAndStart(mcs_context);
180    if (err != M4NO_ERROR)
181    {
182        M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
183        M4MCS_abort(mcs_context);
184        return err;
185    }
186
187    /**
188     * Save MCS context to be able to call MCS step function in M4xVSS_step function */
189    xVSS_context->pMCS_Ctxt = mcs_context;
190
191    return M4NO_ERROR;
192}
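/* ---------------------------------------------------------------------------
 * Editor's illustration (not part of the original file): a minimal sketch of
 * how the current MCS parameter element read by M4xVSS_internalStartTranscoding()
 * might be filled. Only a few of the fields read above are shown; the file
 * names and values are placeholders, and the allocation and linking of the
 * chained list (done by M4xVSS_SendCommand() in the real code path) as well as
 * the exact element type in M4xVSS_Internal.h are assumed.
 *
 *     M4xVSS_MCS_params* pParams = xVSS_context->pMCScurrentParams;
 *     pParams->pFileIn        = (M4OSA_Void*)"/sdcard/input.3gp";
 *     pParams->pFileOut       = (M4OSA_Void*)"/sdcard/transcoded.3gp";
 *     pParams->pFileTemp      = M4OSA_NULL;
 *     pParams->InputFileType  = M4VIDEOEDITING_kFileType_3GPP;
 *     pParams->OutputFileType = M4VIDEOEDITING_kFileType_3GPP;
 *     pParams->MediaRendering = M4xVSS_kBlackBorders;
 *     pParams->BeginCutTime   = 0;    // 0/0 means transcode the whole clip
 *     pParams->EndCutTime     = 0;
 *     err = M4xVSS_internalStartTranscoding(xVSS_context);
 * --------------------------------------------------------------------------- */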
193
194/**
195 ******************************************************************************
196 * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
197 * @brief        This function cleans up MCS (3GP transcoder)
198 * @note
199 *
200 * @param    pContext            (IN) Pointer on the xVSS edit context
201 * @return    M4NO_ERROR:            No error
202 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
203 * @return    M4ERR_ALLOC:        Memory allocation has failed
204 ******************************************************************************
205 */
206M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
207{
208    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
209    M4OSA_ERR err;
210
211    err = M4MCS_close(xVSS_context->pMCS_Ctxt);
212    if (err != M4NO_ERROR)
213    {
214        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
215        M4MCS_abort(xVSS_context->pMCS_Ctxt);
216        return err;
217    }
218
219    /**
220     * Free this MCS instance */
221    err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
222    if (err != M4NO_ERROR)
223    {
224        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
225        return err;
226    }
227
228    xVSS_context->pMCS_Ctxt = M4OSA_NULL;
229
230    return M4NO_ERROR;
231}
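/* ---------------------------------------------------------------------------
 * Editor's illustration (not part of the original file): rough shape of the
 * start / step / stop sequence built around the two helpers above. The real
 * driver is M4xVSS_step(); M4MCS_step() and the M4MCS_WAR_TRANSCODING_DONE
 * return code are assumed from the MCS API and should be checked against
 * M4MCS_API.h and M4MCS_ErrorCodes.h.
 *
 *     M4OSA_UInt8 progress = 0;
 *     err = M4xVSS_internalStartTranscoding(xVSS_context);
 *     while (M4NO_ERROR == err)
 *     {
 *         err = M4MCS_step(xVSS_context->pMCS_Ctxt, &progress);
 *     }
 *     if (M4MCS_WAR_TRANSCODING_DONE == err)
 *     {
 *         err = M4xVSS_internalStopTranscoding(xVSS_context);
 *     }
 * --------------------------------------------------------------------------- */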
232
233/**
234 ******************************************************************************
235 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
236 *                                             M4OSA_FileReadPointer* pFileReadPtr,
237 *                                                M4VIFI_ImagePlane* pImagePlanes,
238 *                                                 M4OSA_UInt32 width,
239 *                                                M4OSA_UInt32 height);
240 * @brief    Converts and resizes an ARGB8888 image to YUV420
241 * @note
242 * @param    pFileIn            (IN) The Image input file
243 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
244 * @param    pImagePlanes    (IN/OUT) Pointer on the YUV420 output planes allocated by the user.
245 *                            The ARGB8888 image will be converted and resized to the output
246 *                             YUV420 plane size.
247 * @param    width            (IN) Width of the ARGB8888 image
248 * @param    height            (IN) Height of the ARGB8888 image
249 * @return    M4NO_ERROR:    No error
250 * @return    M4ERR_ALLOC: memory error
251 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
252 ******************************************************************************
253 */
254
255M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
256                                                          M4OSA_FileReadPointer* pFileReadPtr,
257                                                          M4VIFI_ImagePlane* pImagePlanes,
258                                                          M4OSA_UInt32 width,M4OSA_UInt32 height)
259{
260    M4OSA_Context pARGBIn;
261    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
262    M4OSA_UInt32 frameSize_argb=(width * height * 4);
263    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
264    M4OSA_UInt32 i = 0,j= 0;
265    M4OSA_ERR err=M4NO_ERROR;
266
267
268    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
269         M4VS, (M4OSA_Char*)"Image argb data");
270        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
271    if(pTmpData == M4OSA_NULL) {
272        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
273            Failed to allocate memory for Image clip");
274        return M4ERR_ALLOC;
275    }
276
277    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
278        width ,height);
279    /* Open the ARGB8888 input file */
280    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
281    if(err != M4NO_ERROR)
282    {
283        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
284            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
285        free(pTmpData);
286        pTmpData = M4OSA_NULL;
287        goto cleanup;
288    }
289
290    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
291    if(err != M4NO_ERROR)
292    {
293        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't read ARGB8888\
294             file %s, error: 0x%x\n",pFileIn, err);
295        pFileReadPtr->closeRead(pARGBIn);
296        free(pTmpData);
297        pTmpData = M4OSA_NULL;
298        goto cleanup;
299    }
300
301    err = pFileReadPtr->closeRead(pARGBIn);
302    if(err != M4NO_ERROR)
303    {
304        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
305             file %s, error: 0x%x\n",pFileIn, err);
306        free(pTmpData);
307        pTmpData = M4OSA_NULL;
308        goto cleanup;
309    }
310
311    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
312         (M4OSA_Char*)"Image clip RGB888 data");
313    if(rgbPlane1.pac_data == M4OSA_NULL)
314    {
315        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
316            Failed to allocate memory for Image clip");
317        free(pTmpData);
318        return M4ERR_ALLOC;
319    }
320
321        rgbPlane1.u_height = height;
322        rgbPlane1.u_width = width;
323        rgbPlane1.u_stride = width*3;
324        rgbPlane1.u_topleft = 0;
325
326
327    /** Remove the alpha channel */
328    for (i=0, j = 0; i < frameSize_argb; i++) {
329        if ((i % 4) == 0) continue;
330        rgbPlane1.pac_data[j] = pTmpData[i];
331        j++;
332    }
333        free(pTmpData);
334
335    /* To Check if resizing is required with color conversion */
336    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
337    {
338        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
339        frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
340        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
341             (M4OSA_Char*)"Image clip RGB888 data");
342        if(rgbPlane2.pac_data == M4OSA_NULL)
343        {
344            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
345            free(rgbPlane1.pac_data); /* pTmpData was already freed above */
346            return M4ERR_ALLOC;
347        }
348            rgbPlane2.u_height =  pImagePlanes->u_height;
349            rgbPlane2.u_width = pImagePlanes->u_width;
350            rgbPlane2.u_stride = pImagePlanes->u_width*3;
351            rgbPlane2.u_topleft = 0;
352
353        /* Resizing RGB888 to RGB888 */
354        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
355        if(err != M4NO_ERROR)
356        {
357            M4OSA_TRACE1_1("error when resizing RGB888 to RGB888: 0x%x\n", err);
358            free(rgbPlane2.pac_data);
359            free(rgbPlane1.pac_data);
360            return err;
361        }
362        /*Converting Resized RGB888 to YUV420 */
363        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
364        if(err != M4NO_ERROR)
365        {
366            M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
367            free(rgbPlane2.pac_data);
368            free(rgbPlane1.pac_data);
369            return err;
370        }
371            free(rgbPlane2.pac_data);
372            free(rgbPlane1.pac_data);
373
374            M4OSA_TRACE1_0("RGB to YUV done");
375
376
377    }
378    else
379    {
380        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
381        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
382        if(err != M4NO_ERROR)
383        {
384            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
385        }
386            free(rgbPlane1.pac_data);
387
388            M4OSA_TRACE1_0("RGB to YUV done");
389    }
390cleanup:
391    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
392    return err;
393}
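/* ---------------------------------------------------------------------------
 * Editor's illustration (not part of the original file): the caller of
 * M4xVSS_internalConvertAndResizeARGB8888toYUV420() provides already-allocated
 * YUV420 planes. The layout below (Y at full size, U and V at half size in one
 * contiguous buffer) mirrors the plane setup done by
 * M4xVSS_internalConvertARGB8888toYUV420() further down; the output size,
 * file name and the srcW/srcH/err/pFileReadPtr variables are placeholders.
 *
 *     M4VIFI_ImagePlane planes[3];
 *     M4OSA_UInt32 outW = 320, outH = 240;
 *     M4VIFI_UInt8* pBuf = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
 *         (outW * outH * 3) / 2, M4VS, (M4OSA_Char*)"example YUV420 buffer");
 *     planes[0].u_width  = outW;      planes[0].u_height = outH;
 *     planes[0].u_stride = outW;      planes[0].u_topleft = 0;
 *     planes[0].pac_data = pBuf;
 *     planes[1].u_width  = outW >> 1; planes[1].u_height = outH >> 1;
 *     planes[1].u_stride = outW >> 1; planes[1].u_topleft = 0;
 *     planes[1].pac_data = pBuf + outW * outH;
 *     planes[2] = planes[1];
 *     planes[2].pac_data = planes[1].pac_data + (outW >> 1) * (outH >> 1);
 *     err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(
 *         (M4OSA_Void*)"/sdcard/picture.argb", pFileReadPtr, planes, srcW, srcH);
 * --------------------------------------------------------------------------- */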
394
395/**
396 ******************************************************************************
397 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
398 *                                             M4OSA_FileReadPointer* pFileReadPtr,
399 *                                                M4VIFI_ImagePlane** pImagePlanes,
400 *                                                 M4OSA_UInt32 width,
401 *                                                M4OSA_UInt32 height);
402 * @brief    Converts an ARGB8888 image to YUV420
403 * @note
404 * @param    pFileIn            (IN) The Image input file
405 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
406 * @param    pImagePlanes    (OUT) Address at which the pointer on the YUV420 planes, allocated
407 *                            by this function, is returned. The ARGB8888 image is converted
408 *                            to YUV420 planes of the same width and height.
409 * @param    width        (IN) Width of the ARGB8888 image
410 * @param    height            (IN) Height of the ARGB8888 image
411 * @return    M4NO_ERROR:    No error
412 * @return    M4ERR_ALLOC: memory error
413 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
414 ******************************************************************************
415 */
416
417M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
418                                                 M4OSA_FileReadPointer* pFileReadPtr,
419                                                 M4VIFI_ImagePlane** pImagePlanes,
420                                                 M4OSA_UInt32 width,M4OSA_UInt32 height)
421{
422    M4OSA_ERR err = M4NO_ERROR;
423    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
424
425    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
426                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
427    if(yuvPlane == M4OSA_NULL) {
428        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :\
429            Failed to allocate memory for Image clip");
430        return M4ERR_ALLOC;
431    }
432    yuvPlane[0].u_height = height;
433    yuvPlane[0].u_width = width;
434    yuvPlane[0].u_stride = width;
435    yuvPlane[0].u_topleft = 0;
436    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
437        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
438
439    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
440    yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
441    yuvPlane[1].u_stride = yuvPlane[1].u_width;
442    yuvPlane[1].u_topleft = 0;
443    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
444        * yuvPlane[0].u_width);
445
446    yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
447    yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
448    yuvPlane[2].u_stride = yuvPlane[2].u_width;
449    yuvPlane[2].u_topleft = 0;
450    yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
451        * yuvPlane[1].u_width);
452    err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
453                                                          yuvPlane, width, height);
454    if(err != M4NO_ERROR)
455    {
456        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err);
457        free(yuvPlane);
458        return err;
459    }
460
461        *pImagePlanes = yuvPlane;
462
463    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
464    return err;
465
466}
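/* ---------------------------------------------------------------------------
 * Editor's illustration (not part of the original file): this variant
 * allocates the YUV420 planes itself and hands them back through the double
 * pointer. Because the three planes share one contiguous buffer, the caller
 * releases pac_data of the first plane and then the plane array, as the error
 * path of M4xVSS_PictureCallbackFct() below does. File name and dimensions
 * are placeholders.
 *
 *     M4VIFI_ImagePlane* pDecoded = M4OSA_NULL;
 *     err = M4xVSS_internalConvertARGB8888toYUV420(
 *         (M4OSA_Void*)"/sdcard/picture.argb", pFileReadPtr, &pDecoded, 640, 480);
 *     if (M4NO_ERROR == err && M4OSA_NULL != pDecoded)
 *     {
 *         // ... use pDecoded[0], pDecoded[1], pDecoded[2] ...
 *         free(pDecoded[0].pac_data);   // single contiguous YUV420 buffer
 *         free(pDecoded);
 *         pDecoded = M4OSA_NULL;
 *     }
 * --------------------------------------------------------------------------- */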
467
468/**
469 ******************************************************************************
470 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
471 *                                        M4VIFI_ImagePlane* pImagePlanes,
472 *                                        M4OSA_Double* pPictureDuration);
473 * @brief    It feeds the PTO3GPP with YUV420 pictures.
474 * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
475 * @param    pContext    (IN) The integrator own context
476 * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
477 * @param    pPictureDuration(OUT) Duration of the returned picture
478 *
479 * @return    M4NO_ERROR:    No error
480 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
481 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
482 ******************************************************************************
483 */
484M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
485                                     M4OSA_Double* pPictureDuration)
486{
487    M4OSA_ERR err = M4NO_ERROR;
488    M4OSA_UInt8    last_frame_flag = 0;
489    M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
490
491    /*Used for pan&zoom*/
492    M4OSA_UInt8 tempPanzoomXa = 0;
493    M4OSA_UInt8 tempPanzoomXb = 0;
494    M4AIR_Params Params;
495    /**/
496
497    /*Used for cropping and black borders*/
498    M4OSA_Context    pPictureContext = M4OSA_NULL;
499    M4OSA_FilePosition    pictureSize = 0 ;
500    M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
501    //M4EXIFC_Context pExifContext = M4OSA_NULL;
502    M4EXIFC_BasicTags pBasicTags;
503    M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
504    M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
505    M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
506    /**/
507
508    /**
509     * Check input parameters */
510    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
511         "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
512    M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
513         "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
514    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
515         "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
516    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
517    /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
518    if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
519    {
520        pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
521    }
522
523    /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
524    if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
525    {
526        /**
527         * Convert and resize input ARGB8888 file to YUV420 */
528        /*To support ARGB8888 : */
529        M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and height %d %d",
530            pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
531        err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
532             pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
533                pC->m_pPto3GPPparams->height);
534        if(err != M4NO_ERROR)
535        {
536            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting ARGB8888: 0x%x\n", err);
537            return err;
538        }
539    }
540    /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
541    else
542    {
543        /**
544         * Computes ratios */
545        if(pC->m_pDecodedPlane == M4OSA_NULL)
546        {
547            /**
548             * Convert input ARGB8888 file to YUV420 */
549             M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and height %d %d",
550                pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
551            err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
552                &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
553            if(err != M4NO_ERROR)
554            {
555                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting ARGB8888: 0x%x\n", err);
556                if(pC->m_pDecodedPlane != M4OSA_NULL)
557                {
558                    /* YUV420 planar is returned but allocation is made only once
559                        (contiguous planes in memory) */
560                    if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
561                    {
562                        free(pC->m_pDecodedPlane->pac_data);
563                    }
564                    free(pC->m_pDecodedPlane);
565                    pC->m_pDecodedPlane = M4OSA_NULL;
566                }
567                return err;
568            }
569        }
570
571        /*Initialize AIR Params*/
572        Params.m_inputCoord.m_x = 0;
573        Params.m_inputCoord.m_y = 0;
574        Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
575        Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
576        Params.m_outputSize.m_width = pImagePlanes->u_width;
577        Params.m_outputSize.m_height = pImagePlanes->u_height;
578        Params.m_bOutputStripe = M4OSA_FALSE;
579        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
580
581        /*Initialize Exif params structure*/
582        pBasicTags.orientation = M4COMMON_kOrientationUnknown;
583
584        /**
585        Pan&zoom params*/
586        if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
587        {
588            /*Save ratio values, they can be reused if the new ratios are 0*/
589            tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
590            tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
591            /*Check that the ratio is not 0*/
592            /*Check (a) parameters*/
593            if(pC->m_pPto3GPPparams->PanZoomXa == 0)
594            {
595                M4OSA_UInt8 maxRatio = 0;
596                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
597                     pC->m_pPto3GPPparams->PanZoomTopleftYa)
598                {
599                    /*The ratio is 0, that means the area of the picture defined with (a)
600                    parameters is bigger than the image size*/
601                    if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
602                    {
603                        /*The oversize is maxRatio*/
604                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
605                    }
606                }
607                else
608                {
609                    /*The ratio is 0, that means the area of the picture defined with (a)
610                     parameters is bigger than the image size*/
611                    if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
612                    {
613                        /*The oversize is maxRatio*/
614                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
615                    }
616                }
617                /*Modify the (a) parameters:*/
618                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
619                {
620                    /*The (a) topleft parameters can be moved to keep the same area size*/
621                    pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
622                }
623                else
624                {
625                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
626                    modified to match the image size*/
627                    pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
628                }
629                if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
630                {
631                    /*The (a) topleft parameters can be moved to keep the same area size*/
632                    pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
633                }
634                else
635                {
636                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
637                     modified to match the image size*/
638                    pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
639                }
640                /*The new ratio is the original one*/
641                pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
642                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
643                {
644                    /*Change the ratio if the area of the picture defined with (a) parameters is
645                    bigger than the image size*/
646                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
647                }
648                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
649                {
650                    /*Change the ratio if the area of the picture defined with (a) parameters is
651                    bigger than the image size*/
652                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
653                }
654            }
655            /*Check (b) parameters*/
656            if(pC->m_pPto3GPPparams->PanZoomXb == 0)
657            {
658                M4OSA_UInt8 maxRatio = 0;
659                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
660                     pC->m_pPto3GPPparams->PanZoomTopleftYb)
661                {
662                    /*The ratio is 0, that means the area of the picture defined with (b)
663                     parameters is bigger than the image size*/
664                    if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
665                    {
666                        /*The oversize is maxRatio*/
667                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
668                    }
669                }
670                else
671                {
672                    /*The ratio is 0, that means the area of the picture defined with (b)
673                     parameters is bigger than the image size*/
674                    if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
675                    {
676                        /*The oversize is maxRatio*/
677                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
678                    }
679                }
680                /*Modify the (b) parameters:*/
681                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
682                {
683                    /*The (b) topleft parameters can be moved to keep the same area size*/
684                    pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
685                }
686                else
687                {
688                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
689                     modified to match the image size*/
690                    pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
691                }
692                if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
693                {
694                    /*The (b) topleft parameters can be moved to keep the same area size*/
695                    pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
696                }
697                else
698                {
699                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
700                    modified to match the image size*/
701                    pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
702                }
703                /*The new ratio is the original one*/
704                pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
705                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
706                {
707                    /*Change the ratio if the area of the picture defined with (b) parameters is
708                    bigger than the image size*/
709                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
710                }
711                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
712                {
713                    /*Change the ratio if the area of the picture defined with (b) parameters is
714                    bigger than the image size*/
715                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
716                }
717            }
718
719            /**
720             * Computes AIR parameters */
721/*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
722            (pC->m_pPto3GPPparams->PanZoomTopleftXa +
723            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
724                - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
725            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
726        Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
727            (pC->m_pPto3GPPparams->PanZoomTopleftYa +
728            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
729                 - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
730            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
731
732        Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
733            (pC->m_pPto3GPPparams->PanZoomXa +
734            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
735            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
736
737        Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
738            (pC->m_pPto3GPPparams->PanZoomXa +
739            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
740            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
741 */
742            // Use (pC->m_NbImage-1) instead of pC->m_NbImage, because pC->m_ImageCounter
743            // reaches at most (x-1) for x frames
744            Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
745                (pC->m_pPto3GPPparams->PanZoomTopleftXa +
746                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
747                     - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
748                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
749            Params.m_inputCoord.m_y =
750                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
751                (pC->m_pPto3GPPparams->PanZoomTopleftYa +
752                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
753                     - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
754                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
755
756            Params.m_inputSize.m_width =
757                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
758                (pC->m_pPto3GPPparams->PanZoomXa +
759                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
760                     - pC->m_pPto3GPPparams->PanZoomXa) *
761                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
762
763            Params.m_inputSize.m_height =
764                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
765                (pC->m_pPto3GPPparams->PanZoomXa +
766                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
767                    - pC->m_pPto3GPPparams->PanZoomXa) *
768                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
769
770            if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
771                 > pC->m_pDecodedPlane->u_width)
772            {
773                Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
774                    - Params.m_inputCoord.m_x;
775            }
776
777            if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
778                 > pC->m_pDecodedPlane->u_height)
779            {
780                Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
781                     - Params.m_inputCoord.m_y;
782            }
783
784
785
786            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
787            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
788        }
789
790
791
792    /**
793        Picture rendering: Black borders*/
794
795        if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
796        {
797            memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
798                (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
799            memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
800                (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
801            memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
802                (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
803
804            /**
805            First without pan&zoom*/
806            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
807            {
808                switch(pBasicTags.orientation)
809                {
810                default:
811                case M4COMMON_kOrientationUnknown:
812                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; /* fall through */
813                case M4COMMON_kOrientationTopLeft:
814                case M4COMMON_kOrientationTopRight:
815                case M4COMMON_kOrientationBottomRight:
816                case M4COMMON_kOrientationBottomLeft:
817                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
818                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
819                         //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
820                    {
821                        /*it is height so black borders will be on the top and on the bottom side*/
822                        Params.m_outputSize.m_width = pImagePlanes->u_width;
823                        Params.m_outputSize.m_height =
824                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
825                                * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
826                        /*number of lines at the top*/
827                        pImagePlanes[0].u_topleft =
828                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
829                                -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
830                        pImagePlanes[0].u_height = Params.m_outputSize.m_height;
831                        pImagePlanes[1].u_topleft =
832                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
833                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
834                        pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
835                        pImagePlanes[2].u_topleft =
836                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
837                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
838                        pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
839                    }
840                    else
841                    {
842                        /*it is width so black borders will be on the left and right side*/
843                        Params.m_outputSize.m_height = pImagePlanes->u_height;
844                        Params.m_outputSize.m_width =
845                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
846                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
847
848                        pImagePlanes[0].u_topleft =
849                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
850                                -Params.m_outputSize.m_width)>>1));
851                        pImagePlanes[0].u_width = Params.m_outputSize.m_width;
852                        pImagePlanes[1].u_topleft =
853                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
854                                -(Params.m_outputSize.m_width>>1)))>>1);
855                        pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
856                        pImagePlanes[2].u_topleft =
857                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
858                                -(Params.m_outputSize.m_width>>1)))>>1);
859                        pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
860                    }
861                    break;
862                case M4COMMON_kOrientationLeftTop:
863                case M4COMMON_kOrientationLeftBottom:
864                case M4COMMON_kOrientationRightTop:
865                case M4COMMON_kOrientationRightBottom:
866                        if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
867                             /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
868                             //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
869                        {
870                            /*it is height so black borders will be on the top and on
871                             the bottom side*/
872                            Params.m_outputSize.m_height = pImagePlanes->u_width;
873                            Params.m_outputSize.m_width =
874                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
875                                    * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
876                            /*number of lines at the top*/
877                            pImagePlanes[0].u_topleft =
878                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
879                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
880                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
881                            pImagePlanes[1].u_topleft =
882                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
883                                    -(Params.m_outputSize.m_width>>1)))>>1)\
884                                        *pImagePlanes[1].u_stride)+1;
885                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
886                            pImagePlanes[2].u_topleft =
887                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
888                                    -(Params.m_outputSize.m_width>>1)))>>1)\
889                                        *pImagePlanes[2].u_stride)+1;
890                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
891                        }
892                        else
893                        {
894                            /*it is width so black borders will be on the left and right side*/
895                            Params.m_outputSize.m_width = pImagePlanes->u_height;
896                            Params.m_outputSize.m_height =
897                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
898                                     * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
899
900                            pImagePlanes[0].u_topleft =
901                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
902                                    -Params.m_outputSize.m_height))>>1))+1;
903                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
904                            pImagePlanes[1].u_topleft =
905                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
906                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
907                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
908                            pImagePlanes[2].u_topleft =
909                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
910                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
911                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
912                        }
913                    break;
914                }
915            }
916
917            /**
918            Secondly with pan&zoom*/
919            else
920            {
921                switch(pBasicTags.orientation)
922                {
923                default:
924                case M4COMMON_kOrientationUnknown:
925                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft; /* fall through */
926                case M4COMMON_kOrientationTopLeft:
927                case M4COMMON_kOrientationTopRight:
928                case M4COMMON_kOrientationBottomRight:
929                case M4COMMON_kOrientationBottomLeft:
930                    /*NO ROTATION*/
931                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
932                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
933                            //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
934                    {
935                        /*Black borders will be on the top and bottom of the output video*/
936                        /*Maximum output height if the input image aspect ratio is kept and if
937                        the output width is the screen width*/
938                        M4OSA_UInt32 tempOutputSizeHeight =
939                            (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
940                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
941                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
942                        M4OSA_UInt32 tempFinalInputHeight = 0;
943                        /*The output width is the screen width*/
944                        Params.m_outputSize.m_width = pImagePlanes->u_width;
945                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
946
947                        /*Maximum input height according to the maximum output height
948                        (proportional to the maximum output height)*/
949                        tempInputSizeHeightMax = (pImagePlanes->u_height\
950                            *Params.m_inputSize.m_height)/tempOutputSizeHeight;
951                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
952
953                        /*Check if the maximum possible input height is contained into the
954                        input image height*/
955                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
956                        {
957                            /*The maximum possible input height is contained in the input
958                            image height,
959                            that means no black borders, the input pan zoom area will be extended
960                            so that the input AIR height will be the maximum possible*/
961                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
962                                 <= Params.m_inputCoord.m_y
963                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
964                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
965                                         + Params.m_inputSize.m_height))
966                            {
967                                /*The input pan zoom area can be extended symmetrically on the
968                                top and bottom side*/
969                                Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
970                                    - Params.m_inputSize.m_height)>>1);
971                            }
972                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
973                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
974                            {
975                                /*There is not enough place above the input pan zoom area to
976                                extend it symmetrically,
977                                so extend it to the maximum on the top*/
978                                Params.m_inputCoord.m_y = 0;
979                            }
980                            else
981                            {
982                                /*There is not enough place below the input pan zoom area to
983                                extend it symmetrically,
984                                so extend it to the maximum on the bottom*/
985                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
986                                    - tempInputSizeHeightMax;
987                            }
988                            /*The input height of the AIR is the maximum possible height*/
989                            Params.m_inputSize.m_height = tempInputSizeHeightMax;
990                        }
991                        else
992                        {
993                            /*The maximum possible input height is greater than the input
994                            image height,
995                            that means black borders are necessary to keep aspect ratio
996                            The input height of the AIR is all the input image height*/
997                            Params.m_outputSize.m_height =
998                                (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
999                                    /Params.m_inputSize.m_height;
1000                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1001                            Params.m_inputCoord.m_y = 0;
1002                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1003                            pImagePlanes[0].u_topleft =
1004                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1005                                    -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
1006                            pImagePlanes[0].u_height = Params.m_outputSize.m_height;
1007                            pImagePlanes[1].u_topleft =
1008                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1009                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1010                                        *pImagePlanes[1].u_stride);
1011                            pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
1012                            pImagePlanes[2].u_topleft =
1013                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1014                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1015                                        *pImagePlanes[2].u_stride);
1016                            pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
1017                        }
1018                    }
1019                    else
1020                    {
1021                        /*Black borders will be on the left and right side of the output video*/
1022                        /*Maximum output width if the input image aspect ratio is kept and if the
1023                         output height is the screen height*/
1024                        M4OSA_UInt32 tempOutputSizeWidth =
1025                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
1026                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
1027                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1028                        M4OSA_UInt32 tempFinalInputWidth = 0;
1029                        /*The output height is the screen height*/
1030                        Params.m_outputSize.m_height = pImagePlanes->u_height;
1031                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1032
1033                        /*Maximum input width according to the maximum output width
1034                        (proportional to the maximum output width)*/
1035                        tempInputSizeWidthMax =
1036                             (pImagePlanes->u_width*Params.m_inputSize.m_width)\
1037                                /tempOutputSizeWidth;
1038                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1039
1040                        /*Check if the maximum possible input width is contained into the input
1041                         image width*/
1042                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
1043                        {
1044                            /*The maximum possible input width is contained in the input
1045                            image width,
1046                            that means no black borders, the input pan zoom area will be extended
1047                            so that the input AIR width will be the maximum possible*/
1048                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
1049                                <= Params.m_inputCoord.m_x
1050                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
1051                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1052                                        + Params.m_inputSize.m_width))
1053                            {
1054                                /*The input pan zoom area can be extended symmetrically on the
1055                                     right and left side*/
1056                                Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
1057                                     - Params.m_inputSize.m_width)>>1);
1058                            }
1059                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1060                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1061                            {
1062                                /*There is not enough place on the left of the input pan zoom area to
1063                                    extend it symmetrically,
1064                                so extend it to the maximum on the left*/
1065                                Params.m_inputCoord.m_x = 0;
1066                            }
1067                            else
1068                            {
1069                                /*There is not enough place on the right of the input pan zoom area
1070                                    to extend it symmetrically,
1071                                so extend it to the maximum on the right*/
1072                                Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
1073                                    - tempInputSizeWidthMax;
1074                            }
1075                            /*The input width of the AIR is the maximum possible width*/
1076                            Params.m_inputSize.m_width = tempInputSizeWidthMax;
1077                        }
1078                        else
1079                        {
1080                            /*The maximum possible input width is greater than the input
1081                            image width,
1082                            that means black borders are necessary to keep aspect ratio
1083                            The input width of the AIR is all the input image width*/
1084                            Params.m_outputSize.m_width =\
1085                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
1086                                    /Params.m_inputSize.m_width;
1087                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1088                            Params.m_inputCoord.m_x = 0;
1089                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1090                            pImagePlanes[0].u_topleft =
1091                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1092                                    -Params.m_outputSize.m_width)>>1));
1093                            pImagePlanes[0].u_width = Params.m_outputSize.m_width;
1094                            pImagePlanes[1].u_topleft =
1095                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1096                                    -(Params.m_outputSize.m_width>>1)))>>1);
1097                            pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
1098                            pImagePlanes[2].u_topleft =
1099                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1100                                    -(Params.m_outputSize.m_width>>1)))>>1);
1101                            pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
1102                        }
1103                    }
1104                    break;
1105                case M4COMMON_kOrientationLeftTop:
1106                case M4COMMON_kOrientationLeftBottom:
1107                case M4COMMON_kOrientationRightTop:
1108                case M4COMMON_kOrientationRightBottom:
1109                    /*ROTATION*/
1110                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1111                         /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
1112                         //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
1113                    {
1114                        /*Black borders will be on the left and right side of the output video*/
1115                        /*Maximum output height if the input image aspect ratio is kept and if
1116                        the output height is the screen width*/
1117                        M4OSA_UInt32 tempOutputSizeHeight =
1118                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1119                             /pC->m_pDecodedPlane->u_height);
1120                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1121                        M4OSA_UInt32 tempFinalInputHeight = 0;
1122                        /*The output width is the screen height*/
1123                        Params.m_outputSize.m_height = pImagePlanes->u_width;
1124                        Params.m_outputSize.m_width= pImagePlanes->u_height;
1125                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1126
1127                        /*Maximum input height according to the maximum output height
1128                             (proportional to the maximum output height)*/
1129                        tempInputSizeHeightMax =
1130                            (pImagePlanes->u_height*Params.m_inputSize.m_width)\
1131                                /tempOutputSizeHeight;
1132                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1133
1134                        /*Check if the maximum possible input height is contained in the
1135                             input image width (rotation included)*/
1136                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
1137                        {
1138                            /*The maximum possible input height is contained in the input
1139                            image width (rotation included),
1140                            that means no black borders, the input pan zoom area will be extended
1141                            so that the input AIR width will be the maximum possible*/
1142                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
1143                                <= Params.m_inputCoord.m_x
1144                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
1145                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1146                                        + Params.m_inputSize.m_width))
1147                            {
1148                                /*The input pan zoom area can be extended symmetrically on the
1149                                 right and left side*/
1150                                Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
1151                                    - Params.m_inputSize.m_width)>>1);
1152                            }
1153                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1154                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1155                            {
1156                                /*There is not enough place on the left of the input pan
1157                                zoom area to extend it symmetrically,
1158                                so extend it to the maximum on the left*/
1159                                Params.m_inputCoord.m_x = 0;
1160                            }
1161                            else
1162                            {
1163                                /*There is not enough place on the right of the input pan zoom
1164                                 area to extend it symmetrically,
1165                                so extend it to the maximum on the right*/
1166                                Params.m_inputCoord.m_x =
1167                                     pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
1168                            }
1169                            /*The input width of the AIR is the maximum possible width*/
1170                            Params.m_inputSize.m_width = tempInputSizeHeightMax;
1171                        }
1172                        else
1173                        {
1174                            /*The maximum possible input height is greater than the input
1175                            image width (rotation included),
1176                            that means black borders are necessary to keep aspect ratio
1177                            The input width of the AIR is all the input image width*/
1178                            Params.m_outputSize.m_width =
1179                            (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
1180                                /Params.m_inputSize.m_width;
1181                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1182                            Params.m_inputCoord.m_x = 0;
1183                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1184                            pImagePlanes[0].u_topleft =
1185                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1186                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
1187                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
1188                            pImagePlanes[1].u_topleft =
1189                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1190                                -(Params.m_outputSize.m_width>>1)))>>1)\
1191                                    *pImagePlanes[1].u_stride)+1;
1192                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
1193                            pImagePlanes[2].u_topleft =
1194                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1195                                -(Params.m_outputSize.m_width>>1)))>>1)\
1196                                    *pImagePlanes[2].u_stride)+1;
1197                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
1198                        }
1199                    }
1200                    else
1201                    {
1202                        /*Black borders will be on the top and bottom of the output video*/
1203                        /*Maximum output width if the input image aspect ratio is kept and if
1204                         the output width is the screen height*/
1205                        M4OSA_UInt32 tempOutputSizeWidth =
1206                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
1207                             /pC->m_pDecodedPlane->u_width);
1208                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1209                        M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0;
1210                        /*The output height is the screen width*/
1211                        Params.m_outputSize.m_width = pImagePlanes->u_height;
1212                        Params.m_outputSize.m_height= pImagePlanes->u_width;
1213                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1214
1215                        /*Maximum input width according to the maximum output width
1216                         (proportional to the maximum output width)*/
1217                        tempInputSizeWidthMax =
1218                        (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
1219                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1220
1221                        /*Check if the maximum possible input width is contained in the input
1222                         image height (rotation included)*/
1223                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
1224                        {
1225                            /*The maximum possible input width is contained in the input
1226                             image height (rotation included),
1227                            that means no black borders, the input pan zoom area will be extended
1228                            so that the input AIR height will be the maximum possible*/
1229                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
1230                                <= Params.m_inputCoord.m_y
1231                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
1232                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
1233                                        + Params.m_inputSize.m_height))
1234                            {
1235                                /*The input pan zoom area can be extended symmetrically on
1236                                the top and bottom side*/
1237                                Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
1238                                    - Params.m_inputSize.m_height)>>1);
1239                            }
1240                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1241                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1242                            {
1243                                /*There is not enough place on the top of the input pan zoom
1244                                area to extend it symmetrically,
1245                                so extend it to the maximum on the top*/
1246                                Params.m_inputCoord.m_y = 0;
1247                            }
1248                            else
1249                            {
1250                                /*There is not enough place on the bottom of the input pan zoom
1251                                 area to extend it symmetrically,
1252                                so extend it to the maximum on the bottom*/
1253                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
1254                                     - tempInputSizeWidthMax;
1255                            }
1256                            /*The input height of the AIR is the maximum possible height*/
1257                            Params.m_inputSize.m_height = tempInputSizeWidthMax;
1258                        }
1259                        else
1260                        {
1261                            /*The maximum possible input width is greater than the input\
1262                             image height (rotation included),
1263                            that means black borders are necessary to keep aspect ratio
1264                            The input height of the AIR is all the input image height*/
1265                            Params.m_outputSize.m_height =
1266                                (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
1267                                    /Params.m_inputSize.m_height;
1268                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1269                            Params.m_inputCoord.m_y = 0;
1270                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1271                            pImagePlanes[0].u_topleft =
1272                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1273                                    -Params.m_outputSize.m_height))>>1))+1;
1274                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
1275                            pImagePlanes[1].u_topleft =
1276                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1277                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1278                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
1279                            pImagePlanes[2].u_topleft =
1280                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1281                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1282                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
1283                        }
1284                    }
1285                    break;
1286                }
1287            }
1288
1289            /*Width and height have to be even (YUV420 chroma planes are subsampled by 2 in both directions)*/
1290            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1291            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1292            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
1293            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
1294            pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
1295            pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
1296            pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
1297            pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
1298            pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
1299            pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
1300
1301            /*Check that values are coherent*/
1302            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
1303            {
1304                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
1305            }
1306            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
1307            {
1308                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
1309            }
1310        }
1311
1312        /**
1313        Picture rendering: Resizing and Cropping*/
1314        if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
1315        {
1316            switch(pBasicTags.orientation)
1317            {
1318            default:
1319            case M4COMMON_kOrientationUnknown:
1320                Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
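                /* no break: an unknown orientation falls through and is handled like top-left */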
1321            case M4COMMON_kOrientationTopLeft:
1322            case M4COMMON_kOrientationTopRight:
1323            case M4COMMON_kOrientationBottomRight:
1324            case M4COMMON_kOrientationBottomLeft:
1325                Params.m_outputSize.m_height = pImagePlanes->u_height;
1326                Params.m_outputSize.m_width = pImagePlanes->u_width;
1327                break;
1328            case M4COMMON_kOrientationLeftTop:
1329            case M4COMMON_kOrientationLeftBottom:
1330            case M4COMMON_kOrientationRightTop:
1331            case M4COMMON_kOrientationRightBottom:
1332                Params.m_outputSize.m_height = pImagePlanes->u_width;
1333                Params.m_outputSize.m_width = pImagePlanes->u_height;
1334                break;
1335            }
1336        }
1337
1338        /**
1339        Picture rendering: Cropping*/
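        /* Compare input and output aspect ratios: if the input is proportionally taller than
           the output, the height is cropped, otherwise the width is cropped, so that the kept
           input area matches the output aspect ratio exactly */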
1340        if(pC->m_mediaRendering == M4xVSS_kCropping)
1341        {
1342            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
1343                 /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
1344            {
1345                M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
1346                /*height will be cropped*/
1347                Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
1348                    * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
1349                Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
1350                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1351                {
1352                    Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
1353                        ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
1354                }
1355                else
1356                {
1357                    Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
1358                        ((tempHeight - Params.m_inputSize.m_height))>>1);
1359                }
1360            }
1361            else
1362            {
1363                M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
1364                /*width will be cropped*/
1365                Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
1366                    * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
1367                Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
1368                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1369                {
1370                    Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
1371                        ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
1372                }
1373                else
1374                {
1375                    Params.m_inputCoord.m_x += (M4OSA_Int32)\
1376                        (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
1377                }
1378            }
1379        }
1380
1381
1382
1383        /**
1384         * Call AIR functions */
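        /* The AIR context is created once for the whole picture sequence, re-configured with
           the input crop and output size computed above for each picture, and then used to
           produce the resized/cropped YUV420 output planes from the decoded plane */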
1385        if(M4OSA_NULL == pC->m_air_context)
1386        {
1387            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
1388            if(err != M4NO_ERROR)
1389            {
1390                free(pC->m_pDecodedPlane[0].pac_data);
1391                free(pC->m_pDecodedPlane);
1392                pC->m_pDecodedPlane = M4OSA_NULL;
1393                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1394                     Error when initializing AIR: 0x%x", err);
1395                return err;
1396            }
1397        }
1398
1399        err = M4AIR_configure(pC->m_air_context, &Params);
1400        if(err != M4NO_ERROR)
1401        {
1402            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1403                 Error when configuring AIR: 0x%x", err);
1404            M4AIR_cleanUp(pC->m_air_context);
1405            free(pC->m_pDecodedPlane[0].pac_data);
1406            free(pC->m_pDecodedPlane);
1407            pC->m_pDecodedPlane = M4OSA_NULL;
1408            return err;
1409        }
1410
1411        err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
1412        if(err != M4NO_ERROR)
1413        {
1414            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
1415            M4AIR_cleanUp(pC->m_air_context);
1416            free(pC->m_pDecodedPlane[0].pac_data);
1417            free(pC->m_pDecodedPlane);
1418            pC->m_pDecodedPlane = M4OSA_NULL;
1419            return err;
1420        }
1421        pImagePlanes[0] = pImagePlanes1;
1422        pImagePlanes[1] = pImagePlanes2;
1423        pImagePlanes[2] = pImagePlanes3;
1424    }
1425
1426
1427    /**
1428     * Increment the image counter */
1429    pC->m_ImageCounter++;
1430
1431    /**
1432     * Check end of sequence */
1433    last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
1434
1435    /**
1436     * Keep the picture duration */
1437    *pPictureDuration = pC->m_timeDuration;
1438
1439    if (1 == last_frame_flag)
1440    {
1441        if(M4OSA_NULL != pC->m_air_context)
1442        {
1443            err = M4AIR_cleanUp(pC->m_air_context);
1444            if(err != M4NO_ERROR)
1445            {
1446                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
1447                return err;
1448            }
1449        }
1450        if(M4OSA_NULL != pC->m_pDecodedPlane)
1451        {
1452            free(pC->m_pDecodedPlane[0].pac_data);
1453            free(pC->m_pDecodedPlane);
1454            pC->m_pDecodedPlane = M4OSA_NULL;
1455        }
1456        return M4PTO3GPP_WAR_LAST_PICTURE;
1457    }
1458
1459    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
1460    return M4NO_ERROR;
1461}
1462
1463/**
1464 ******************************************************************************
1465 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1466 * @brief    This function initializes Pto3GPP with the given parameters
1467 * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
1468 *            context. This context contains a pointer on the current element
1469 *            of the chained list of Pto3GPP parameters.
1470 * @param    pContext    (IN) The integrator own context
1471 *
1472 * @return    M4NO_ERROR:    No error
1473 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
1474 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1475 ******************************************************************************
1476 */
1477M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1478{
1479    /************************************************************************/
1480    /* Definitions to generate dummy AMR file used to add AMR silence in files generated
1481     by Pto3GPP */
1482    #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
1483    /* This constant is defined in M4VSS3GPP_InternalConfig.h */
1484    extern const M4OSA_UInt8\
1485         M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
1486
1487    /* AMR file header written at the start of the dummy AMR silence file */
1488    #define M4VSS3GPP_AMR_HEADER_SIZE 6
1489    const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_HEADER_SIZE] =
1490    { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
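    /* The header bytes above spell "#!AMR\n", the standard AMR-NB file signature */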
1491    /************************************************************************/
1492
1493    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1494    M4OSA_ERR err;
1495    M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
1496    M4PTO3GPP_Params Params;
1497     M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
1498    M4OSA_Bool cmpResult=M4OSA_FALSE;
1499    M4OSA_Context pDummyAMRFile;
1500    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1501    /*UTF conversion support*/
1502    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1503    M4OSA_UInt32 i;
1504
1505    /**
1506     * Create a M4PTO3GPP instance */
1507    err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
1508         xVSS_context->pFileWritePtr);
1509    if (err != M4NO_ERROR)
1510    {
1511        M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp: M4PTO3GPP_Init returned 0x%x\n",err);
1512        return err;
1513    }
1514
1515    pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt),
1516         M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
1517    if(pCallBackCtxt == M4OSA_NULL)
1518    {
1519        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
1520        return M4ERR_ALLOC;
1521    }
1522
1523    Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
1524    Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
1525    Params.videoProfile = xVSS_context->pSettings->xVSS.outputVideoProfile;
1526    Params.videoLevel = xVSS_context->pSettings->xVSS.outputVideoLevel;
1527
1528    /**
1529     * Generate "dummy" amr file containing silence in temporary folder */
1530    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1531    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1532
1533    /**
1534     * UTF conversion: convert the temporary path into the customer format*/
1535    pDecodedPath = out_amr;
1536
1537    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1538            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1539    {
1540        M4OSA_UInt32 length = 0;
1541        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1542             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1543        if(err != M4NO_ERROR)
1544        {
1545            M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1546                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1547            return err;
1548        }
1549        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1550    }
1551
1552    /**
1553    * End of the conversion, now use the converted path*/
1554
1555    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
1556
1557    /*Commented because of the use of the UTF conversion see above*/
1558/*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
1559 */
1560    if(err != M4NO_ERROR)
1561    {
1562        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
1563             error: 0x%x\n",out_amr, err);
1564        return err;
1565    }
1566
1567    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1568        (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
1569    if(err != M4NO_ERROR)
1570    {
1571        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
1572             error: 0x%x\n",out_amr, err);
1573        return err;
1574    }
1575
1576    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1577         (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
1578    if(err != M4NO_ERROR)
1579    {
1580        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1581            Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
1582        return err;
1583    }
1584
1585    err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
1586    if(err != M4NO_ERROR)
1587    {
1588        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1589            Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
1590        return err;
1591    }
1592
1593    /**
1594     * Fill parameters for Pto3GPP with the parameters contained in the current element of the
1595     * Pto3GPP parameters chained list and with default parameters */
1596/*+ New Encoder bitrates */
1597    if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
1598        Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
1599    }
1600    else {
1601          Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
1602    }
1603    M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
1604        Params.OutputVideoBitrate);
1605/*- New Encoder bitrates */
1606    Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
1607    Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
1608    Params.pPictureCallbackCtxt    = pCallBackCtxt;
1609    /*FB: change to use the converted path (UTF conversion) see the conversion above*/
1610    /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */
1611    Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
1612    Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
1613    Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
1614    Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
1615    Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
1616    /*+PR No:  blrnxpsw#223*/
1617    /*Increasing frequency of Frame, calculating Nos of Frame = duration /FPS */
1618    /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/
1619    /*If case check for PanZoom removed */
1620    Params.NbVideoFrames            = (M4OSA_UInt32)
1621        (xVSS_context->pPTo3GPPcurrentParams->duration \
1622            / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
1623    pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
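    /* Note: the "framerate" field actually holds the duration of one frame (same unit as
       "duration"); it is both the divisor used for NbVideoFrames above and the per-picture
       duration reported by the callback through m_timeDuration */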
1624    /*-PR No:  blrnxpsw#223*/
1625    pCallBackCtxt->m_ImageCounter    = 0;
1626    pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
1627    pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
1628    pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
1629    pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
1630    pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
1631    pCallBackCtxt->m_air_context    = M4OSA_NULL;
1632    pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
1633
1634    /**
1635     * Set the input and output files */
1636    err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
1637    if (err != M4NO_ERROR)
1638    {
1639        M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
1640        if(pCallBackCtxt != M4OSA_NULL)
1641        {
1642            free(pCallBackCtxt);
1643            pCallBackCtxt = M4OSA_NULL;
1644        }
1645        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1646        return err;
1647    }
1648
1649    /**
1650     * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
1651    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
1652    xVSS_context->pCallBackCtxt = pCallBackCtxt;
1653
1654    return M4NO_ERROR;
1655}
1656
1657/**
1658 ******************************************************************************
1659 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1660 * @brief    This function cleans up Pto3GPP
1661 * @note
1662 * @param    pContext    (IN) The integrator own context
1663 *
1664 * @return    M4NO_ERROR:    No error
1665 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1666 ******************************************************************************
1667 */
1668M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1669{
1670    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1671    M4OSA_ERR err;
1672    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1673    /*UTF conversion support*/
1674    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1675
1676    /**
1677    * Free the PTO3GPP callback context */
1678    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
1679    {
1680        free(xVSS_context->pCallBackCtxt);
1681        xVSS_context->pCallBackCtxt = M4OSA_NULL;
1682    }
1683
1684    /**
1685     * Finalize the output file */
1686    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
1687    if (err != M4NO_ERROR)
1688    {
1689        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
1690        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1691        return err;
1692    }
1693
1694    /**
1695     * Free this M4PTO3GPP instance */
1696    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1697    if (err != M4NO_ERROR)
1698    {
1699        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
1700        return err;
1701    }
1702
1703    /**
1704     * Remove dummy.amr file */
1705    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1706    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1707
1708    /**
1709     * UTF conversion: convert the temporary path into the customer format*/
1710    pDecodedPath = out_amr;
1711
1712    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1713            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1714    {
1715        M4OSA_UInt32 length = 0;
1716        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1717             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1718        if(err != M4NO_ERROR)
1719        {
1720            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
1721                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1722            return err;
1723        }
1724        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1725    }
1726    /**
1727    * End of the conversion, now use the decoded path*/
1728    remove((const char *)pDecodedPath);
1729
1730    /*Commented because of the use of the UTF conversion*/
1731/*    remove(out_amr);
1732 */
1733
1734    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
1735    xVSS_context->pCallBackCtxt = M4OSA_NULL;
1736
1737    return M4NO_ERROR;
1738}
1739
1740/**
1741 ******************************************************************************
1742 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1743 * @brief    This function converts an RGB565 plane to YUV420 planar
1744 * @note    It is used only for framing effect
1745 *            It allocates output YUV planes
1746 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
1747 *
1748 * @return    M4NO_ERROR:    No error
1749 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1750 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1751 ******************************************************************************
1752 */
1753M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1754{
1755    M4OSA_ERR err;
1756
1757    /**
1758     * Allocate output YUV planes */
1759    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
1760         M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
1761    if(framingCtx->FramingYuv == M4OSA_NULL)
1762    {
1763        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1764        return M4ERR_ALLOC;
1765    }
1766    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
1767    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
1768    framingCtx->FramingYuv[0].u_topleft = 0;
1769    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
1770    framingCtx->FramingYuv[0].pac_data =
1771         (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
1772            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
1773                "Alloc for the Conversion output YUV");
1774    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
1775    {
1776        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1777        return M4ERR_ALLOC;
1778    }
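    /* The U and V planes share the single ((w*h*3)>>1)-byte buffer allocated for Y above:
       U starts right after the w*h luma samples, V right after the (w/2)*(h/2) U samples */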
1779    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
1780    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
1781    framingCtx->FramingYuv[1].u_topleft = 0;
1782    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1783    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
1784        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
1785    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
1786    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
1787    framingCtx->FramingYuv[2].u_topleft = 0;
1788    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1789    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
1790        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
1791
1792    /**
1793     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
1794      effect */
1795    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
1796    if(err != M4NO_ERROR)
1797    {
1798        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
1799             error when converting from RGB to YUV: 0x%x\n", err);
1800    }
1801
1802    framingCtx->duration = 0;
1803    framingCtx->previousClipTime = -1;
1804    framingCtx->previewOffsetClipTime = -1;
1805
1806    /**
1807     * Only one element in the chained list (no animated image with RGB buffer...) */
1808    framingCtx->pCurrent = framingCtx;
1809    framingCtx->pNext = framingCtx;
1810
1811    return M4NO_ERROR;
1812}
1813
1814M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
1815{
1816    M4OSA_UInt32 i;
1817    M4OSA_UInt8* plane = planeIn;
1818    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1819    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1820
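    /* Fill the RGB565 plane with TRANSPARENT_COLOR (0x07E0, pure green): size/2 pixels are
       written, two bytes each, high byte (0x07) first then low byte (0xE0) */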
1821    for(i=0; i<(size>>1); i++)
1822    {
1823        *plane++ = transparent1;
1824        *plane++ = transparent2;
1825    }
1826
1827    return M4NO_ERROR;
1828}
1829
1830
1831/**
1832 ******************************************************************************
1833 * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toYUV420_FrammingEffect(M4OSA_Context pContext,
1834 *                                                M4VSS3GPP_EffectSettings* pEffect,
1835 *                                                M4xVSS_FramingStruct* framingCtx,
1836                                                  M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
1837 *
1838 * @brief    This function converts an ARGB8888 input file to YUV420, used for the framing effect
1839 * @note    The input ARGB8888 file path is contained in the pEffect structure
1840 *            If the ARGB8888 must be resized to fit output video size, this function
1841 *            will do it.
1842 * @param    pContext    (IN) The integrator own context
1843 * @param    pEffect        (IN) The effect structure containing all information on
1844 *                        the file to decode, resizing ...
1845 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
1846 *
1847 * @return    M4NO_ERROR:    No error
1848 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1849 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1850 * @return    M4ERR_FILE_NOT_FOUND: File not found.
1851 ******************************************************************************
1852 */
1853
1854
1855M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1856                                                               M4VSS3GPP_EffectSettings* pEffect,
1857                                                               M4xVSS_FramingStruct* framingCtx,
1858                                                               M4VIDEOEDITING_VideoFrameSize\
1859                                                               OutputVideoResolution)
1860{
1861    M4OSA_ERR err = M4NO_ERROR;
1862    M4OSA_Context pARGBIn;
1863    M4OSA_UInt32 file_size;
1864    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1865    M4OSA_UInt32 width, height, width_out, height_out;
1866    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
1867    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1868    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1869    /*UTF conversion support*/
1870    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1871    M4OSA_UInt32 i = 0,j = 0;
1872    M4VIFI_ImagePlane rgbPlane;
1873    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
1874    M4OSA_UInt32 frameSize;
1875    M4OSA_UInt32 tempAlphaPercent = 0;
1876    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
1877    M4OSA_UInt16 *ptr = M4OSA_NULL;
1878    M4OSA_UInt32 z = 0;
1879
1880    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
1881
1882    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
1883        framingCtx->width,framingCtx->height);
1884
1885    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
1886        "Image argb data");
1887    if(pTmpData == M4OSA_NULL) {
1888        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1889        return M4ERR_ALLOC;
1890    }
1891    /**
1892     * UTF conversion: convert the file path into the customer format*/
1893    pDecodedPath = pFile;
1894
1895    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1896            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1897    {
1898        M4OSA_UInt32 length = 0;
1899        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
1900             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1901        if(err != M4NO_ERROR)
1902        {
1903            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1904                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1905            free(pTmpData);
1906            pTmpData = M4OSA_NULL;
1907            return err;
1908        }
1909        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1910    }
1911
1912    /**
1913    * End of the conversion, now use the decoded path*/
1914
1915     /* Open input ARGB8888 file and store it into memory */
1916    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
1917
1918    if(err != M4NO_ERROR)
1919    {
1920        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1921        free(pTmpData);
1922        pTmpData = M4OSA_NULL;
1923        return err;
1924    }
1925
1926    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
1927    if(err != M4NO_ERROR)
1928    {
1929        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1930        free(pTmpData);
1931        pTmpData = M4OSA_NULL;
1932        return err;
1933    }
1934
1935
1936    err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1937    if(err != M4NO_ERROR)
1938    {
1939        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1940        free(pTmpData);
1941        pTmpData = M4OSA_NULL;
1942        return err;
1943    }
1944
1945
1946    rgbPlane.u_height = framingCtx->height;
1947    rgbPlane.u_width = framingCtx->width;
1948    rgbPlane.u_stride = rgbPlane.u_width*3;
1949    rgbPlane.u_topleft = 0;
1950
1951    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
1952    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
1953         M4VS, (M4OSA_Char*)"Image clip RGB888 data");
1954    if(rgbPlane.pac_data == M4OSA_NULL)
1955    {
1956        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1957        free(pTmpData);
1958        return M4ERR_ALLOC;
1959    }
1960
1961    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1962          Remove the alpha channel  ");
1963
1964    /* Drop the alpha channel: copy R, G, B; when alpha == 0, G is forced to 255 (green transparency key) */
1965    for (i=0, j = 0; i < frameSize_argb; i += 4) {
1966        /* this is alpha value */
1967        if ((i % 4) == 0)
1968        {
1969            tempAlphaPercent = pTmpData[i];
1970        }
1971
1972        /* R */
1973        rgbPlane.pac_data[j] = pTmpData[i+1];
1974        j++;
1975
1976        /* G */
1977        if (tempAlphaPercent > 0) {
1978            rgbPlane.pac_data[j] = pTmpData[i+2];
1979            j++;
1980        } else {/* In case of alpha value 0, make GREEN to 255 */
1981            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
1982            j++;
1983        }
1984
1985        /* B */
1986        rgbPlane.pac_data[j] = pTmpData[i+3];
1987        j++;
1988    }
1989
1990    free(pTmpData);
1991    pTmpData = M4OSA_NULL;
1992
1993    /* convert RGB888 to RGB565 */
1994
1995    /* allocate temp RGB 565 buffer */
1996    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
1997                       (4 * (framingCtx->width + framingCtx->height + 1)),
1998                        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
1999    if (TempPacData == M4OSA_NULL) {
2000        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
2001        free(rgbPlane.pac_data);
2002        return M4ERR_ALLOC;
2003    }
2004
2005    ptr = (M4OSA_UInt16 *)TempPacData;
2006    z = 0;
2007
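    /* Walk the RGB888 buffer three bytes (one pixel) at a time and pack each pixel into a
       16-bit RGB565 value via the PACK_RGB565 macro */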
2008    for (i = 0; i < j ; i += 3)
2009    {
2010        ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
2011                                    rgbPlane.pac_data[i+1],
2012                                    rgbPlane.pac_data[i+2]);
2013    }
2014
2015    /* free the RBG888 and assign RGB565 */
2016    free(rgbPlane.pac_data);
2017    rgbPlane.pac_data = TempPacData;
2018
2019    /**
2020     * Check if output sizes are odd */
2021    if(rgbPlane.u_height % 2 != 0)
2022    {
2023        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
2024        M4OSA_UInt32 i;
2025        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2026             output height is odd  ");
2027        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
2028
2029        for(i=0;i<rgbPlane.u_width;i++)
2030        {
2031            *output_pac_data++ = transparent1;
2032            *output_pac_data++ = transparent2;
2033        }
2034
2035        /**
2036         * We just add an extra line at the bottom, filled with the transparent color */
2037        rgbPlane.u_height++;
2038    }
2039    if(rgbPlane.u_width % 2 != 0)
2040    {
2041        /**
2042         * We add a new column filled with the transparent color, but we need to parse all RGB lines ... */
2043        M4OSA_UInt32 i;
2044        M4VIFI_UInt8* newRGBpac_data;
2045        M4VIFI_UInt8* output_pac_data, *input_pac_data;
2046
2047        rgbPlane.u_width++;
2048        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2049             output width is odd  ");
2050        /**
2051         * We need to allocate a new RGB output buffer in which all decoded data
2052          + white line will be copied */
2053        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
2054            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");
2055
2056        if(newRGBpac_data == M4OSA_NULL)
2057        {
2058            M4OSA_TRACE1_0("Allocation error in \
2059                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2060            free(rgbPlane.pac_data);
2061            return M4ERR_ALLOC;
2062        }
2063
2064        output_pac_data= newRGBpac_data;
2065        input_pac_data = rgbPlane.pac_data;
2066
2067        for(i=0;i<rgbPlane.u_height;i++)
2068        {
2069            memcpy((void *)output_pac_data, (void *)input_pac_data,
2070                 (rgbPlane.u_width-1)*2);
2071
2072            output_pac_data += ((rgbPlane.u_width-1)*2);
2073            /* Put the pixel to transparency color */
2074            *output_pac_data++ = transparent1;
2075            *output_pac_data++ = transparent2;
2076
2077            input_pac_data += ((rgbPlane.u_width-1)*2);
2078        }
2079        free(rgbPlane.pac_data);
2080        rgbPlane.pac_data = newRGBpac_data;
2081    }
2082
2083    /* reset stride */
2084    rgbPlane.u_stride = rgbPlane.u_width*2;
2085
2086    /**
2087     * Initialize chained list parameters */
2088    framingCtx->duration = 0;
2089    framingCtx->previousClipTime = -1;
2090    framingCtx->previewOffsetClipTime = -1;
2091
2092    /**
2093     * Only one element in the chained list (no animated image ...) */
2094    framingCtx->pCurrent = framingCtx;
2095    framingCtx->pNext = framingCtx;
2096
2097    /**
2098     * Get output width/height */
2099     switch(OutputVideoResolution)
2100    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
2101    {
2102    case M4VIDEOEDITING_kSQCIF:
2103        width_out = 128;
2104        height_out = 96;
2105        break;
2106    case M4VIDEOEDITING_kQQVGA:
2107        width_out = 160;
2108        height_out = 120;
2109        break;
2110    case M4VIDEOEDITING_kQCIF:
2111        width_out = 176;
2112        height_out = 144;
2113        break;
2114    case M4VIDEOEDITING_kQVGA:
2115        width_out = 320;
2116        height_out = 240;
2117        break;
2118    case M4VIDEOEDITING_kCIF:
2119        width_out = 352;
2120        height_out = 288;
2121        break;
2122    case M4VIDEOEDITING_kVGA:
2123        width_out = 640;
2124        height_out = 480;
2125        break;
2126    case M4VIDEOEDITING_kWVGA:
2127        width_out = 800;
2128        height_out = 480;
2129        break;
2130    case M4VIDEOEDITING_kNTSC:
2131        width_out = 720;
2132        height_out = 480;
2133        break;
2134    case M4VIDEOEDITING_k640_360:
2135        width_out = 640;
2136        height_out = 360;
2137        break;
2138    case M4VIDEOEDITING_k854_480:
2139        // StageFright encoders require %16 resolution
2140        width_out = M4ENCODER_854_480_Width;
2141        height_out = 480;
2142        break;
2143    case M4VIDEOEDITING_k1280_720:
2144        width_out = 1280;
2145        height_out = 720;
2146        break;
2147    case M4VIDEOEDITING_k1080_720:
2148        // StageFright encoders require %16 resolution
2149        width_out = M4ENCODER_1080_720_Width;
2150        height_out = 720;
2151        break;
2152    case M4VIDEOEDITING_k960_720:
2153        width_out = 960;
2154        height_out = 720;
2155        break;
2156    case M4VIDEOEDITING_k1920_1080:
2157        width_out = 1920;
2158        height_out = M4ENCODER_1920_1080_Height;
2159        break;
2160    /**
2161     * If output video size is not given, we take QCIF size,
2162     * should not happen, because already done in M4xVSS_sendCommand */
2163    default:
2164        width_out = 176;
2165        height_out = 144;
2166        break;
2167    }
2168
2169    /**
2170     * Allocate output planes structures */
2171    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
2172         (M4OSA_Char *)"Framing Output plane RGB");
2173    if(framingCtx->FramingRgb == M4OSA_NULL)
2174    {
2175        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2176        return M4ERR_ALLOC;
2177    }
2178    /**
2179     * Resize RGB if needed */
2180    if((pEffect->xVSS.bResize) &&
2181         (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
2182    {
2183        width = width_out;
2184        height = height_out;
2185
2186        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2187             New Width and height %d %d  ",width,height);
2188
2189        framingCtx->FramingRgb->u_height = height_out;
2190        framingCtx->FramingRgb->u_width = width_out;
2191        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2192        framingCtx->FramingRgb->u_topleft = 0;
2193
2194        framingCtx->FramingRgb->pac_data =
2195             (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
2196                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
2197                  (M4OSA_Char *)"Framing Output pac_data RGB");
2198
2199        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
2200        {
2201            M4OSA_TRACE1_0("Allocation error in \
2202                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2203            free(framingCtx->FramingRgb);
2204            free(rgbPlane.pac_data);
2205            return M4ERR_ALLOC;
2206        }
2207
2208        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
2209        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2210              rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
2211
2212        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2213        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2214
2215        if(err != M4NO_ERROR)
2216        {
2217            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
2218                when resizing RGB plane: 0x%x\n", err);
2219            return err;
2220        }
2221
2222        if(rgbPlane.pac_data != M4OSA_NULL)
2223        {
2224            free(rgbPlane.pac_data);
2225            rgbPlane.pac_data = M4OSA_NULL;
2226        }
2227    }
2228    else
2229    {
2230
2231        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2232              Resizing Not Needed ");
2233
2234        width = rgbPlane.u_width;
2235        height = rgbPlane.u_height;
2236        framingCtx->FramingRgb->u_height = height;
2237        framingCtx->FramingRgb->u_width = width;
2238        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2239        framingCtx->FramingRgb->u_topleft = 0;
2240        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
2241    }
2242
2243
2244    if(pEffect->xVSS.bResize)
2245    {
2246        /**
2247         * Force topleft to 0 for pure framing effect */
2248        framingCtx->topleft_x = 0;
2249        framingCtx->topleft_y = 0;
2250    }
2251
2252
2253    /**
2254     * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
2255     effect */
2256    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
2257         (M4OSA_Char *)"Framing Output plane YUV");
2258    if(framingCtx->FramingYuv == M4OSA_NULL)
2259    {
2260        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2261        free(framingCtx->FramingRgb->pac_data);
2262        return M4ERR_ALLOC;
2263    }
2264
2265    // Alloc for Y, U and V planes
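    /* Plane dimensions are rounded up to even values as required by YUV420; the U and V
       planes are half the Y plane size in both directions */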
2266    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
2267    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
2268    framingCtx->FramingYuv[0].u_topleft = 0;
2269    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
2270    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
2271        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
2272            (M4OSA_Char *)"Alloc for the output Y");
2273    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
2274    {
2275        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2276        free(framingCtx->FramingYuv);
2277        free(framingCtx->FramingRgb->pac_data);
2278        return M4ERR_ALLOC;
2279    }
2280    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
2281    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
2282    framingCtx->FramingYuv[1].u_topleft = 0;
2283    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
2284
2285
2286    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2287        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
2288        (M4OSA_Char *)"Alloc for the output U");
2289    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
2290        free(framingCtx->FramingYuv[0].pac_data);
2291        free(framingCtx->FramingYuv);
2292        free(framingCtx->FramingRgb->pac_data);
2293        return M4ERR_ALLOC;
2294    }
2295
2296    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
2297    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
2298    framingCtx->FramingYuv[2].u_topleft = 0;
2299    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
2300
2301
2302    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2303        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[2].u_height, M4VS,
2304        (M4OSA_Char *)"Alloc for the output V");
2305    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
2306        free(framingCtx->FramingYuv[1].pac_data);
2307        free(framingCtx->FramingYuv[0].pac_data);
2308        free(framingCtx->FramingYuv);
2309        free(framingCtx->FramingRgb->pac_data);
2310        return M4ERR_ALLOC;
2311    }
2312
2313    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2314        convert RGB to YUV ");
2315
2316    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
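    /* The framing RGB buffer is stored as RGB565 (u_stride = u_width * 2), so the
     RGB565 variant of the converter is used below instead of the RGB888 one above */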
2317    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2318
2319    if (err != M4NO_ERROR)
2320    {
2321        M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
2322    }
2323    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
2324    return err;
2325}
2326
2327/**
2328 ******************************************************************************
2329 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2330 *
2331 * @brief    This function prepares VSS for editing
2332 * @note    It also sets the special xVSS effects as external effects for the VSS
2333 * @param    pContext    (IN) The integrator own context
2334 *
2335 * @return    M4NO_ERROR:    No error
2336 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2337 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2338 ******************************************************************************
2339 */
2340M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2341{
2342    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2343    M4VSS3GPP_EditContext pVssCtxt;
2344    M4OSA_UInt32 i,j;
2345    M4OSA_ERR err;
2346
2347    /**
2348     * Create a VSS 3GPP edition instance */
2349    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2350    if (err != M4NO_ERROR)
2351    {
2352        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
2353            err);
2354        M4VSS3GPP_editCleanUp(pVssCtxt);
2355        /**
2356         * Set the VSS context to NULL */
2357        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2358        return err;
2359    }
2360
2361    M4VSS3GPP_InternalEditContext* pVSSContext =
2362        (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2363    pVSSContext->xVSS.outputVideoFormat =
2364        xVSS_context->pSettings->xVSS.outputVideoFormat;
2365    pVSSContext->xVSS.outputVideoSize =
2366        xVSS_context->pSettings->xVSS.outputVideoSize;
2367    pVSSContext->xVSS.outputAudioFormat =
2368        xVSS_context->pSettings->xVSS.outputAudioFormat;
2369    pVSSContext->xVSS.outputAudioSamplFreq =
2370        xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2371    pVSSContext->xVSS.outputVideoBitrate =
2372        xVSS_context->pSettings->xVSS.outputVideoBitrate;
2373    pVSSContext->xVSS.outputAudioBitrate =
2374        xVSS_context->pSettings->xVSS.outputAudioBitrate;
2375    pVSSContext->xVSS.bAudioMono =
2376        xVSS_context->pSettings->xVSS.bAudioMono;
2377    pVSSContext->xVSS.outputVideoProfile =
2378        xVSS_context->pSettings->xVSS.outputVideoProfile;
2379    pVSSContext->xVSS.outputVideoLevel =
2380        xVSS_context->pSettings->xVSS.outputVideoLevel;
2381    /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
2382    if(xVSS_context->targetedBitrate != 0)
2383    {
2384        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2385
2386        pVSSContext->bIsMMS = M4OSA_TRUE;
2387        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
2388        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
2389    }
2390
2391    /*Warning: since the addition of the UTF conversion, pSettings has been replaced in the
2392    code below by pCurrentEditSettings (there is a dedicated current-edit structure for the
2393     saving, as for the preview)*/
2394
2395    /**
2396     * Set the external video effect functions, for saving mode (to be moved to
2397      M4xVSS_saveStart() ?)*/
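    /* Each xVSS effect type is mapped below to a VSS 3GPP external-effect callback: the
     colour-type effects (black & white, pink, green, sepia, negative, RGB16, gradient)
     all share M4VSS3GPP_externalVideoEffectColor and reuse the colour context created in
     the sendCommand step, while framing, fifties and zoom use their dedicated callbacks */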
2398    for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2399    {
2400        for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
2401        {
2402            if (M4xVSS_kVideoEffectType_BlackAndWhite ==
2403            xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2404            {
2405                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2406                 M4VSS3GPP_externalVideoEffectColor;
2407                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2408                // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
2409                /*commented FB*/
2410                /**
2411                 * We do not need to set the color context, it is already set
2412                 during sendCommand function */
2413            }
2414            if (M4xVSS_kVideoEffectType_Pink ==
2415                xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2416            {
2417                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2418                 M4VSS3GPP_externalVideoEffectColor;
2419                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2420                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
2421                // use any function context */
2422                /*commented FB*/
2423                /**
2424                 * We do not need to set the color context,
2425                  it is already set during sendCommand function */
2426            }
2427            if (M4xVSS_kVideoEffectType_Green ==
2428                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2429            {
2430                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2431                    M4VSS3GPP_externalVideoEffectColor;
2432                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2433                    // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
2434                     /**< we don't use any function context */
2435                /*commented FB*/
2436                /**
2437                 * We do not need to set the color context, it is already set during
2438                  sendCommand function */
2439            }
2440            if (M4xVSS_kVideoEffectType_Sepia ==
2441                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2442            {
2443                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2444                 M4VSS3GPP_externalVideoEffectColor;
2445                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2446                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
2447                /**< we don't use any function context */
2448                /*commented FB*/
2449                /**
2450                 * We do not need to set the color context, it is already set during
2451                 sendCommand function */
2452            }
2453            if (M4xVSS_kVideoEffectType_Fifties ==
2454             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2455            {
2456                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2457                 M4VSS3GPP_externalVideoEffectFifties;
2458                /**
2459                 * We do not need to set the framing context, it is already set during
2460                 sendCommand function */
2461            }
2462            if (M4xVSS_kVideoEffectType_Negative ==
2463             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2464            {
2465                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2466                 M4VSS3GPP_externalVideoEffectColor;
2467                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2468                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
2469                 /**< we don't use any function context */
2470                /*commented FB*/
2471                /**
2472                 * We do not need to set the color context, it is already set during
2473                  sendCommand function */
2474            }
2475            if (M4xVSS_kVideoEffectType_Framing ==
2476             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2477            {
2478                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2479                 M4VSS3GPP_externalVideoEffectFraming;
2480                /**
2481                 * We do not need to set the framing context, it is already set during
2482                 sendCommand function */
2483            }
2484            if (M4xVSS_kVideoEffectType_ZoomIn ==
2485             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2486            {
2487                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2488                 M4VSS3GPP_externalVideoEffectZoom;
2489                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2490                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
2491                 function context */
2492            }
2493            if (M4xVSS_kVideoEffectType_ZoomOut ==
2494             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2495            {
2496                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2497                 M4VSS3GPP_externalVideoEffectZoom;
2498                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2499                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
2500                 function context */
2501            }
2502            if (M4xVSS_kVideoEffectType_ColorRGB16 ==
2503             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2504            {
2505                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2506                 M4VSS3GPP_externalVideoEffectColor;
2507                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2508                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2509                /**< we don't use any function context */
2510                /**
2511                 * We do not need to set the color context, it is already set during
2512                 sendCommand function */
2513            }
2514            if (M4xVSS_kVideoEffectType_Gradient ==
2515             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2516            {
2517                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2518                 M4VSS3GPP_externalVideoEffectColor;
2519                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2520                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Gradient;
2521                /**< we don't use any function context */
2522                /**
2523                 * We do not need to set the color context, it is already set during
2524                 sendCommand function */
2525            }
2526
2527        }
2528    }
2529
2530    /**
2531     * Open the VSS 3GPP */
2532    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
2533    if (err != M4NO_ERROR)
2534    {
2535        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2536             M4VSS3GPP_editOpen returned 0x%x\n",err);
2537        M4VSS3GPP_editCleanUp(pVssCtxt);
2538        /**
2539         * Set the VSS context to NULL */
2540        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2541        return err;
2542    }
2543
2544    /**
2545     * Save VSS context to be able to close / free VSS later */
2546    xVSS_context->pCurrentEditContext = pVssCtxt;
2547
2548    return M4NO_ERROR;
2549}
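#if 0
/* Illustrative, non-compiled sketch of how this function is expected to be driven.
 The step loop and the completion code (M4VSS3GPP_editStep, M4VSS3GPP_WAR_EDITING_DONE)
 are assumptions based on the VSS 3GPP API, not taken from this file. */
static M4OSA_ERR M4xVSS_exampleGenerateAndSave(M4xVSS_Context* xVSS_context)
{
    M4OSA_UInt8 progress = 0;
    M4OSA_ERR err;

    /* Create, configure and open the VSS 3GPP edit instance */
    err = M4xVSS_internalGenerateEditedFile(xVSS_context);
    if (err != M4NO_ERROR)
        return err;

    /* Step the edition until the VSS reports that it is done */
    do
    {
        err = M4VSS3GPP_editStep(xVSS_context->pCurrentEditContext, &progress);
    } while (err == M4NO_ERROR);

    if (err != M4VSS3GPP_WAR_EDITING_DONE)
    {
        M4VSS3GPP_editCleanUp(xVSS_context->pCurrentEditContext);
        xVSS_context->pCurrentEditContext = M4OSA_NULL;
        return err;
    }

    /* Close and free the edit instance */
    return M4xVSS_internalCloseEditedFile(xVSS_context);
}
#endif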
2550
2551/**
2552 ******************************************************************************
2553 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2554 *
2555 * @brief    This function cleans up VSS
2556 * @note
2557 * @param    pContext    (IN) The integrator own context
2558 *
2559 * @return    M4NO_ERROR:    No error
2560 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2561 ******************************************************************************
2562 */
2563M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2564{
2565    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2566    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
2567    M4OSA_ERR err;
2568
2569    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
2570    {
2571        /**
2572         * Close the VSS 3GPP */
2573        err = M4VSS3GPP_editClose(pVssCtxt);
2574        if (err != M4NO_ERROR)
2575        {
2576            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
2577                 M4VSS3GPP_editClose returned 0x%x\n",err);
2578            M4VSS3GPP_editCleanUp(pVssCtxt);
2579            /**
2580             * Set the VSS context to NULL */
2581            xVSS_context->pCurrentEditContext = M4OSA_NULL;
2582            return err;
2583        }
2584
2585        /**
2586         * Free this VSS3GPP edition instance */
2587        err = M4VSS3GPP_editCleanUp(pVssCtxt);
2588        /**
2589         * Set the VSS context to NULL */
2590        xVSS_context->pCurrentEditContext = M4OSA_NULL;
2591        if (err != M4NO_ERROR)
2592        {
2593            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
2594                M4VSS3GPP_editCleanUp returned 0x%x\n",err);
2595            return err;
2596        }
2597    }
2598
2599    return M4NO_ERROR;
2600}
2601
2602/**
2603 ******************************************************************************
2604 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2605 *
2606 * @brief    This function prepares VSS for audio mixing
2607 * @note    It takes its parameters from the BGM settings in the xVSS internal context
2608 * @param    pContext    (IN) The integrator own context
2609 *
2610 * @return    M4NO_ERROR:    No error
2611 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2612 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2613 ******************************************************************************
2614 */
2615/***
2616 * FB: the function has been modified because the structure used for the saving is now
2617 *  pCurrentEditSettings and no longer pSettings
2618 * This change has been added for the UTF support
2619 * All occurrences of "xVSS_context->pSettings" have been replaced by "xVSS_context->pCurrentEditSettings"
2620 ***/
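/* Summary of the mixing setup below: the BGM file becomes the "added" audio track,
 volumes given as percentages are converted to 0..1 levels, optional ducking and the
 loop window [uiBeginLoop, uiEndLoop] come straight from the BGM settings, the input
 clip is the intermediate edited 3GP (pCurrentEditSettings->pOutputFile) and the mixer
 writes directly to the final output file */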
2621M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2622{
2623    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2624    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
2625    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
2626    M4OSA_ERR err;
2627    M4VIDEOEDITING_ClipProperties fileProperties;
2628
2629    /**
2630     * Allocate audio mixing settings structure and fill it with BGM parameters */
2631    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
2632        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
2633    if(pAudioMixSettings == M4OSA_NULL)
2634    {
2635        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
2636        return M4ERR_ALLOC;
2637    }
2638
2639    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
2640         M4VIDEOEDITING_kFileType_3GPP)
2641    {
2642        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
2643             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
2644                 &fileProperties);
2645        if(err != M4NO_ERROR)
2646        {
2647            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2648                 impossible to retrieve audio BGM properties (err 0x%x) ->\
2649                     re-encoding audio background music", err);
2650            fileProperties.AudioStreamType =
2651                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
2652                  /* To force BGM encoding */
2653        }
2654    }
2655
2656    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
2657    pAudioMixSettings->AddedAudioFileType =
2658     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
2659    pAudioMixSettings->pAddedAudioTrackFile =
2660     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
2661    pAudioMixSettings->uiAddVolume =
2662     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
2663
2664    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
2665    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2666    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
2667    pAudioMixSettings->uiSamplingFrequency =
2668     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
2669    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
2670
2671    pAudioMixSettings->b_DuckingNeedeed =
2672     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
2673    pAudioMixSettings->fBTVolLevel =
2674     (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
2675    pAudioMixSettings->InDucking_threshold =
2676     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
2677    pAudioMixSettings->InDucking_lowVolume =
2678     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
2679    pAudioMixSettings->fPTVolLevel =
2680     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
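    /* Volume levels: uiAddVolume and PTVolLevel are expressed as percentages and are
     converted here to 0..1 floats; fBTVolLevel presumably applies to the added
     background track and fPTVolLevel to the primary (edited) track */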
2681    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
2682
2683    if(xVSS_context->pSettings->xVSS.bAudioMono)
2684    {
2685        pAudioMixSettings->outputNBChannels = 1;
2686    }
2687    else
2688    {
2689        pAudioMixSettings->outputNBChannels = 2;
2690    }
2691
2692    /**
2693     * Fill audio mix settings with BGM parameters */
2694    pAudioMixSettings->uiBeginLoop =
2695     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
2696    pAudioMixSettings->uiEndLoop =
2697     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
2698    pAudioMixSettings->uiAddCts =
2699     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
2700
2701    /**
2702     * Output file of the audio mixer will be the final file (audio mixing is the last step) */
2703    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
2704    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
2705
2706    /**
2707     * Input file of the audio mixer is a temporary file containing all audio/video editions */
2708    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
2709
2710    /**
2711     * Save audio mixing settings pointer to be able to free it in
2712     M4xVSS_internalCloseAudioMixedFile function */
2713    xVSS_context->pAudioMixSettings = pAudioMixSettings;
2714
2715    /**
2716     * Create a VSS 3GPP audio mixing instance */
2717    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
2718         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2719
2720    /**
2721     * Save audio mixing context to be able to call audio mixing step function in
2722      M4xVSS_step function */
2723    xVSS_context->pAudioMixContext = pAudioMixingCtxt;
2724
2725    if (err != M4NO_ERROR)
2726    {
2727        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2728             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
2729        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
2730        return err;
2731    }
2732
2733    return M4NO_ERROR;
2734}
2735
2736/**
2737 ******************************************************************************
2738 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2739 *
2740 * @brief    This function cleans up VSS for audio mixing
2741 * @note
2742 * @param    pContext    (IN) The integrator own context
2743 *
2744 * @return    M4NO_ERROR:    No error
2745 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2746 ******************************************************************************
2747 */
2748M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2749{
2750    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2751    M4OSA_ERR err;
2752
2753    /**
2754     * Free this VSS3GPP audio mixing instance */
2755    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
2756    {
2757        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
2758        if (err != M4NO_ERROR)
2759        {
2760            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
2761                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
2762            return err;
2763        }
2764    }
2765
2766    /**
2767     * Free VSS audio mixing settings */
2768    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
2769    {
2770        free(xVSS_context->pAudioMixSettings);
2771        xVSS_context->pAudioMixSettings = M4OSA_NULL;
2772    }
2773
2774    return M4NO_ERROR;
2775}
2776
2777/**
2778 ******************************************************************************
2779 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2780 *
2781 * @brief    This function cleans up the preview edition structure used to generate the
2782 *            preview.3gp file given to the VPS
2783 * @note    It also frees the preview structure given to the VPS
2784 * @param    pContext    (IN) The integrator own context
2785 *
2786 * @return    M4NO_ERROR:    No error
2787 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2788 ******************************************************************************
2789 */
2790M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2791{
2792    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2793    M4OSA_UInt8 i;
2794
2795    /**
2796     * Free clip/transition settings */
2797    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2798    {
2799        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2800
2801        free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2802        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2803
2804        /**
2805         * Because there is 1 less transition than clip number */
2806        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2807        {
2808            free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2809            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2810        }
2811    }
2812
2813    /**
2814     * Free clip/transition list */
2815    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2816    {
2817        free((xVSS_context->pCurrentEditSettings->pClipList));
2818        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2819    }
2820    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2821    {
2822        free((xVSS_context->pCurrentEditSettings->pTransitionList));
2823        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2824    }
2825
2826    /**
2827     * Free output preview file path */
2828    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2829    {
2830        free(xVSS_context->pCurrentEditSettings->pOutputFile);
2831        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2832    }
2833
2834    /**
2835     * Free temporary preview file path */
2836    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2837    {
2838        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2839        free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2840        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2841    }
2842
2843    /**
2844     * Free "local" BGM settings */
2845    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2846    {
2847        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2848        {
2849            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2850            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2851        }
2852        free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2853        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2854    }
2855
2856    /**
2857     * Free current edit settings structure */
2858    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2859    {
2860        free(xVSS_context->pCurrentEditSettings);
2861        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2862    }
2863
2864    /**
2865     * Free preview effects given to application */
2866    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
2867    {
2868        free(xVSS_context->pPreviewSettings->Effects);
2869        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
2870        xVSS_context->pPreviewSettings->nbEffects = 0;
2871    }
2872
2873    return M4NO_ERROR;
2874}
2875
2876
2877/**
2878 ******************************************************************************
2879 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2880 *
2881 * @brief    This function cleans up the saving edition structure used to generate the
2882 *            output.3gp file given to the VPS
2883 * @note
2884 * @param    pContext    (IN) The integrator own context
2885 *
2886 * @return    M4NO_ERROR:    No error
2887 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2888 ******************************************************************************
2889 */
2890M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2891{
2892    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2893    M4OSA_UInt8 i;
2894
2895    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2896    {
2897        /**
2898         * Free clip/transition settings */
2899        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2900        {
2901            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2902
2903            free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2904            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2905
2906            /**
2907             * Because there is 1 less transition than clip number */
2908            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2909            {
2910                free(\
2911                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2912                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2913            }
2914        }
2915
2916        /**
2917         * Free clip/transition list */
2918        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2919        {
2920            free((xVSS_context->pCurrentEditSettings->pClipList));
2921            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2922        }
2923        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2924        {
2925            free((xVSS_context->pCurrentEditSettings->pTransitionList));
2926            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2927        }
2928
2929        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
2930        {
2931            free((xVSS_context->pCurrentEditSettings->Effects));
2932            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
2933            xVSS_context->pCurrentEditSettings->nbEffects = 0;
2934        }
2935
2936        /**
2937         * Free output saving file path */
2938        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2939        {
2940            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2941            {
2942                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
2943                free(xVSS_context->pCurrentEditSettings->pOutputFile);
2944            }
2945            if(xVSS_context->pOutputFile != M4OSA_NULL)
2946            {
2947                free(xVSS_context->pOutputFile);
2948                xVSS_context->pOutputFile = M4OSA_NULL;
2949            }
2950            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
2951            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2952        }
2953
2954        /**
2955         * Free temporary saving file path */
2956        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2957        {
2958            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2959            free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2960            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2961        }
2962
2963        /**
2964         * Free "local" BGM settings */
2965        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2966        {
2967            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2968            {
2969                free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2970                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2971            }
2972            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2973            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2974        }
2975
2976        /**
2977         * Free current edit settings structure */
2978        free(xVSS_context->pCurrentEditSettings);
2979        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2980    }
2981
2982    return M4NO_ERROR;
2983}
2984
2985
2986/**
2987 ******************************************************************************
2988 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2989 *
2990 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
2991 * @note
2992 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
2993 *
2994 * @return    M4NO_ERROR:    No error
2995 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2996 ******************************************************************************
2997 */
2998M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
2999{
3000    M4OSA_UInt8 i,j;
3001
3002    /**
3003     * For each clip ... */
3004    for(i=0; i<pSettings->uiClipNumber; i++)
3005    {
3006        /**
3007         * ... free clip settings */
3008        if(pSettings->pClipList[i] != M4OSA_NULL)
3009        {
3010            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
3011
3012            free((pSettings->pClipList[i]));
3013            pSettings->pClipList[i] = M4OSA_NULL;
3014        }
3015
3016        /**
3017         * ... free transition settings */
3018        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
3019        {
3020            if(pSettings->pTransitionList[i] != M4OSA_NULL)
3021            {
3022                switch (pSettings->pTransitionList[i]->VideoTransitionType)
3023                {
3024                    case M4xVSS_kVideoTransitionType_AlphaMagic:
3025
3026                        /**
3027                         * In case of Alpha Magic transition,
3028                          some extra parameters need to be freed */
3029                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
3030                             != M4OSA_NULL)
3031                        {
3032                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3033                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3034                                    pPlane->pac_data));
3035                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
3036                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
3037
3038                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3039                                pSettings->pTransitionList[i]->\
3040                                    pExtVideoTransitionFctCtxt)->pPlane));
3041                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
3042                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
3043
3044                            free((pSettings->pTransitionList[i]->\
3045                                pExtVideoTransitionFctCtxt));
3046                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3047
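                            /* Transitions that reference the same alpha file share the
                             same transition context; clear the pointer in the remaining
                             transitions so it is not freed a second time below */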
3048                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
3049                            {
3050                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
3051                                {
3052                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
3053                                     M4xVSS_kVideoTransitionType_AlphaMagic)
3054                                    {
3055                                        M4OSA_UInt32 pCmpResult=0;
3056                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
3057                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3058                                                pAlphaFilePath,
3059                                                (const char *)pSettings->pTransitionList[j]->\
3060                                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3061                                                pAlphaFilePath);
3062                                        if(pCmpResult == 0)
3063                                        {
3064                                            /* Free extra internal alpha magic structure and put
3065                                            it to NULL to avoid refreeing it */
3066                                            free((pSettings->\
3067                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
3068                                            pSettings->pTransitionList[j]->\
3069                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
3070                                        }
3071                                    }
3072                                }
3073                            }
3074                        }
3075
3076                        if(pSettings->pTransitionList[i]->\
3077                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
3078                        {
3079                            if(pSettings->pTransitionList[i]->\
3080                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3081                                    pAlphaFilePath != M4OSA_NULL)
3082                            {
3083                                free(pSettings->\
3084                                    pTransitionList[i]->\
3085                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3086                                            pAlphaFilePath);
3087                                pSettings->pTransitionList[i]->\
3088                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
3089                                        pAlphaFilePath = M4OSA_NULL;
3090                            }
3091                            free(pSettings->pTransitionList[i]->\
3092                                xVSS.transitionSpecific.pAlphaMagicSettings);
3093                            pSettings->pTransitionList[i]->\
3094                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
3095
3096                        }
3097
3098                    break;
3099
3100
3101                    case M4xVSS_kVideoTransitionType_SlideTransition:
3102                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
3103                            xVSS.transitionSpecific.pSlideTransitionSettings)
3104                        {
3105                            free(pSettings->pTransitionList[i]->\
3106                                xVSS.transitionSpecific.pSlideTransitionSettings);
3107                            pSettings->pTransitionList[i]->\
3108                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
3109                        }
3110                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
3111                        {
3112                            free((pSettings->pTransitionList[i]->\
3113                                pExtVideoTransitionFctCtxt));
3114                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3115                        }
3116                    break;
3117                    default:
3118                    break;
3119
3120                }
3121                /**
3122                 * Free transition settings structure */
3123                free((pSettings->pTransitionList[i]));
3124                pSettings->pTransitionList[i] = M4OSA_NULL;
3125            }
3126        }
3127    }
3128
3129    /**
3130     * Free clip list */
3131    if(pSettings->pClipList != M4OSA_NULL)
3132    {
3133        free((pSettings->pClipList));
3134        pSettings->pClipList = M4OSA_NULL;
3135    }
3136
3137    /**
3138     * Free transition list */
3139    if(pSettings->pTransitionList != M4OSA_NULL)
3140    {
3141        free((pSettings->pTransitionList));
3142        pSettings->pTransitionList = M4OSA_NULL;
3143    }
3144
3145    /**
3146     * RC: Free effects list */
3147    if(pSettings->Effects != M4OSA_NULL)
3148    {
3149        for(i=0; i<pSettings->nbEffects; i++)
3150        {
3151            /**
3152             * For each clip, free framing structure if needed */
3153            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
3154                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
3155            {
3156#ifdef DECODE_GIF_ON_SAVING
3157                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3158#else
3159                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3160                M4xVSS_FramingStruct* framingCtx_save;
3161                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
3162#endif
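                /* Two build variants follow: with DECODE_GIF_ON_SAVING the effect
                 context is an M4xVSS_FramingContext whose decoded frames live in
                 aFramingCtx / aFramingCtx_last; otherwise the context is a circular
                 list of M4xVSS_FramingStruct nodes that is walked until the first
                 node is reached again */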
3163
3164#ifdef DECODE_GIF_ON_SAVING
3165                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3166                 non-existent pointer */
3167                {
3168                    if(framingCtx->aFramingCtx != M4OSA_NULL)
3169                    {
3170                        {
3171                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
3172                            {
3173                                free(framingCtx->aFramingCtx->\
3174                                    FramingRgb->pac_data);
3175                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
3176                                free(framingCtx->aFramingCtx->FramingRgb);
3177                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
3178                            }
3179                        }
3180                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
3181                        {
3182                            free(framingCtx->aFramingCtx->\
3183                                FramingYuv[0].pac_data);
3184                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3185                           free(framingCtx->aFramingCtx->\
3186                                FramingYuv[1].pac_data);
3187                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
3188                           free(framingCtx->aFramingCtx->\
3189                                FramingYuv[2].pac_data);
3190                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
3191                            free(framingCtx->aFramingCtx->FramingYuv);
3192                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
3193                        }
3194                        free(framingCtx->aFramingCtx);
3195                        framingCtx->aFramingCtx = M4OSA_NULL;
3196                    }
3197                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
3198                    {
3199                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
3200                        {
3201                            free(framingCtx->aFramingCtx_last->\
3202                                FramingRgb->pac_data);
3203                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
3204                            free(framingCtx->aFramingCtx_last->\
3205                                FramingRgb);
3206                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
3207                        }
3208                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
3209                        {
3210                            free(framingCtx->aFramingCtx_last->\
3211                                FramingYuv[0].pac_data);
3212                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
3213                            free(framingCtx->aFramingCtx_last->FramingYuv);
3214                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
3215                        }
3216                        free(framingCtx->aFramingCtx_last);
3217                        framingCtx->aFramingCtx_last = M4OSA_NULL;
3218                    }
3219                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
3220                    {
3221                        free(framingCtx->pEffectFilePath);
3222                        framingCtx->pEffectFilePath = M4OSA_NULL;
3223                    }
3224                    /*In case it is still allocated*/
3225                    if(framingCtx->pSPSContext != M4OSA_NULL)
3226                    {
3227                    //    M4SPS_destroy(framingCtx->pSPSContext);
3228                        framingCtx->pSPSContext = M4OSA_NULL;
3229                    }
3230                    /*Alpha blending structure*/
3231                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
3232                    {
3233                        free(framingCtx->alphaBlendingStruct);
3234                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
3235                    }
3236
3237                    free(framingCtx);
3238                    framingCtx = M4OSA_NULL;
3239                }
3240#else
3241                do
3242                {
3243                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3244                    non-existent pointer */
3245                    {
3246                        if(framingCtx->FramingRgb != M4OSA_NULL)
3247                        {
3248                            free(framingCtx->FramingRgb->pac_data);
3249                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
3250                            free(framingCtx->FramingRgb);
3251                            framingCtx->FramingRgb = M4OSA_NULL;
3252                        }
3253                        if(framingCtx->FramingYuv != M4OSA_NULL)
3254                        {
3255                            free(framingCtx->FramingYuv[0].pac_data);
3256                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3257                            free(framingCtx->FramingYuv);
3258                            framingCtx->FramingYuv = M4OSA_NULL;
3259                        }
3260                        framingCtx_save = framingCtx->pNext;
3261                        free(framingCtx);
3262                        framingCtx = M4OSA_NULL;
3263                        framingCtx = framingCtx_save;
3264                    }
3265                    else
3266                    {
3267                        /*FB: bug fix P4ME00003002*/
3268                        break;
3269                    }
3270                } while(framingCtx_first != framingCtx);
3271#endif
3272            }
3273            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
3274            {
3275                /* Free Fifties context */
3276                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3277
3278                if(FiftiesCtx != M4OSA_NULL)
3279                {
3280                    free(FiftiesCtx);
3281                    FiftiesCtx = M4OSA_NULL;
3282                }
3283
3284            }
3285            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
3286                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
3287                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
3288                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
3289                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
3290                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
3291                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
3292            {
3293                /* Free Color context */
3294                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3295
3296                if(ColorCtx != M4OSA_NULL)
3297                {
3298                    free(ColorCtx);
3299                    ColorCtx = M4OSA_NULL;
3300                }
3301            }
3302
3303            /* Free simple fields */
3304            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
3305            {
3306                free(pSettings->Effects[i].xVSS.pFramingFilePath);
3307                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
3308            }
3309            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
3310            {
3311                free(pSettings->Effects[i].xVSS.pFramingBuffer);
3312                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
3313            }
3314            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
3315            {
3316                free(pSettings->Effects[i].xVSS.pTextBuffer);
3317                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
3318            }
3319        }
3320        free(pSettings->Effects);
3321        pSettings->Effects = M4OSA_NULL;
3322    }
3323
3324    return M4NO_ERROR;
3325}
3326
3327M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
3328{
3329    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3330//    M4OSA_UInt8 i,j;
3331
3332    /* Free "local" BGM settings */
3333    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3334    {
3335        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3336        {
3337            free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
3338            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3339        }
3340        free(xVSS_context->pSettings->xVSS.pBGMtrack);
3341        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
3342    }
3343
3344    M4xVSS_freeSettings(xVSS_context->pSettings);
3345
3346    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
3347    {
3348        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
3349        M4xVSS_Pto3GPP_params* pParams_sauv;
3350
3351        while(pParams != M4OSA_NULL)
3352        {
3353            if(pParams->pFileIn != M4OSA_NULL)
3354            {
3355                free(pParams->pFileIn);
3356                pParams->pFileIn = M4OSA_NULL;
3357            }
3358            if(pParams->pFileOut != M4OSA_NULL)
3359            {
3360                /* Delete temporary file */
3361                remove((const char *)pParams->pFileOut);
3362                free(pParams->pFileOut);
3363                pParams->pFileOut = M4OSA_NULL;
3364            }
3365            if(pParams->pFileTemp != M4OSA_NULL)
3366            {
3367                /* Delete temporary file */
3368#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3369                remove((const char *)pParams->pFileTemp);
3370                free(pParams->pFileTemp);
3371#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
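                /* pFileTemp is presumably only allocated when the reserved moov disk
                 space workaround is enabled, hence the removal/free stays under the
                 same compile switch */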
3372                pParams->pFileTemp = M4OSA_NULL;
3373            }
3374            pParams_sauv = pParams;
3375            pParams = pParams->pNext;
3376            free(pParams_sauv);
3377            pParams_sauv = M4OSA_NULL;
3378        }
3379    }
3380
3381    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
3382    {
3383        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
3384        M4xVSS_MCS_params* pParams_sauv;
3385
3386        while(pParams != M4OSA_NULL)
3387        {
3388            if(pParams->pFileIn != M4OSA_NULL)
3389            {
3390                free(pParams->pFileIn);
3391                pParams->pFileIn = M4OSA_NULL;
3392            }
3393            if(pParams->pFileOut != M4OSA_NULL)
3394            {
3395                /* Delete temporary file */
3396                remove((const char *)pParams->pFileOut);
3397                free(pParams->pFileOut);
3398                pParams->pFileOut = M4OSA_NULL;
3399            }
3400            if(pParams->pFileTemp != M4OSA_NULL)
3401            {
3402                /* Delete temporary file */
3403#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3404                remove((const char *)pParams->pFileTemp);
3405                free(pParams->pFileTemp);
3406#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3407                pParams->pFileTemp = M4OSA_NULL;
3408            }
3409            pParams_sauv = pParams;
3410            pParams = pParams->pNext;
3411            free(pParams_sauv);
3412            pParams_sauv = M4OSA_NULL;
3413        }
3414    }
3415
3416    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
3417    {
3418        free(xVSS_context->pcmPreviewFile);
3419        xVSS_context->pcmPreviewFile = M4OSA_NULL;
3420    }
3421    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
3422        && xVSS_context->pOutputFile != M4OSA_NULL)
3423    {
3424        free(xVSS_context->pSettings->pOutputFile);
3425        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3426        xVSS_context->pOutputFile = M4OSA_NULL;
3427    }
3428
3429    /* Reinit all context variables */
3430    xVSS_context->previousClipNumber = 0;
3431    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
3432    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
3433    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
3434    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
3435    xVSS_context->pMCSparamsList = M4OSA_NULL;
3436    xVSS_context->pMCScurrentParams = M4OSA_NULL;
3437    xVSS_context->tempFileIndex = 0;
3438    xVSS_context->targetedTimescale = 0;
3439
3440    return M4NO_ERROR;
3441}
3442
3443/**
3444 ******************************************************************************
3445 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
3446 *                                    M4OSA_Char* pFile,
3447 *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
3448 *
3449 * @brief    This function retrieves the properties of an input 3GP file using the MCS
3450 * @note
3451 * @param    pContext        (IN) The integrator own context
3452 * @param    pFile            (IN) 3GP file to analyse
3453 * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
3454 *                            the 3GP file properties
3455 *
3456 * @return    M4NO_ERROR:    No error
3457 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3458 ******************************************************************************
3459 */
3460M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
3461                                       M4VIDEOEDITING_ClipProperties *pFileProperties)
3462{
3463    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3464    M4OSA_ERR err;
3465    M4MCS_Context mcs_context;
3466
3467    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
3468    if(err != M4NO_ERROR)
3469    {
3470        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
3471        return err;
3472    }
3473
3474    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
3475    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
3476        M4OSA_NULL, M4OSA_NULL);
3477    if (err != M4NO_ERROR)
3478    {
3479        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
3480        M4MCS_abort(mcs_context);
3481        return err;
3482    }
3483
3484    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
3485    if(err != M4NO_ERROR)
3486    {
3487        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
3488        M4MCS_abort(mcs_context);
3489        return err;
3490    }
3491
3492    err = M4MCS_abort(mcs_context);
3493    if (err != M4NO_ERROR)
3494    {
3495        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
3496        return err;
3497    }
3498
3499    return M4NO_ERROR;
3500}
3501
3502
3503/**
3504 ******************************************************************************
3505 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3506 *                     M4VSS3GPP_EditSettings* pSettings, M4OSA_UInt32* pTargetedTimeScale)
3507 *
3508 * @brief    This function retrieves the targeted time scale
3509 * @param    pContext            (IN)    The integrator own context
3510 * @param    pSettings            (IN)    Edit settings to analyse
3511 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
3512 *
3513 * @return    M4NO_ERROR:    No error
3514 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3515 ******************************************************************************
3516 */
3517M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3518                                                 M4VSS3GPP_EditSettings* pSettings,
3519                                                  M4OSA_UInt32* pTargetedTimeScale)
3520{
3521    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3522    M4OSA_ERR err;
3523    M4OSA_UInt32 totalDuration = 0;
3524    M4OSA_UInt8 i = 0;
3525    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
3526
3527    for(i=0;i<pSettings->uiClipNumber;i++)
3528    {
3529        /*search timescale only in mpeg4 case*/
3530        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
3531            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
3532            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
3533        {
3534            M4VIDEOEDITING_ClipProperties fileProperties;
3535
3536            /*UTF conversion support*/
3537            M4OSA_Char* pDecodedPath = M4OSA_NULL;
3538
3539            /**
3540            * UTF conversion: convert into the customer format, before being used*/
3541            pDecodedPath = pSettings->pClipList[i]->pFile;
3542
3543            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
3544                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
3545            {
3546                M4OSA_UInt32 length = 0;
3547                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
3548                     (M4OSA_Void*) pSettings->pClipList[i]->pFile,
3549                        (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
3550                             &length);
3551                if(err != M4NO_ERROR)
3552                {
3553                    M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3554                         M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
3555                    return err;
3556                }
3557                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
3558            }
3559
3560            /*End of the conversion: use the decoded path to get the input file properties*/
3561            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
3562
3566            if(M4NO_ERROR != err)
3567            {
3568                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3569                     M4xVSS_internalGetProperties returned: 0x%x", err);
3570                return err;
3571            }
3572            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
3573            {
3574                if(pSettings->pClipList[i]->uiEndCutTime > 0)
3575                {
3576                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
3577                        - pSettings->pClipList[i]->uiBeginCutTime))
3578                    {
3579                        tempTimeScale = fileProperties.uiVideoTimeScale;
3580                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
3581                             - pSettings->pClipList[i]->uiBeginCutTime);
3582                    }
3583                }
3584                else
3585                {
3586                    if(tempDuration < (fileProperties.uiClipDuration\
3587                         - pSettings->pClipList[i]->uiBeginCutTime))
3588                    {
3589                        tempTimeScale = fileProperties.uiVideoTimeScale;
3590                        tempDuration = (fileProperties.uiClipDuration\
3591                             - pSettings->pClipList[i]->uiBeginCutTime);
3592                    }
3593                }
3594            }
3595        }
3596        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
3597        {
3598            /*the timescale is 30 for PTO3GP*/
3599            *pTargetedTimeScale = 30;
3600            return M4NO_ERROR;
3601
3602        }
3603    }
3604
3605    if(tempTimeScale >= 30)/*Enforce a minimum time scale: if the time scale is too
3606    low, the shell encoder can enter an infinite loop*/
3607    {
3608        *pTargetedTimeScale = tempTimeScale;
3609    }
3610    else
3611    {
3612        *pTargetedTimeScale = 30;
3613    }
3614
3615    return M4NO_ERROR;
3616}
3617
3618
3619/**
3620 ******************************************************************************
3621 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3622 *                                                    M4VIFI_ImagePlane *PlaneIn,
3623 *                                                    M4VIFI_ImagePlane *PlaneOut,
3624 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3625 *                                                    M4OSA_UInt32 uiEffectKind)
3626 *
3627 * @brief    This function apply a color effect on an input YUV420 planar frame
3628 * @note
3629 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
3630 * @param    PlaneIn            (IN) Input YUV420 planar
3631 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3632 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3633 * @param    uiEffectKind    (IN) Unused
3634 *
3635 * @return    M4VIFI_OK:    No error
3636 ******************************************************************************
3637 */
3638M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3639                                             M4VIFI_ImagePlane *PlaneIn,
3640                                             M4VIFI_ImagePlane *PlaneOut,
3641                                             M4VSS3GPP_ExternalProgress *pProgress,
3642                                             M4OSA_UInt32 uiEffectKind)
3643{
3644    M4VIFI_Int32 plane_number;
3645    M4VIFI_UInt32 i,j;
3646    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
3647    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
3648
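        /* Process the three YUV planes line by line: the U and V planes are overwritten
           according to the selected colour effect, while the Y plane is copied unchanged
           (or inverted for the negative effect) */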
3649    for (plane_number = 0; plane_number < 3; plane_number++)
3650    {
3651        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
3652        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
3653        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
3654        {
3655            /**
3656             * Chrominance */
3657            if(plane_number==1 || plane_number==2)
3658            {
3659                //switch ((M4OSA_UInt32)pFunctionContext)
3660                // commented because a structure for the effects context exist
3661                switch (ColorContext->colorEffectType)
3662                {
3663                    case M4xVSS_kVideoEffectType_BlackAndWhite:
3664                        memset((void *)p_buf_dest,128,
3665                         PlaneIn[plane_number].u_width);
3666                        break;
3667                    case M4xVSS_kVideoEffectType_Pink:
3668                        memset((void *)p_buf_dest,255,
3669                         PlaneIn[plane_number].u_width);
3670                        break;
3671                    case M4xVSS_kVideoEffectType_Green:
3672                        memset((void *)p_buf_dest,0,
3673                         PlaneIn[plane_number].u_width);
3674                        break;
3675                    case M4xVSS_kVideoEffectType_Sepia:
3676                        if(plane_number==1)
3677                        {
3678                            memset((void *)p_buf_dest,117,
3679                             PlaneIn[plane_number].u_width);
3680                        }
3681                        else
3682                        {
3683                            memset((void *)p_buf_dest,139,
3684                             PlaneIn[plane_number].u_width);
3685                        }
3686                        break;
3687                    case M4xVSS_kVideoEffectType_Negative:
3688                        memcpy((void *)p_buf_dest,
3689                         (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3690                        break;
3691
3692                    case M4xVSS_kVideoEffectType_ColorRGB16:
3693                        {
3694                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3695
3696                            /*first get the r, g, b*/
3697                            b = (ColorContext->rgb16ColorData &  0x001f);
3698                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3699                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
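                                /* rgb16ColorData is RGB565: 5 bits of blue in the LSBs,
                                 6 bits of green, then 5 bits of red; the U16()/V16()
                                 macros convert these components to the matching chroma
                                 values */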
3700
3701                            /*keep y, but replace u and v*/
3702                            if(plane_number==1)
3703                            {
3704                                /*then convert to u*/
3705                                u = U16(r, g, b);
3706                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3707                                 PlaneIn[plane_number].u_width);
3708                            }
3709                            if(plane_number==2)
3710                            {
3711                                /*then convert to v*/
3712                                v = V16(r, g, b);
3713                                memset((void *)p_buf_dest, (M4OSA_UInt8)v,
3714                                 PlaneIn[plane_number].u_width);
3715                            }
3716                        }
3717                        break;
3718                    case M4xVSS_kVideoEffectType_Gradient:
3719                        {
3720                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3721
3722                            /*first get the r, g, b*/
3723                            b = (ColorContext->rgb16ColorData &  0x001f);
3724                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3725                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3726
3727                            /*for color gradation*/
3728                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
3729                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
3730                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
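                                /* Each component keeps the fraction (1 - i/height) of
                                 its value, so the colour fades progressively towards
                                 black down the frame */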
3731
3732                            /*keep y, but replace u and v*/
3733                            if(plane_number==1)
3734                            {
3735                                /*then convert to u*/
3736                                u = U16(r, g, b);
3737                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3738                                 PlaneIn[plane_number].u_width);
3739                            }
3740                            if(plane_number==2)
3741                            {
3742                                /*then convert to v*/
3743                                v = V16(r, g, b);
3744                                memset((void *)p_buf_dest,(M4OSA_UInt8)v,
3745                                 PlaneIn[plane_number].u_width);
3746                            }
3747                        }
3748                        break;
3749                    default:
3750                        break;
3751                }
3752            }
3753            /**
3754             * Luminance */
3755            else
3756            {
3757                //switch ((M4OSA_UInt32)pFunctionContext)
3758                // commented because a structure for the effects context exist
3759                switch (ColorContext->colorEffectType)
3760                {
3761                case M4xVSS_kVideoEffectType_Negative:
3762                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
3763                    {
3764                            p_buf_dest[j] = 255 - p_buf_src[j];
3765                    }
3766                    break;
3767                default:
3768                    memcpy((void *)p_buf_dest,
3769                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3770                    break;
3771                }
3772            }
3773            p_buf_src += PlaneIn[plane_number].u_stride;
3774            p_buf_dest += PlaneOut[plane_number].u_stride;
3775        }
3776    }
3777
3778    return M4VIFI_OK;
3779}
3780
3781/**
3782 ******************************************************************************
3783 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
3784 *                                                    M4VIFI_ImagePlane *PlaneIn,
3785 *                                                    M4VIFI_ImagePlane *PlaneOut,
3786 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3787 *                                                    M4OSA_UInt32 uiEffectKind)
3788 *
3789 * @brief    This function adds a fixed or animated image on an input YUV420 planar frame
3790 * @note
3791 * @param    userData        (IN) Framing context (framing picture, position, alpha blending)
3792 * @param    PlaneIn            (IN) Input YUV420 planar
3793 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3794 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3795 * @param    uiEffectKind    (IN) Unused
3796 *
3797 * @return    M4VIFI_OK:    No error
3798 ******************************************************************************
3799 */
3800M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3801                                                M4VIFI_ImagePlane PlaneIn[3],
3802                                                M4VIFI_ImagePlane *PlaneOut,
3803                                                M4VSS3GPP_ExternalProgress *pProgress,
3804                                                M4OSA_UInt32 uiEffectKind )
3805{
3806    M4VIFI_UInt32 x,y;
3807
3808    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
3809    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
3810    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
3811
3812    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
3813    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
3814    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
3815
3816    M4VIFI_UInt8 *p_out0;
3817    M4VIFI_UInt8 *p_out1;
3818    M4VIFI_UInt8 *p_out2;
3819
3820    M4VIFI_UInt32 topleft[2];
3821
3822    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
3823    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
3824
3825#ifndef DECODE_GIF_ON_SAVING
3826    Framing = (M4xVSS_FramingStruct *)userData;
3827    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
3828    FramingRGB = Framing->FramingRgb->pac_data;
3829#endif /*DECODE_GIF_ON_SAVING*/
3830
3831    /*FB*/
3832#ifdef DECODE_GIF_ON_SAVING
3833    M4OSA_ERR err;
3834    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
3835    currentFraming = (M4xVSS_FramingStruct *)Framing;
3836    FramingRGB = Framing->FramingRgb->pac_data;
3837#endif /*DECODE_GIF_ON_SAVING*/
3838    /*end FB*/
3839
3840    /**
3841     * Initialize input / output plane pointers */
3842    p_in_Y += PlaneIn[0].u_topleft;
3843    p_in_U += PlaneIn[1].u_topleft;
3844    p_in_V += PlaneIn[2].u_topleft;
3845
3846    p_out0 = PlaneOut[0].pac_data;
3847    p_out1 = PlaneOut[1].pac_data;
3848    p_out2 = PlaneOut[2].pac_data;
3849
3850    /**
3851     * Depending on time, initialize Framing frame to use */
3852    if(Framing->previousClipTime == -1)
3853    {
3854        Framing->previousClipTime = pProgress->uiOutputTime;
3855    }
3856
3857    /**
3858     * If the current clip time has reached the duration of one frame of the framing picture,
3859     * we need to step to the next framing picture */
3860
3861    Framing->previousClipTime = pProgress->uiOutputTime;
3862    FramingRGB = currentFraming->FramingRgb->pac_data;
3863    topleft[0] = currentFraming->topleft_x;
3864    topleft[1] = currentFraming->topleft_y;
3865
3866    for( x=0 ;x < PlaneIn[0].u_height ; x++)
3867    {
3868        for( y=0 ;y < PlaneIn[0].u_width ; y++)
3869        {
3870            /**
3871             * To handle framing with input size != output size
3872             * Framing is applied if the coordinates match between framing/topleft and input plane */
3873            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
3874                y >= topleft[0] &&
3875                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
3876                x >= topleft[1])
3877            {
3878                /*Alpha blending support*/
3879                M4OSA_Float alphaBlending = 1;
3880                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
3881                 (M4xVSS_internalEffectsAlphaBlending*)\
3882                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
3883
3884                if(alphaBlendingStruct != M4OSA_NULL)
3885                {
3886                    if(pProgress->uiProgress \
3887                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
3888                    {
3889                        if(alphaBlendingStruct->m_fadeInTime == 0) {
3890                            alphaBlending = alphaBlendingStruct->m_start / 100;
3891                        } else {
3892                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
3893                             - alphaBlendingStruct->m_start)\
3894                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
3895                            alphaBlending += alphaBlendingStruct->m_start;
3896                            alphaBlending /= 100;
3897                        }
3898                    }
3899                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
3900                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
3901                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
3902                    {
3903                        alphaBlending = (M4OSA_Float)\
3904                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
3905                    }
3906                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
3907                    (alphaBlendingStruct->m_fadeOutTime*10))
3908                    {
3909                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
3910                            alphaBlending = alphaBlendingStruct->m_end / 100;
3911                        } else {
3912                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
3913                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
3914                            /(alphaBlendingStruct->m_fadeOutTime*10);
3915                            alphaBlending += alphaBlendingStruct->m_end;
3916                            alphaBlending /= 100;
3917                        }
3918                    }
3919                }
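                    /* alphaBlending follows a fade-in / plateau / fade-out profile over
                     the effect progress (0-1000): it ramps from m_start to m_middle
                     during the first m_fadeInTime*10, stays at m_middle, then ramps from
                     m_middle to m_end during the last m_fadeOutTime*10; all values are
                     percentages scaled to [0-1] */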
3920                /**/
3921
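                    /* The framing picture uses TRANSPARENT_COLOR (0x07E0, pure green in
                     RGB565) as its transparency key: where the key matches, the input
                     frame shows through; otherwise the framing YUV is blended over the
                     input with the alphaBlending weight */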
3922                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
3923                {
3924                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
3925                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3926                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
3927                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3928                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
3929                }
3930                else
3931                {
3932                    *( p_out0+y+x*PlaneOut[0].u_stride)=
3933                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
3934                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
3935                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
3936                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
3937                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3938                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
3939                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
3940                                *alphaBlending;
3941                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
3942                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
3943                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3944                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
3945                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
3946                                *alphaBlending;
3947                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
3948                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
3949                }
3950                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
3951                    y == PlaneIn[0].u_width-1)
3952                {
3953                    FramingRGB = FramingRGB + 2 \
3954                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
3955                            - PlaneIn[0].u_width + 1);
3956                }
3957                else
3958                {
3959                    FramingRGB = FramingRGB + 2;
3960                }
3961            }
3962            /**
3963             * Just copy input plane to output plane */
3964            else
3965            {
3966                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
3967                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3968                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
3969                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3970                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
3971            }
3972        }
3973    }
3974
3975
3976    return M4VIFI_OK;
3977}
3978
3979
3980/**
3981 ******************************************************************************
3982 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
3983 *                                                    M4VIFI_ImagePlane *PlaneIn,
3984 *                                                    M4VIFI_ImagePlane *PlaneOut,
3985 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3986 *                                                    M4OSA_UInt32 uiEffectKind)
3987 *
3988 * @brief    This function makes a video look as if it were shot in the fifties
3989 * @note
3990 * @param    pUserData       (IN) Context
3991 * @param    pPlaneIn        (IN) Input YUV420 planar
3992 * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
3993 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3994 * @param    uiEffectKind    (IN) Unused
3995 *
3996 * @return    M4VIFI_OK:            No error
3997 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
3998 ******************************************************************************
3999 */
4000M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
4001                                                M4VIFI_ImagePlane *pPlaneIn,
4002                                                M4VIFI_ImagePlane *pPlaneOut,
4003                                                M4VSS3GPP_ExternalProgress *pProgress,
4004                                                M4OSA_UInt32 uiEffectKind )
4005{
4006    M4VIFI_UInt32 x, y, xShift;
4007    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
4008    M4VIFI_UInt8 *pOutY, *pInYbegin;
4009    M4VIFI_UInt8 *pInCr,* pOutCr;
4010    M4VIFI_Int32 plane_number;
4011
4012    /* Internal context*/
4013    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
4014
4015    /* Check the inputs (debug only) */
4016    M4OSA_DEBUG_IF2((p_FiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
4017         "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4018    M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
4019         "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4020    M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
4021        "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4022
4023    /* Initialize input / output plane pointers */
4024    pInY += pPlaneIn[0].u_topleft;
4025    pOutY = pPlaneOut[0].pac_data;
4026    pInYbegin  = pInY;
4027
4028    /* Initialize the random generator and draw the first random values */
4029    if(p_FiftiesData->previousClipTime < 0)
4030    {
4031        M4OSA_randInit();
4032        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4033        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4034        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4035    }
4036
4037    /* Choose random values if we have reached the duration of a partial effect */
4038    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
4039         > p_FiftiesData->fiftiesEffectDuration)
4040    {
4041        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4042        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4043        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4044    }
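        /* shiftRandomValue offsets the rows read from the input (tearing the image into
           two vertically shifted parts) and stripeRandomValue selects the column of the
           vertical "scratch" line; both are redrawn once per fiftiesEffectDuration */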
4045
4046    /* Set the chrominance to sepia */
4047    for (plane_number = 1; plane_number < 3; plane_number++)
4048    {
4049        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
4050        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
4051
4052        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
4053        {
4054            if (1 == plane_number)
4055                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
4056            else
4057                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
4058
4059            pInCr  += pPlaneIn[plane_number].u_stride;
4060            pOutCr += pPlaneOut[plane_number].u_stride;
4061        }
4062    }
4063
4064    /* Compute the new pixels values */
4065    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
4066    {
4067        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
4068
4069        /* Compute the xShift (random value) */
4070        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
4071            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
4072        else
4073            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
4074                % (pPlaneIn[0].u_height - 1);
4075
4076        /* Initialize the pointers */
4077        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
4078        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
4079
4080        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
4081        {
4082            /* Set Y value */
4083            if (xShift > (pPlaneIn[0].u_height - 4))
4084                *p_outYtmp = 40;        /* Add some horizontal black lines between the
4085                                        two parts of the image */
4086            else if ( y == p_FiftiesData->stripeRandomValue)
4087                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */
4088            else
4089                *p_outYtmp = *p_inYtmp;
4090
4091
4092            /* Go to the next pixel */
4093            p_outYtmp++;
4094            p_inYtmp++;
4095
4096            /* Restart at the beginning of the line for the last pixel*/
4097            if (y == (pPlaneIn[0].u_width - 2))
4098                p_outYtmp = pOutY;
4099        }
4100
4101        /* Go to the next line */
4102        pOutY += pPlaneOut[0].u_stride;
4103    }
4104
4105    return M4VIFI_OK;
4106}
4107
4108/**
4109 ******************************************************************************
4110 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
4111 * @brief    Zoom in/out video effect functions.
4112 * @note    The external video function is used only if VideoEffectType is set to
4113 * M4xVSS_kVideoEffectType_ZoomIn or M4xVSS_kVideoEffectType_ZoomOut.
4114 *
4115 * @param   pFunctionContext    (IN) The function context, previously set by the integrator
4116 * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
4117 *                                    image planes (Y, U and V)
4118 * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
4119 *                                        three valid image planes (Y, U and V)
4120 * @param    pProgress            (IN) Set of information about the video transition progress.
4121 * @return    M4NO_ERROR:            No error
4122 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
4123 ******************************************************************************
4124 */
4125
4126M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
4127    M4OSA_Void *pFunctionContext,
4128    M4VIFI_ImagePlane *pInputPlanes,
4129    M4VIFI_ImagePlane *pOutputPlanes,
4130    M4VSS3GPP_ExternalProgress *pProgress,
4131    M4OSA_UInt32 uiEffectKind
4132)
4133{
4134    M4OSA_UInt32 boxWidth;
4135    M4OSA_UInt32 boxHeight;
4136    M4OSA_UInt32 boxPosX;
4137    M4OSA_UInt32 boxPosY;
4138    M4OSA_UInt32 ratio = 0;
4139    /* consecutive ratios differ by a factor of ~1.189207 (2^(1/4)) */
4140    /* zoom between x1 and x16 */
4141    M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
4142                                6889,8192,9742,11585,13777,16384};
4143    M4OSA_UInt32 ik;
4144
4145    M4VIFI_ImagePlane boxPlane[3];
4146
4147    if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
4148    {
4149        //ratio = 16 - (15 * pProgress->uiProgress)/1000;
4150        ratio = 16 - pProgress->uiProgress / 66 ;
4151    }
4152    else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
4153    {
4154        //ratio = 1 + (15 * pProgress->uiProgress)/1000;
4155        ratio = 1 + pProgress->uiProgress / 66 ;
4156    }
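        /* pProgress->uiProgress runs from 0 to 1000; dividing by 66 maps it onto indices
           of ratiotab, so zoom-out steps the crop ratio from x16 down towards x1 and
           zoom-in from x1 up towards x16 over the duration of the effect */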
4157
4158    for(ik=0;ik<3;ik++){
4159
4160        boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
4161        boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
4162
4163        boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
4164        boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
4165        boxPlane[ik].u_height = (boxHeight)&(~1);
4166        boxPlane[ik].u_width = (boxWidth)&(~1);
4167
4168        boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
4169        boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
4170        boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
4171    }
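        /* boxPlane describes a centred crop box of the input whose size is the input
           size divided by the current zoom ratio (Q10 fixed point); resizing that box
           back up to the full output size produces the zoom effect */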
4172
4173    M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
4174
4175    /**
4176     * Return */
4177    return(M4NO_ERROR);
4178}
4179
4180/**
4181 ******************************************************************************
4182 * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
4183 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4184 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4185 *                                    M4VIFI_ImagePlane *PlaneOut,
4186 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4187 *                                    M4OSA_UInt32 uiTransitionKind)
4188 *
4189 * @brief    This function applies an alpha magic transition between two input YUV420 planar frames
4190 * @note
4191 * @param    userData        (IN) Contains a pointer on a settings structure
4192 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4193 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4194 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4195 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4196 * @param    uiTransitionKind(IN) Unused
4197 *
4198 * @return    M4VIFI_OK:    No error
4199 ******************************************************************************
4200 */
4201M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4202                             M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4203                             M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
4204{
4205
4206    M4OSA_ERR err;
4207
4208    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4209    M4VIFI_Int32 alphaProgressLevel;
4210
4211    M4VIFI_ImagePlane* planeswap;
4212    M4VIFI_UInt32 x,y;
4213
4214    M4VIFI_UInt8 *p_out0;
4215    M4VIFI_UInt8 *p_out1;
4216    M4VIFI_UInt8 *p_out2;
4217    M4VIFI_UInt8 *alphaMask;
4218    /* "Old image" */
4219    M4VIFI_UInt8 *p_in1_Y;
4220    M4VIFI_UInt8 *p_in1_U;
4221    M4VIFI_UInt8 *p_in1_V;
4222    /* "New image" */
4223    M4VIFI_UInt8 *p_in2_Y;
4224    M4VIFI_UInt8 *p_in2_U;
4225    M4VIFI_UInt8 *p_in2_V;
4226
4227    err = M4NO_ERROR;
4228
4229    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4230
4231    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
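        /* The transition progress (0-1000) is rescaled to 0-255 so that it can be
           compared pixel by pixel against the greyscale alpha mask, which acts as a
           per-pixel reveal time */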
4232
4233    if( alphaContext->isreverse != M4OSA_FALSE)
4234    {
4235        alphaProgressLevel = 255 - alphaProgressLevel;
4236        planeswap = PlaneIn1;
4237        PlaneIn1 = PlaneIn2;
4238        PlaneIn2 = planeswap;
4239    }
4240
4241    p_out0 = PlaneOut[0].pac_data;
4242    p_out1 = PlaneOut[1].pac_data;
4243    p_out2 = PlaneOut[2].pac_data;
4244
4245    alphaMask = alphaContext->pPlane->pac_data;
4246
4247    /* "Old image" */
4248    p_in1_Y = PlaneIn1[0].pac_data;
4249    p_in1_U = PlaneIn1[1].pac_data;
4250    p_in1_V = PlaneIn1[2].pac_data;
4251    /* "New image" */
4252    p_in2_Y = PlaneIn2[0].pac_data;
4253    p_in2_U = PlaneIn2[1].pac_data;
4254    p_in2_V = PlaneIn2[2].pac_data;
4255
4256     /**
4257     * For each row ... */
4258    for( y=0; y<PlaneOut->u_height; y++ )
4259    {
4260        /**
4261         * ... and each column of the alpha mask */
4262        for( x=0; x<PlaneOut->u_width; x++ )
4263        {
4264            /**
4265             * If the value of the current pixel of the alpha mask is greater than the current
4266             * progress ( normalized on [0-255] ) */
4267            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
4268            {
4269                /* We keep "old image" in output plane */
4270                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4271                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4272                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4273                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4274                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4275            }
4276            else
4277            {
4278                /* We take "new image" in output plane */
4279                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4280                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4281                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4282                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4283                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4284            }
4285        }
4286    }
4287
4288    return(err);
4289}
4290
4291/**
4292 ******************************************************************************
4293 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
4294 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4295 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4296 *                                    M4VIFI_ImagePlane *PlaneOut,
4297 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4298 *                                    M4OSA_UInt32 uiTransitionKind)
4299 *
4300 * @brief    This function applies an alpha magic transition, with blending, between two input YUV420 planar frames
4301 * @note
4302 * @param    userData        (IN) Contains a pointer on a settings structure
4303 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4304 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4305 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4306 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4307 * @param    uiTransitionKind(IN) Unused
4308 *
4309 * @return    M4VIFI_OK:    No error
4310 ******************************************************************************
4311 */
4312M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4313                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4314                                     M4VSS3GPP_ExternalProgress *pProgress,
4315                                     M4OSA_UInt32 uiTransitionKind)
4316{
4317    M4OSA_ERR err;
4318
4319    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4320    M4VIFI_Int32 alphaProgressLevel;
4321    M4VIFI_Int32 alphaBlendLevelMin;
4322    M4VIFI_Int32 alphaBlendLevelMax;
4323    M4VIFI_Int32 alphaBlendRange;
4324
4325    M4VIFI_ImagePlane* planeswap;
4326    M4VIFI_UInt32 x,y;
4327    M4VIFI_Int32 alphaMaskValue;
4328
4329    M4VIFI_UInt8 *p_out0;
4330    M4VIFI_UInt8 *p_out1;
4331    M4VIFI_UInt8 *p_out2;
4332    M4VIFI_UInt8 *alphaMask;
4333    /* "Old image" */
4334    M4VIFI_UInt8 *p_in1_Y;
4335    M4VIFI_UInt8 *p_in1_U;
4336    M4VIFI_UInt8 *p_in1_V;
4337    /* "New image" */
4338    M4VIFI_UInt8 *p_in2_Y;
4339    M4VIFI_UInt8 *p_in2_U;
4340    M4VIFI_UInt8 *p_in2_V;
4341
4342
4343    err = M4NO_ERROR;
4344
4345    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4346
4347    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4348
4349    if( alphaContext->isreverse != M4OSA_FALSE)
4350    {
4351        alphaProgressLevel = 255 - alphaProgressLevel;
4352        planeswap = PlaneIn1;
4353        PlaneIn1 = PlaneIn2;
4354        PlaneIn2 = planeswap;
4355    }
4356
4357    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
4358
4359    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
4360
4361    alphaBlendRange = (alphaContext->blendingthreshold)*2;
4362
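        /* Pixels whose mask value lies above the [min, max] window keep the old frame,
           pixels below it take the new frame, and pixels inside the window are blended
           linearly according to their distance from the window edges */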
4363    p_out0 = PlaneOut[0].pac_data;
4364    p_out1 = PlaneOut[1].pac_data;
4365    p_out2 = PlaneOut[2].pac_data;
4366
4367    alphaMask = alphaContext->pPlane->pac_data;
4368
4369    /* "Old image" */
4370    p_in1_Y = PlaneIn1[0].pac_data;
4371    p_in1_U = PlaneIn1[1].pac_data;
4372    p_in1_V = PlaneIn1[2].pac_data;
4373    /* "New image" */
4374    p_in2_Y = PlaneIn2[0].pac_data;
4375    p_in2_U = PlaneIn2[1].pac_data;
4376    p_in2_V = PlaneIn2[2].pac_data;
4377
4378    /* apply Alpha Magic on each pixel */
4379    for( y=0; y<PlaneOut->u_height; y++ )
4380    {
4381        for( x=0; x<PlaneOut->u_width; x++ )
4382        {
4383            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
4384            if( alphaBlendLevelMax < alphaMaskValue )
4385            {
4386                /* We keep "old image" in output plane */
4387                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4388                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4389                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4390                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4391                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4392            }
4393            else if( (alphaBlendLevelMin < alphaMaskValue)&&
4394                    (alphaMaskValue <= alphaBlendLevelMax ) )
4395            {
4396                /* We blend "old and new image" in output plane */
4397                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
4398                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
4399                        +(alphaBlendLevelMax-alphaMaskValue)\
4400                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
4401
4402                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
4403                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
4404                        *PlaneIn1[1].u_stride))
4405                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
4406                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );
4407
4408                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4409                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
4410                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
4411                                +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
4412                                    *PlaneIn2[2].u_stride)) )/alphaBlendRange );
4413
4414            }
4415            else
4416            {
4417                /* We take "new image" in output plane */
4418                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4419                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4420                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4421                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4422                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4423            }
4424        }
4425    }
4426
4427    return(err);
4428}
4429
4430#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
4431     * (plane).u_stride + (x) )
4432
4433static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
4434{
4435    M4OSA_UInt32    height, width, sourceStride, destStride, y;
4436    M4OSA_MemAddr8    sourceWalk, destWalk;
4437
4438    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4439     recomputed from memory. */
4440    height = dest->u_height;
4441    width = dest->u_width;
4442
4443    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
4444    sourceStride = source->u_stride;
4445
4446    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
4447    destStride = dest->u_stride;
4448
4449    for (y=0; y<height; y++)
4450    {
4451        memcpy((void *)destWalk, (void *)sourceWalk, width);
4452        destWalk += destStride;
4453        sourceWalk += sourceStride;
4454    }
4455}
4456
4457static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
4458                                                M4VIFI_ImagePlane* bottomPlane,
4459                                                M4VIFI_ImagePlane *PlaneOut,
4460                                                M4OSA_UInt32    shiftUV)
4461{
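        /* Compose the output from the lower part of topPlane (placed at the top of the
           output) followed by the upper part of bottomPlane, so topPlane appears to
           slide up and out while bottomPlane slides in from below */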
4462    M4OSA_UInt32 i;
4463
4464    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4465    "hot" at the same time (better for cache). */
4466    for (i=0; i<3; i++)
4467    {
4468        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
4469        M4OSA_MemAddr8    sourceWalk, destWalk;
4470
4471        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4472         recomputed from memory. */
4473        if (0 == i) /* Y plane */
4474        {
4475            bottomPartHeight = 2*shiftUV;
4476        }
4477        else /* U and V planes */
4478        {
4479            bottomPartHeight = shiftUV;
4480        }
4481        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
4482        width = PlaneOut[i].u_width;
4483
4484        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
4485        sourceStride = topPlane[i].u_stride;
4486
4487        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4488        destStride = PlaneOut[i].u_stride;
4489
4490        /* First the part from the top source clip frame. */
4491        for (y=0; y<topPartHeight; y++)
4492        {
4493            memcpy((void *)destWalk, (void *)sourceWalk, width);
4494            destWalk += destStride;
4495            sourceWalk += sourceStride;
4496        }
4497
4498        /* and now change the vars to copy the part from the bottom source clip frame. */
4499        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
4500        sourceStride = bottomPlane[i].u_stride;
4501
4502        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
4503
4504        for (y=0; y<bottomPartHeight; y++)
4505        {
4506            memcpy((void *)destWalk, (void *)sourceWalk, width);
4507            destWalk += destStride;
4508            sourceWalk += sourceStride;
4509        }
4510    }
4511    return M4NO_ERROR;
4512}
4513
4514static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
4515                                                  M4VIFI_ImagePlane* rightPlane,
4516                                                  M4VIFI_ImagePlane *PlaneOut,
4517                                                  M4OSA_UInt32    shiftUV)
4518{
4519    M4OSA_UInt32 i, y;
4520    /* If we shifted by exactly 0, or by the width of the target image, then we would get the left
4521    frame or the right frame, respectively. These cases aren't handled too well by the general
4522    handling, since they result in 0-size memcopies, so might as well particularize them. */
4523
4524    if (0 == shiftUV)    /* output left frame */
4525    {
4526        for (i = 0; i<3; i++) /* for each YUV plane */
4527        {
4528            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
4529        }
4530
4531        return M4NO_ERROR;
4532    }
4533
4534    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
4535    {
4536        for (i = 0; i<3; i++) /* for each YUV plane */
4537        {
4538            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
4539        }
4540
4541        return M4NO_ERROR;
4542    }
4543
4544
4545    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4546    "hot" at the same time (better for cache). */
4547    for (i=0; i<3; i++)
4548    {
4549        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
4550        M4OSA_UInt32    leftStride,    rightStride,    destStride;
4551        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
4552
4553        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
4554        and recomputed from memory. */
4555        height = PlaneOut[i].u_height;
4556
4557        if (0 == i) /* Y plane */
4558        {
4559            rightPartWidth = 2*shiftUV;
4560        }
4561        else /* U and V planes */
4562        {
4563            rightPartWidth = shiftUV;
4564        }
4565        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
4566
4567        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
4568        leftStride = leftPlane[i].u_stride;
4569
4570        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
4571        rightStride = rightPlane[i].u_stride;
4572
4573        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4574        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
4575        destStride = PlaneOut[i].u_stride;
4576
4577        for (y=0; y<height; y++)
4578        {
4579            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
4580            leftWalk += leftStride;
4581
4582            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
4583            rightWalk += rightStride;
4584
4585            destWalkLeft += destStride;
4586            destWalkRight += destStride;
4587        }
4588    }
4589
4590    return M4NO_ERROR;
4591}
4592
4593
4594M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4595                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4596                                  M4VSS3GPP_ExternalProgress *pProgress,
4597                                  M4OSA_UInt32 uiTransitionKind)
4598{
4599    M4xVSS_internal_SlideTransitionSettings* settings =
4600         (M4xVSS_internal_SlideTransitionSettings*)userData;
4601    M4OSA_UInt32    shiftUV;
4602
4603    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
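        /* shiftUV is expressed in chroma-plane pixels (half the luma resolution); the
           slide helpers therefore use 2*shiftUV for the Y plane and shiftUV for the
           U and V planes */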
4604    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4605        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
4606    {
4607        /* horizontal slide */
4608        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
4609        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
4610            shiftUV,pProgress->uiProgress );
4611        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4612        {
4613            /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV
4614            (since it's a shift from the left frame) so that we start out on the right
4615            (i.e. not left) frame, it
4616            being from the previous clip. */
4617            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4618                 (PlaneOut[1]).u_width - shiftUV);
4619        }
4620        else /* Left out, right in*/
4621        {
4622            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4623        }
4624    }
4625    else
4626    {
4627        /* vertical slide */
4628        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
4629        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
4630            pProgress->uiProgress );
4631        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
4632        {
4633            /* Put the previous clip frame top, the next clip frame bottom. */
4634            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4635        }
4636        else /* Bottom out, top in */
4637        {
4638            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4639                (PlaneOut[1]).u_height - shiftUV);
4640        }
4641    }
4642
4643    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
4644    at least in debug mode. */
4645}
4646
4647
4648/**
4649 ******************************************************************************
4650 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext,
4651 *                                                    M4VIFI_ImagePlane *PlaneIn,
4652 *                                                    M4VIFI_ImagePlane *PlaneOut,
4653 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4654 *                                                    M4OSA_UInt32 uiEffectKind)
4655 *
4656 * @brief    This function applies a fade to black and then a fade from black
4657 * @note
4658 * @param    userData        (IN) Unused
4659 * @param    PlaneIn1        (IN) Input YUV420 planar from the previous clip
     * @param    PlaneIn2        (IN) Input YUV420 planar from the next clip
4660 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4661 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4662 * @param    uiTransitionKind(IN) Unused
4663 *
4664 * @return    M4VIFI_OK:    No error
4665 ******************************************************************************
4666 */
4667M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4668                                     M4VIFI_ImagePlane PlaneIn2[3],
4669                                     M4VIFI_ImagePlane *PlaneOut,
4670                                     M4VSS3GPP_ExternalProgress *pProgress,
4671                                     M4OSA_UInt32 uiTransitionKind)
4672{
4673    M4OSA_Int32 tmp = 0;
4674    M4OSA_ERR err = M4NO_ERROR;
4675
4676
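        /* The transition progress runs from 0 to 1000: during the first half the luma
           scale applied to the outgoing clip decreases from 1024 to 0 (fade to black);
           during the second half the scale applied to the incoming clip increases from
           0 back up to 1024 (fade from black) */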
4677    if((pProgress->uiProgress) < 500)
4678    {
4679        /**
4680         * Compute where we are in the effect (scale is 0->1024) */
4681        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
4682
4683        /**
4684         * Apply the darkening effect */
4685        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
4686             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4687        if (M4NO_ERROR != err)
4688        {
4689            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
4690                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4691            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4692        }
4693    }
4694    else
4695    {
4696        /**
4697         * Compute where we are in the effect (scale is 0->1024). */
4698        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
4699
4700        /**
4701         * Apply the darkening effect */
4702        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
4703             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4704        if (M4NO_ERROR != err)
4705        {
4706            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
4707                 M4VFL_modifyLumaWithScale returns error 0x%x,\
4708                     returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4709            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4710        }
4711    }
4712
4713
4714    return M4VIFI_OK;
4715}
4716
4717
4718/**
4719 ******************************************************************************
4720 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
4721 *                                                        M4OSA_Void* pBufferIn,
4722 *                                                        M4OSA_Void* pBufferOut,
4723 *                                                        M4OSA_UInt32* convertedSize)
4724 *
4725 * @brief    This function converts from the customer format to UTF8
4726 * @note
4727 * @param    pContext        (IN)    The integrator own context
4728 * @param    pBufferIn        (IN)    Buffer to convert
4729 * @param    pBufferOut        (OUT)    Converted buffer
4730 * @param    convertedSize    (OUT)    Size of the converted buffer
4731 *
4732 * @return    M4NO_ERROR:    No error
4733 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4734 ******************************************************************************
4735 */
4736M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4737                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4738{
4739    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4740    M4OSA_ERR err;
4741
4742    pBufferOut = pBufferIn;
4743    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
4744        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4745    {
4746        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4747
4748        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
4749            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
4750
4751        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4752            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4753                 (M4OSA_UInt32*)&ConvertedSize);
4754        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4755        {
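                /* The temporary buffer was too small for the converted string; the
                 conversion function is expected to have updated ConvertedSize with the
                 required size, so the buffer is reallocated at that size and the
                 conversion is retried */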
4756            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4757
4758            /*free too small buffer*/
4759            free(xVSS_context->\
4760                UTFConversionContext.pTempOutConversionBuffer);
4761
4762            /*re-allocate the buffer*/
4763            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4764                 (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4765                     (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
4766            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4767            {
4768                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
4769                return M4ERR_ALLOC;
4770            }
4771            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4772
4773            memset((void *)xVSS_context->\
4774                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4775                    UTFConversionContext.m_TempOutConversionSize);
4776
4777            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4778                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4779                    (M4OSA_UInt32*)&ConvertedSize);
4780            if(err != M4NO_ERROR)
4781            {
4782                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4783                return err;
4784            }
4785        }
4786        else if(err != M4NO_ERROR)
4787        {
4788            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4789            return err;
4790        }
4791        /*decoded path*/
4792        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4793        (*convertedSize) = ConvertedSize;
4794    }
4795    return M4NO_ERROR;
4796}
4797
4798
/**
 ******************************************************************************
 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext,
 *                                  M4OSA_Void* pBufferIn, M4OSA_Void* pBufferOut,
 *                                  M4OSA_UInt32* convertedSize)
 *
 * @brief    This function converts from UTF-8 to the customer character format
 * @note
 * @param    pContext        (IN)    The integrator's own context
 * @param    pBufferIn        (IN)    Buffer to convert
 * @param    pBufferOut        (OUT)    Converted buffer
 * @param    convertedSize    (OUT)    Size of the converted buffer
 *
 * @return    M4NO_ERROR:    No error
 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
 ******************************************************************************
 */
M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
{
    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    M4OSA_ERR err;

    /* See the note in M4xVSS_internalConvertToUTF8: this assignment only updates
       the local copy of pBufferOut; when a conversion function is registered, the
       converted string is made available through the context's
       pTempOutConversionBuffer. */
    pBufferOut = pBufferIn;
    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;

        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 0,
            (M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
            (M4OSA_UInt32*)&ConvertedSize);
        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
        {
            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);

            /* Free the buffer that proved too small */
            free(xVSS_context->UTFConversionContext.pTempOutConversionBuffer);

            /* Re-allocate the buffer with the size reported by the conversion function */
            xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
                    (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
            {
                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
                return M4ERR_ALLOC;
            }
            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;

            memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, 0,
                (M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);

            /* Retry the conversion with the larger buffer */
            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
                (M4OSA_UInt32*)&ConvertedSize);
            if(err != M4NO_ERROR)
            {
                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
                return err;
            }
        }
        else if(err != M4NO_ERROR)
        {
            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
            return err;
        }
        /* Return the converted buffer and its size */
        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
        (*convertedSize) = ConvertedSize;
    }

    return M4NO_ERROR;
}

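/* Illustrative sketch (not part of the original file) of the grow-and-retry
 * pattern shared by M4xVSS_internalConvertToUTF8 and
 * M4xVSS_internalConvertFromUTF8: attempt the conversion with the current
 * temporary buffer and, if the callback reports M4xVSSWAR_BUFFER_OUT_TOO_SMALL,
 * re-allocate the buffer to the size the callback wrote back and retry once.
 * The helper name and the explicit callback-pointer type are assumptions, not
 * the library's declared typedef. Wrapped in #if 0 so it has no effect on the
 * build. */
#if 0
static M4OSA_ERR M4xVSS_exampleGrowAndRetry(M4xVSS_Context* xVSS_context,
    M4OSA_Void* pBufferIn,
    M4OSA_ERR (*pConvFct)(M4OSA_Void*, M4OSA_UInt8*, M4OSA_UInt32*))
{
    /* Start with the current size of the shared temporary buffer. */
    M4OSA_UInt32 size = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
    M4OSA_ERR err;

    /* First attempt with the existing buffer. */
    err = pConvFct(pBufferIn,
        (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &size);

    if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
    {
        /* 'size' now holds the size the callback needs: grow the buffer and retry. */
        free(xVSS_context->UTFConversionContext.pTempOutConversionBuffer);
        xVSS_context->UTFConversionContext.pTempOutConversionBuffer =
            (M4OSA_Void*)M4OSA_32bitAlignedMalloc(size*sizeof(M4OSA_UInt8), M4VA,
                (M4OSA_Char *)"example: UTF conversion buffer");
        if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
        {
            return M4ERR_ALLOC;
        }
        xVSS_context->UTFConversionContext.m_TempOutConversionSize = size;

        err = pConvFct(pBufferIn,
            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &size);
    }
    return err;
}
#endif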