M4xVSS_internal.c revision 3b25fdc4a33b53cfcf67315c2d42ad699b8cefe2
1/*
2 * Copyright (C) 2004-2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17/**
18 ******************************************************************************
19 * @file    M4xVSS_internal.c
20 * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
21 * @note
22 ******************************************************************************
23 */
24#include "M4OSA_Debug.h"
25#include "M4OSA_CharStar.h"
26#include "M4OSA_FileExtra.h"
27
28#include "NXPSW_CompilerSwitches.h"
29
30#include "M4VSS3GPP_API.h"
31#include "M4VSS3GPP_ErrorCodes.h"
32
33#include "M4xVSS_API.h"
34#include "M4xVSS_Internal.h"
35
36/*for rgb16 color effect*/
37#include "M4VIFI_Defines.h"
38#include "M4VIFI_Clip.h"
39
40/**
41 * component includes */
42#include "M4VFL_transition.h"            /**< video effects */
43
44/* Internal header file of VSS is included because of MMS use case */
45#include "M4VSS3GPP_InternalTypes.h"
46
47/*Exif header files to add image rendering support (cropping, black borders)*/
48#include "M4EXIFC_CommonAPI.h"
49// StageFright encoders require %16 resolution
50#include "M4ENCODER_common.h"
51
52#define TRANSPARENT_COLOR 0x7E0
53
54/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */
55M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
56                                        M4VIFI_ImagePlane *pPlaneOut);
57
58
59/*special MCS function used only in VideoArtist and VideoStudio to open the media in the normal
60 mode. That way the media duration is accurate*/
61extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
62                                         M4VIDEOEDITING_FileType InputFileType,
63                                         M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
64
65
66/**
67 ******************************************************************************
68 * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
69 * @brief        This function initializes MCS (3GP transcoder) with the given
70 *                parameters
71 * @note        The transcoding parameters are given by the internal xVSS context.
72 *                This context contains a pointer on the current element of the
73 *                chained list of MCS parameters.
74 *
75 * @param    pContext            (IN) Pointer on the xVSS edit context
76 * @return    M4NO_ERROR:            No error
77 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
78 * @return    M4ERR_ALLOC:        Memory allocation has failed
79 ******************************************************************************
80 */
81M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
82{
83    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
84    M4OSA_ERR err;
85    M4MCS_Context mcs_context;
86    M4MCS_OutputParams Params;
87    M4MCS_EncodingParams Rates;
88    M4OSA_UInt32 i;
89
90    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
91    if(err != M4NO_ERROR)
92    {
93        M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
94        return err;
95    }
96
97#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS
98    /* replay recorded external decoder registrations on the MCS */
99    for (i=0; i<M4VD_kVideoType_NB; i++)
100    {
101        if (xVSS_context->registeredExternalDecs[i].registered)
102        {
103            err = M4MCS_registerExternalVideoDecoder(mcs_context, i,
104                    xVSS_context->registeredExternalDecs[i].pDecoderInterface,
105                    xVSS_context->registeredExternalDecs[i].pUserData);
106            if (M4NO_ERROR != err)
107            {
108                M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\
109                     M4MCS_registerExternalVideoDecoder() returns 0x%x!", err);
110                M4MCS_abort(mcs_context);
111                return err;
112            }
113        }
114    }
115#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */
116
117    /* replay recorded external encoder registrations on the MCS */
118    for (i=0; i<M4VE_kEncoderType_NB; i++)
119    {
120        if (xVSS_context->registeredExternalEncs[i].registered)
121        {
122            err = M4MCS_registerExternalVideoEncoder(mcs_context, i,
123                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
124                    xVSS_context->registeredExternalEncs[i].pUserData);
125            if (M4NO_ERROR != err)
126            {
127                M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\
128                     M4MCS_registerExternalVideoEncoder() returns 0x%x!", err);
129                M4MCS_abort(mcs_context);
130                return err;
131            }
132        }
133    }
134
135    err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
136         xVSS_context->pMCScurrentParams->InputFileType,
137             xVSS_context->pMCScurrentParams->pFileOut,
138             xVSS_context->pMCScurrentParams->pFileTemp);
139    if (err != M4NO_ERROR)
140    {
141        M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
142        M4MCS_abort(mcs_context);
143        return err;
144    }
145
146    /**
147     * Fill MCS parameters with the parameters contained in the current element of the
148       MCS parameters chained list */
149    Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
150    Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
151    Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
152    Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
153    Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
154    Params.OutputAudioSamplingFrequency =
155         xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
156    Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
157    Params.pOutputPCMfile = M4OSA_NULL;
158    /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
159    switch(xVSS_context->pMCScurrentParams->MediaRendering)
160    {
161    case M4xVSS_kResizing:
162        Params.MediaRendering = M4MCS_kResizing;
163        break;
164    case M4xVSS_kCropping:
165        Params.MediaRendering = M4MCS_kCropping;
166        break;
167    case M4xVSS_kBlackBorders:
168        Params.MediaRendering = M4MCS_kBlackBorders;
169        break;
170    default:
171        break;
172    }
173    /**/
174    // new params after integrating MCS 2.0
175    // Set the number of audio effects; 0 for now.
176    Params.nbEffects = 0;
177
178    // Set the audio effect; null for now.
179    Params.pEffects = NULL;
180
181    // Set the audio effect; null for now.
182    Params.bDiscardExif = M4OSA_FALSE;
183
184    // Set the audio effect; null for now.
185    Params.bAdjustOrientation = M4OSA_FALSE;
186    // new params after integrating MCS 2.0
187
188    /**
189     * Set output parameters */
190    err = M4MCS_setOutputParams(mcs_context, &Params);
191    if (err != M4NO_ERROR)
192    {
193        M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
194        M4MCS_abort(mcs_context);
195        return err;
196    }
197
198    Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
199    Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
200    Rates.BeginCutTime = 0;
201    Rates.EndCutTime = 0;
202    Rates.OutputFileSize = 0;
203
204    /*FB: transcoding per parts*/
205    Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
206    Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
207    Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
208
209    err = M4MCS_setEncodingParams(mcs_context, &Rates);
210    if (err != M4NO_ERROR)
211    {
212        M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
213        M4MCS_abort(mcs_context);
214        return err;
215    }
216
217    err = M4MCS_checkParamsAndStart(mcs_context);
218    if (err != M4NO_ERROR)
219    {
220        M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
221        M4MCS_abort(mcs_context);
222        return err;
223    }
224
225    /**
226     * Save MCS context to be able to call MCS step function in M4xVSS_step function */
227    xVSS_context->pMCS_Ctxt = mcs_context;
228
229    return M4NO_ERROR;
230}
231
232/**
233 ******************************************************************************
234 * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
235 * @brief        This function cleans up MCS (3GP transcoder)
236 * @note
237 *
238 * @param    pContext            (IN) Pointer on the xVSS edit context
239 * @return    M4NO_ERROR:            No error
240 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
241 * @return    M4ERR_ALLOC:        Memory allocation has failed
242 ******************************************************************************
243 */
244M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
245{
246    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
247    M4OSA_ERR err;
248
249    err = M4MCS_close(xVSS_context->pMCS_Ctxt);
250    if (err != M4NO_ERROR)
251    {
252        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
253        M4MCS_abort(xVSS_context->pMCS_Ctxt);
254        return err;
255    }
256
257    /**
258     * Free this MCS instance */
259    err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
260    if (err != M4NO_ERROR)
261    {
262        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
263        return err;
264    }
265
266    xVSS_context->pMCS_Ctxt = M4OSA_NULL;
267
268    return M4NO_ERROR;
269}
270
271/**
272 ******************************************************************************
273 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
274 *                                             M4OSA_FileReadPointer* pFileReadPtr,
275 *                                                M4VIFI_ImagePlane* pImagePlanes,
276 *                                                 M4OSA_UInt32 width,
277 *                                                M4OSA_UInt32 height);
278 * @brief    It Coverts and resizes a ARGB8888 image to YUV420
279 * @note
280 * @param    pFileIn            (IN) The Image input file
281 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
282 * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user
283 *                            ARGB8888 image  will be converted and resized  to output
284 *                             YUV420 plane size
285 *@param    width        (IN) width of the ARGB8888
286 *@param    height            (IN) height of the ARGB8888
287 * @return    M4NO_ERROR:    No error
288 * @return    M4ERR_ALLOC: memory error
289 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
290 ******************************************************************************
291 */
292
293M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
294                                                          M4OSA_FileReadPointer* pFileReadPtr,
295                                                          M4VIFI_ImagePlane* pImagePlanes,
296                                                          M4OSA_UInt32 width,M4OSA_UInt32 height)
297{
298    M4OSA_Context pARGBIn;
299    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
300    M4OSA_UInt32 frameSize_argb=(width * height * 4);
301    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
302    M4OSA_UInt32 i = 0,j= 0;
303    M4OSA_ERR err=M4NO_ERROR;
304
305
306    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
307         M4VS, (M4OSA_Char*)"Image argb data");
308        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
309    if(pTmpData == M4OSA_NULL) {
310        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
311            Failed to allocate memory for Image clip");
312        return M4ERR_ALLOC;
313    }
314
315    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
316        width ,height);
317    /* Get file size (mandatory for chunk decoding) */
318    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
319    if(err != M4NO_ERROR)
320    {
321        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
322            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
323        free(pTmpData);
324        pTmpData = M4OSA_NULL;
325        goto cleanup;
326    }
327
328    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
329    if(err != M4NO_ERROR)
330    {
331        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888\
332             file %s, error: 0x%x\n",pFileIn, err);
333        pFileReadPtr->closeRead(pARGBIn);
334        free(pTmpData);
335        pTmpData = M4OSA_NULL;
336        goto cleanup;
337    }
338
339    err = pFileReadPtr->closeRead(pARGBIn);
340    if(err != M4NO_ERROR)
341    {
342        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
343             file %s, error: 0x%x\n",pFileIn, err);
344        free(pTmpData);
345        pTmpData = M4OSA_NULL;
346        goto cleanup;
347    }
348
349    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
350         (M4OSA_Char*)"Image clip RGB888 data");
351    if(rgbPlane1.pac_data == M4OSA_NULL)
352    {
353        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
354            Failed to allocate memory for Image clip");
355        free(pTmpData);
356        return M4ERR_ALLOC;
357    }
358
359        rgbPlane1.u_height = height;
360        rgbPlane1.u_width = width;
361        rgbPlane1.u_stride = width*3;
362        rgbPlane1.u_topleft = 0;
363
364
365    /** Remove the alpha channel */
366    for (i=0, j = 0; i < frameSize_argb; i++) {
367        if ((i % 4) == 0) continue;
368        rgbPlane1.pac_data[j] = pTmpData[i];
369        j++;
370    }
371        free(pTmpData);
372
373    /* To Check if resizing is required with color conversion */
374    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
375    {
376        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
377        frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
378        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
379             (M4OSA_Char*)"Image clip RGB888 data");
380        if(rgbPlane2.pac_data == M4OSA_NULL)
381        {
382            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
383            free(pTmpData);
384            return M4ERR_ALLOC;
385        }
386            rgbPlane2.u_height =  pImagePlanes->u_height;
387            rgbPlane2.u_width = pImagePlanes->u_width;
388            rgbPlane2.u_stride = pImagePlanes->u_width*3;
389            rgbPlane2.u_topleft = 0;
390
391        /* Resizing RGB888 to RGB888 */
392        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
393        if(err != M4NO_ERROR)
394        {
395            M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err);
396            free(rgbPlane2.pac_data);
397            free(rgbPlane1.pac_data);
398            return err;
399        }
400        /*Converting Resized RGB888 to YUV420 */
401        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
402        if(err != M4NO_ERROR)
403        {
404            M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
405            free(rgbPlane2.pac_data);
406            free(rgbPlane1.pac_data);
407            return err;
408        }
409            free(rgbPlane2.pac_data);
410            free(rgbPlane1.pac_data);
411
412            M4OSA_TRACE1_0("RGB to YUV done");
413
414
415    }
416    else
417    {
418        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
419        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
420        if(err != M4NO_ERROR)
421        {
422            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
423        }
424            free(rgbPlane1.pac_data);
425
426            M4OSA_TRACE1_0("RGB to YUV done");
427    }
428cleanup:
429    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
430    return err;
431}
432
433/**
434 ******************************************************************************
435 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
436 *                                             M4OSA_FileReadPointer* pFileReadPtr,
437 *                                                M4VIFI_ImagePlane* pImagePlanes,
438 *                                                 M4OSA_UInt32 width,
439 *                                                M4OSA_UInt32 height);
440 * @brief    It Coverts a ARGB8888 image to YUV420
441 * @note
442 * @param    pFileIn            (IN) The Image input file
443 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
444 * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user
445 *                            ARGB8888 image  will be converted and resized  to output
446 *                            YUV420 plane size
447 * @param    width        (IN) width of the ARGB8888
448 * @param    height            (IN) height of the ARGB8888
449 * @return    M4NO_ERROR:    No error
450 * @return    M4ERR_ALLOC: memory error
451 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
452 ******************************************************************************
453 */
454
455M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
456                                                 M4OSA_FileReadPointer* pFileReadPtr,
457                                                 M4VIFI_ImagePlane** pImagePlanes,
458                                                 M4OSA_UInt32 width,M4OSA_UInt32 height)
459{
460    M4OSA_ERR err = M4NO_ERROR;
461    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
462
463    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
464                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
465    if(yuvPlane == M4OSA_NULL) {
466        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
467            Failed to allocate memory for Image clip");
468        return M4ERR_ALLOC;
469    }
470    yuvPlane[0].u_height = height;
471    yuvPlane[0].u_width = width;
472    yuvPlane[0].u_stride = width;
473    yuvPlane[0].u_topleft = 0;
474    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
475        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
476
477    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
478    yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
479    yuvPlane[1].u_stride = yuvPlane[1].u_width;
480    yuvPlane[1].u_topleft = 0;
481    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
482        * yuvPlane[0].u_width);
483
484    yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
485    yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
486    yuvPlane[2].u_stride = yuvPlane[2].u_width;
487    yuvPlane[2].u_topleft = 0;
488    yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
489        * yuvPlane[1].u_width);
490    err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
491                                                          yuvPlane, width, height);
492    if(err != M4NO_ERROR)
493    {
494        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err);
495        free(yuvPlane);
496        return err;
497    }
498
499        *pImagePlanes = yuvPlane;
500
501    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
502    return err;
503
504}
505
506/**
507 ******************************************************************************
508 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
509 *                                        M4VIFI_ImagePlane* pImagePlanes,
510 *                                        M4OSA_UInt32* pPictureDuration);
511 * @brief    It feeds the PTO3GPP with YUV420 pictures.
512 * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
513 * @param    pContext    (IN) The integrator own context
514 * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
515 * @param    pPictureDuration(OUT) Duration of the returned picture
516 *
517 * @return    M4NO_ERROR:    No error
518 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
519 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
520 ******************************************************************************
521 */
522M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
523                                     M4OSA_Double* pPictureDuration)
524{
525    M4OSA_ERR err = M4NO_ERROR;
526    M4OSA_UInt8    last_frame_flag = 0;
527    M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
528
529    /*Used for pan&zoom*/
530    M4OSA_UInt8 tempPanzoomXa = 0;
531    M4OSA_UInt8 tempPanzoomXb = 0;
532    M4AIR_Params Params;
533    /**/
534
535    /*Used for cropping and black borders*/
536    M4OSA_Context    pPictureContext = M4OSA_NULL;
537    M4OSA_FilePosition    pictureSize = 0 ;
538    M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
539    //M4EXIFC_Context pExifContext = M4OSA_NULL;
540    M4EXIFC_BasicTags pBasicTags;
541    M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
542    M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
543    M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
544    /**/
545
546    /**
547     * Check input parameters */
548    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
549         "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
550    M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
551         "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
552    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
553         "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
554    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
555    /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
556    if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
557    {
558        pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
559    }
560
561    /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
562    if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
563    {
564        /**
565         * Convert and resize input ARGB8888 file to YUV420 */
566        /*To support ARGB8888 : */
567        M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and heght %d %d",
568            pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
569        err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
570             pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
571                pC->m_pPto3GPPparams->height);
572        if(err != M4NO_ERROR)
573        {
574            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err);
575            return err;
576        }
577    }
578    /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
579    else
580    {
581        /**
582         * Computes ratios */
583        if(pC->m_pDecodedPlane == M4OSA_NULL)
584        {
585            /**
586             * Convert input ARGB8888 file to YUV420 */
587             M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and heght %d %d",
588                pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
589            err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
590                &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
591            if(err != M4NO_ERROR)
592            {
593                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err);
594                if(pC->m_pDecodedPlane != M4OSA_NULL)
595                {
596                    /* YUV420 planar is returned but allocation is made only once
597                        (contigous planes in memory) */
598                    if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
599                    {
600                        free(pC->m_pDecodedPlane->pac_data);
601                    }
602                    free(pC->m_pDecodedPlane);
603                    pC->m_pDecodedPlane = M4OSA_NULL;
604                }
605                return err;
606            }
607        }
608
609        /*Initialize AIR Params*/
610        Params.m_inputCoord.m_x = 0;
611        Params.m_inputCoord.m_y = 0;
612        Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
613        Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
614        Params.m_outputSize.m_width = pImagePlanes->u_width;
615        Params.m_outputSize.m_height = pImagePlanes->u_height;
616        Params.m_bOutputStripe = M4OSA_FALSE;
617        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
618
619        /*Initialize Exif params structure*/
620        pBasicTags.orientation = M4COMMON_kOrientationUnknown;
621
622        /**
623        Pan&zoom params*/
624        if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
625        {
626            /*Save ratio values, they can be reused if the new ratios are 0*/
627            tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
628            tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
629            /*Check that the ratio is not 0*/
630            /*Check (a) parameters*/
631            if(pC->m_pPto3GPPparams->PanZoomXa == 0)
632            {
633                M4OSA_UInt8 maxRatio = 0;
634                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
635                     pC->m_pPto3GPPparams->PanZoomTopleftYa)
636                {
637                    /*The ratio is 0, that means the area of the picture defined with (a)
638                    parameters is bigger than the image size*/
639                    if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
640                    {
641                        /*The oversize is maxRatio*/
642                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
643                    }
644                }
645                else
646                {
647                    /*The ratio is 0, that means the area of the picture defined with (a)
648                     parameters is bigger than the image size*/
649                    if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
650                    {
651                        /*The oversize is maxRatio*/
652                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
653                    }
654                }
655                /*Modify the (a) parameters:*/
656                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
657                {
658                    /*The (a) topleft parameters can be moved to keep the same area size*/
659                    pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
660                }
661                else
662                {
663                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
664                    modified to match the image size*/
665                    pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
666                }
667                if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
668                {
669                    /*The (a) topleft parameters can be moved to keep the same area size*/
670                    pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
671                }
672                else
673                {
674                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
675                     modified to match the image size*/
676                    pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
677                }
678                /*The new ratio is the original one*/
679                pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
680                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
681                {
682                    /*Change the ratio if the area of the picture defined with (a) parameters is
683                    bigger than the image size*/
684                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
685                }
686                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
687                {
688                    /*Change the ratio if the area of the picture defined with (a) parameters is
689                    bigger than the image size*/
690                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
691                }
692            }
693            /*Check (b) parameters*/
694            if(pC->m_pPto3GPPparams->PanZoomXb == 0)
695            {
696                M4OSA_UInt8 maxRatio = 0;
697                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
698                     pC->m_pPto3GPPparams->PanZoomTopleftYb)
699                {
700                    /*The ratio is 0, that means the area of the picture defined with (b)
701                     parameters is bigger than the image size*/
702                    if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
703                    {
704                        /*The oversize is maxRatio*/
705                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
706                    }
707                }
708                else
709                {
710                    /*The ratio is 0, that means the area of the picture defined with (b)
711                     parameters is bigger than the image size*/
712                    if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
713                    {
714                        /*The oversize is maxRatio*/
715                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
716                    }
717                }
718                /*Modify the (b) parameters:*/
719                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
720                {
721                    /*The (b) topleft parameters can be moved to keep the same area size*/
722                    pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
723                }
724                else
725                {
726                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
727                     modified to match the image size*/
728                    pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
729                }
730                if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
731                {
732                    /*The (b) topleft parameters can be moved to keep the same area size*/
733                    pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
734                }
735                else
736                {
737                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
738                    modified to match the image size*/
739                    pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
740                }
741                /*The new ratio is the original one*/
742                pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
743                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
744                {
745                    /*Change the ratio if the area of the picture defined with (b) parameters is
746                    bigger than the image size*/
747                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
748                }
749                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
750                {
751                    /*Change the ratio if the area of the picture defined with (b) parameters is
752                    bigger than the image size*/
753                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
754                }
755            }
756
757            /**
758             * Computes AIR parameters */
759/*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
760            (pC->m_pPto3GPPparams->PanZoomTopleftXa +
761            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
762                - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
763            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
764        Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
765            (pC->m_pPto3GPPparams->PanZoomTopleftYa +
766            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
767                 - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
768            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
769
770        Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
771            (pC->m_pPto3GPPparams->PanZoomXa +
772            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
773            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
774
775        Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
776            (pC->m_pPto3GPPparams->PanZoomXa +
777            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
778            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
779 */
780            // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter
781            // will be x-1 max for x no. of frames
782            Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
783                (pC->m_pPto3GPPparams->PanZoomTopleftXa +
784                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
785                     - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
786                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
787            Params.m_inputCoord.m_y =
788                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
789                (pC->m_pPto3GPPparams->PanZoomTopleftYa +
790                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
791                     - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
792                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
793
794            Params.m_inputSize.m_width =
795                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
796                (pC->m_pPto3GPPparams->PanZoomXa +
797                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
798                     - pC->m_pPto3GPPparams->PanZoomXa) *
799                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
800
801            Params.m_inputSize.m_height =
802                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
803                (pC->m_pPto3GPPparams->PanZoomXa +
804                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
805                    - pC->m_pPto3GPPparams->PanZoomXa) *
806                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
807
808            if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
809                 > pC->m_pDecodedPlane->u_width)
810            {
811                Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
812                    - Params.m_inputCoord.m_x;
813            }
814
815            if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
816                 > pC->m_pDecodedPlane->u_height)
817            {
818                Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
819                     - Params.m_inputCoord.m_y;
820            }
821
822
823
824            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
825            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
826        }
827
828
829
830    /**
831        Picture rendering: Black borders*/
832
833        if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
834        {
835            memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
836                (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
837            memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
838                (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
839            memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
840                (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
841
842            /**
843            First without pan&zoom*/
844            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
845            {
846                switch(pBasicTags.orientation)
847                {
848                default:
849                case M4COMMON_kOrientationUnknown:
850                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
851                case M4COMMON_kOrientationTopLeft:
852                case M4COMMON_kOrientationTopRight:
853                case M4COMMON_kOrientationBottomRight:
854                case M4COMMON_kOrientationBottomLeft:
855                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
856                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
857                         //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
858                    {
859                        /*it is height so black borders will be on the top and on the bottom side*/
860                        Params.m_outputSize.m_width = pImagePlanes->u_width;
861                        Params.m_outputSize.m_height =
862                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
863                                * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
864                        /*number of lines at the top*/
865                        pImagePlanes[0].u_topleft =
866                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
867                                -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
868                        pImagePlanes[0].u_height = Params.m_outputSize.m_height;
869                        pImagePlanes[1].u_topleft =
870                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
871                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
872                        pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
873                        pImagePlanes[2].u_topleft =
874                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
875                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
876                        pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
877                    }
878                    else
879                    {
880                        /*it is width so black borders will be on the left and right side*/
881                        Params.m_outputSize.m_height = pImagePlanes->u_height;
882                        Params.m_outputSize.m_width =
883                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
884                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
885
886                        pImagePlanes[0].u_topleft =
887                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
888                                -Params.m_outputSize.m_width)>>1));
889                        pImagePlanes[0].u_width = Params.m_outputSize.m_width;
890                        pImagePlanes[1].u_topleft =
891                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
892                                -(Params.m_outputSize.m_width>>1)))>>1);
893                        pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
894                        pImagePlanes[2].u_topleft =
895                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
896                                -(Params.m_outputSize.m_width>>1)))>>1);
897                        pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
898                    }
899                    break;
900                case M4COMMON_kOrientationLeftTop:
901                case M4COMMON_kOrientationLeftBottom:
902                case M4COMMON_kOrientationRightTop:
903                case M4COMMON_kOrientationRightBottom:
904                        if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
905                             /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
906                             //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
907                        {
908                            /*it is height so black borders will be on the top and on
909                             the bottom side*/
910                            Params.m_outputSize.m_height = pImagePlanes->u_width;
911                            Params.m_outputSize.m_width =
912                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
913                                    * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
914                            /*number of lines at the top*/
915                            pImagePlanes[0].u_topleft =
916                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
917                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
918                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
919                            pImagePlanes[1].u_topleft =
920                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
921                                    -(Params.m_outputSize.m_width>>1)))>>1)\
922                                        *pImagePlanes[1].u_stride)+1;
923                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
924                            pImagePlanes[2].u_topleft =
925                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
926                                    -(Params.m_outputSize.m_width>>1)))>>1)\
927                                        *pImagePlanes[2].u_stride)+1;
928                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
929                        }
930                        else
931                        {
932                            /*it is width so black borders will be on the left and right side*/
933                            Params.m_outputSize.m_width = pImagePlanes->u_height;
934                            Params.m_outputSize.m_height =
935                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
936                                     * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
937
938                            pImagePlanes[0].u_topleft =
939                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
940                                    -Params.m_outputSize.m_height))>>1))+1;
941                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
942                            pImagePlanes[1].u_topleft =
943                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
944                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
945                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
946                            pImagePlanes[2].u_topleft =
947                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
948                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
949                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
950                        }
951                    break;
952                }
953            }
954
955            /**
956            Secondly with pan&zoom*/
957            else
958            {
959                switch(pBasicTags.orientation)
960                {
961                default:
962                case M4COMMON_kOrientationUnknown:
963                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
964                case M4COMMON_kOrientationTopLeft:
965                case M4COMMON_kOrientationTopRight:
966                case M4COMMON_kOrientationBottomRight:
967                case M4COMMON_kOrientationBottomLeft:
968                    /*NO ROTATION*/
969                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
970                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
971                            //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
972                    {
973                        /*Black borders will be on the top and bottom of the output video*/
974                        /*Maximum output height if the input image aspect ratio is kept and if
975                        the output width is the screen width*/
976                        M4OSA_UInt32 tempOutputSizeHeight =
977                            (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
978                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
979                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
980                        M4OSA_UInt32 tempFinalInputHeight = 0;
981                        /*The output width is the screen width*/
982                        Params.m_outputSize.m_width = pImagePlanes->u_width;
983                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
984
985                        /*Maximum input height according to the maximum output height
986                        (proportional to the maximum output height)*/
987                        tempInputSizeHeightMax = (pImagePlanes->u_height\
988                            *Params.m_inputSize.m_height)/tempOutputSizeHeight;
989                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
990
991                        /*Check if the maximum possible input height is contained into the
992                        input image height*/
993                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
994                        {
995                            /*The maximum possible input height is contained in the input
996                            image height,
997                            that means no black borders, the input pan zoom area will be extended
998                            so that the input AIR height will be the maximum possible*/
999                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
1000                                 <= Params.m_inputCoord.m_y
1001                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
1002                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
1003                                         + Params.m_inputSize.m_height))
1004                            {
1005                                /*The input pan zoom area can be extended symmetrically on the
1006                                top and bottom side*/
1007                                Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
1008                                    - Params.m_inputSize.m_height)>>1);
1009                            }
1010                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1011                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1012                            {
1013                                /*There is not enough place above the input pan zoom area to
1014                                extend it symmetrically,
1015                                so extend it to the maximum on the top*/
1016                                Params.m_inputCoord.m_y = 0;
1017                            }
1018                            else
1019                            {
1020                                /*There is not enough place below the input pan zoom area to
1021                                extend it symmetrically,
1022                                so extend it to the maximum on the bottom*/
1023                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
1024                                    - tempInputSizeHeightMax;
1025                            }
1026                            /*The input height of the AIR is the maximum possible height*/
1027                            Params.m_inputSize.m_height = tempInputSizeHeightMax;
1028                        }
1029                        else
1030                        {
1031                            /*The maximum possible input height is greater than the input
1032                            image height,
1033                            that means black borders are necessary to keep aspect ratio
1034                            The input height of the AIR is all the input image height*/
1035                            Params.m_outputSize.m_height =
1036                                (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
1037                                    /Params.m_inputSize.m_height;
1038                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1039                            Params.m_inputCoord.m_y = 0;
1040                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1041                            pImagePlanes[0].u_topleft =
1042                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1043                                    -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
1044                            pImagePlanes[0].u_height = Params.m_outputSize.m_height;
1045                            pImagePlanes[1].u_topleft =
1046                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1047                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1048                                        *pImagePlanes[1].u_stride);
1049                            pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
1050                            pImagePlanes[2].u_topleft =
1051                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1052                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1053                                        *pImagePlanes[2].u_stride);
1054                            pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
1055                        }
1056                    }
1057                    else
1058                    {
1059                        /*Black borders will be on the left and right side of the output video*/
1060                        /*Maximum output width if the input image aspect ratio is kept and if the
1061                         output height is the screen height*/
1062                        M4OSA_UInt32 tempOutputSizeWidth =
1063                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
1064                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
1065                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1066                        M4OSA_UInt32 tempFinalInputWidth = 0;
1067                        /*The output height is the screen height*/
1068                        Params.m_outputSize.m_height = pImagePlanes->u_height;
1069                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1070
1071                        /*Maximum input width according to the maximum output width
1072                        (proportional to the maximum output width)*/
1073                        tempInputSizeWidthMax =
1074                             (pImagePlanes->u_width*Params.m_inputSize.m_width)\
1075                                /tempOutputSizeWidth;
1076                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1077
1078                        /*Check if the maximum possible input width is contained into the input
1079                         image width*/
1080                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
1081                        {
1082                            /*The maximum possible input width is contained in the input
1083                            image width,
1084                            that means no black borders, the input pan zoom area will be extended
1085                            so that the input AIR width will be the maximum possible*/
1086                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
1087                                <= Params.m_inputCoord.m_x
1088                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
1089                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1090                                        + Params.m_inputSize.m_width))
1091                            {
1092                                /*The input pan zoom area can be extended symmetrically on the
1093                                     right and left side*/
1094                                Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
1095                                     - Params.m_inputSize.m_width)>>1);
1096                            }
1097                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1098                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1099                            {
1100                                /*There is not enough place above the input pan zoom area to
1101                                    extend it symmetrically,
1102                                so extend it to the maximum on the left*/
1103                                Params.m_inputCoord.m_x = 0;
1104                            }
1105                            else
1106                            {
1107                                /*There is not enough place below the input pan zoom area
1108                                    to extend it symmetrically,
1109                                so extend it to the maximum on the right*/
1110                                Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
1111                                    - tempInputSizeWidthMax;
1112                            }
1113                            /*The input width of the AIR is the maximum possible width*/
1114                            Params.m_inputSize.m_width = tempInputSizeWidthMax;
1115                        }
1116                        else
1117                        {
1118                            /*The maximum possible input width is greater than the input
1119                            image width,
1120                            which means black borders are necessary to keep the aspect ratio.
1121                            The input width of the AIR is the whole input image width*/
1122                            Params.m_outputSize.m_width =\
1123                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
1124                                    /Params.m_inputSize.m_width;
1125                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1126                            Params.m_inputCoord.m_x = 0;
1127                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1128                            pImagePlanes[0].u_topleft =
1129                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1130                                    -Params.m_outputSize.m_width)>>1));
1131                            pImagePlanes[0].u_width = Params.m_outputSize.m_width;
1132                            pImagePlanes[1].u_topleft =
1133                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1134                                    -(Params.m_outputSize.m_width>>1)))>>1);
1135                            pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
1136                            pImagePlanes[2].u_topleft =
1137                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1138                                    -(Params.m_outputSize.m_width>>1)))>>1);
1139                            pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
1140                        }
1141                    }
1142                    break;
1143                case M4COMMON_kOrientationLeftTop:
1144                case M4COMMON_kOrientationLeftBottom:
1145                case M4COMMON_kOrientationRightTop:
1146                case M4COMMON_kOrientationRightBottom:
1147                    /*ROTATION*/
1148                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1149                         /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
1150                         //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
1151                    {
1152                        /*Black borders will be on the left and right side of the output video*/
1153                        /*Maximum output height if the input image aspect ratio is kept and if
1154                        the output height is the screen width*/
1155                        M4OSA_UInt32 tempOutputSizeHeight =
1156                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1157                             /pC->m_pDecodedPlane->u_height);
1158                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1159                        M4OSA_UInt32 tempFinalInputHeight = 0;
1160                        /*The output height is the screen width and the output width is the screen height (rotated)*/
1161                        Params.m_outputSize.m_height = pImagePlanes->u_width;
1162                        Params.m_outputSize.m_width= pImagePlanes->u_height;
1163                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1164
1165                        /*Maximum input height according to the maximum output height
1166                             (proportional to the maximum output height)*/
1167                        tempInputSizeHeightMax =
1168                            (pImagePlanes->u_height*Params.m_inputSize.m_width)\
1169                                /tempOutputSizeHeight;
1170                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1171
1172                        /*Check whether the maximum possible input height fits within the
1173                             input image width (rotation taken into account)*/
1174                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
1175                        {
1176                            /*The maximum possible input height fits within the input
1177                            image width (rotation taken into account),
1178                            which means no black borders are needed: the input pan-zoom area is
1179                            extended so that the input AIR width is the maximum possible*/
1180                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
1181                                <= Params.m_inputCoord.m_x
1182                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
1183                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1184                                        + Params.m_inputSize.m_width))
1185                            {
1186                                /*The input pan zoom area can be extended symmetrically on the
1187                                 right and left side*/
1188                                Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
1189                                    - Params.m_inputSize.m_width)>>1);
1190                            }
1191                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1192                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1193                            {
1194                                /*There is not enough place on the left of the input pan
1195                                zoom area to extend it symmetrically,
1196                                so extend it to the maximum on the left*/
1197                                Params.m_inputCoord.m_x = 0;
1198                            }
1199                            else
1200                            {
1201                                /*There is not enough place on the right of the input pan zoom
1202                                 area to extend it symmetrically,
1203                                so extend it to the maximum on the right*/
1204                                Params.m_inputCoord.m_x =
1205                                     pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
1206                            }
1207                            /*The input width of the AIR is the maximum possible width*/
1208                            Params.m_inputSize.m_width = tempInputSizeHeightMax;
1209                        }
1210                        else
1211                        {
1212                            /*The maximum possible input height is greater than the input
1213                            image width (rotation taken into account),
1214                            which means black borders are necessary to keep the aspect ratio.
1215                            The input width of the AIR is the whole input image width*/
1216                            Params.m_outputSize.m_width =
1217                            (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
1218                                /Params.m_inputSize.m_width;
1219                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1220                            Params.m_inputCoord.m_x = 0;
1221                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1222                            pImagePlanes[0].u_topleft =
1223                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1224                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
1225                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
1226                            pImagePlanes[1].u_topleft =
1227                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1228                                -(Params.m_outputSize.m_width>>1)))>>1)\
1229                                    *pImagePlanes[1].u_stride)+1;
1230                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
1231                            pImagePlanes[2].u_topleft =
1232                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1233                                -(Params.m_outputSize.m_width>>1)))>>1)\
1234                                    *pImagePlanes[2].u_stride)+1;
1235                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
1236                        }
1237                    }
1238                    else
1239                    {
1240                        /*Black borders will be on the top and bottom of the output video*/
1241                        /*Maximum output width if the input image aspect ratio is kept and if
1242                         the output width is the screen height*/
1243                        M4OSA_UInt32 tempOutputSizeWidth =
1244                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
1245                             /pC->m_pDecodedPlane->u_width);
1246                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1247                        M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0;
1248                        /*The output width is the screen height and the output height is the screen width (rotated)*/
1249                        Params.m_outputSize.m_width = pImagePlanes->u_height;
1250                        Params.m_outputSize.m_height= pImagePlanes->u_width;
1251                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1252
1253                        /*Maximum input width according to the maximum output width
1254                         (proportional to the maximum output width)*/
1255                        tempInputSizeWidthMax =
1256                        (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
1257                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1258
1259                        /*Check whether the maximum possible input width fits within the input
1260                         image height (rotation taken into account)*/
1261                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
1262                        {
1263                            /*The maximum possible input width fits within the input
1264                             image height (rotation taken into account),
1265                            which means no black borders are needed: the input pan-zoom area is
1266                            extended so that the input AIR height is the maximum possible*/
1267                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
1268                                <= Params.m_inputCoord.m_y
1269                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
1270                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
1271                                        + Params.m_inputSize.m_height))
1272                            {
1273                                /*The input pan zoom area can be extended symmetrically on
1274                                the right and left side*/
1275                                Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
1276                                    - Params.m_inputSize.m_height)>>1);
1277                            }
1278                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1279                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1280                            {
1281                                /*There is not enough place on the top of the input pan zoom
1282                                area to extend it symmetrically,
1283                                so extend it to the maximum on the top*/
1284                                Params.m_inputCoord.m_y = 0;
1285                            }
1286                            else
1287                            {
1288                                /*There is not enough place on the bottom of the input pan zoom
1289                                 area to extend it symmetrically,
1290                                so extend it to the maximum on the bottom*/
1291                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
1292                                     - tempInputSizeWidthMax;
1293                            }
1294                            /*The input height of the AIR is the maximum possible height*/
1295                            Params.m_inputSize.m_height = tempInputSizeWidthMax;
1296                        }
1297                        else
1298                        {
1299                            /*The maximum possible input width is greater than the input
1300                             image height (rotation taken into account),
1301                            which means black borders are necessary to keep the aspect ratio.
1302                            The input height of the AIR is the whole input image height*/
1303                            Params.m_outputSize.m_height =
1304                                (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
1305                                    /Params.m_inputSize.m_height;
1306                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1307                            Params.m_inputCoord.m_y = 0;
1308                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1309                            pImagePlanes[0].u_topleft =
1310                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1311                                    -Params.m_outputSize.m_height))>>1))+1;
1312                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
1313                            pImagePlanes[1].u_topleft =
1314                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1315                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1316                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
1317                            pImagePlanes[2].u_topleft =
1318                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1319                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1320                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
1321                        }
1322                    }
1323                    break;
1324                }
1325            }
1326
1327            /*Width and height have to be even*/
1328            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1329            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1330            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
1331            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
1332            pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
1333            pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
1334            pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
1335            pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
1336            pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
1337            pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
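            /* Added note: (x>>1)<<1 clears the lowest bit, i.e. rounds a dimension down to
             the nearest even value (e.g. 177 -> 176). Even sizes are required because the
             YUV420 chroma planes are half the luma width and height. */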
1338
1339            /*Check that the values are consistent*/
1340            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
1341            {
1342                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
1343            }
1344            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
1345            {
1346                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
1347            }
1348        }
1349
1350        /**
1351        Picture rendering: Resizing and Cropping*/
1352        if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
1353        {
1354            switch(pBasicTags.orientation)
1355            {
1356            default:
1357            case M4COMMON_kOrientationUnknown:
1358                Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
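                /* Deliberate fall-through: an unknown orientation is rendered as TopLeft */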
1359            case M4COMMON_kOrientationTopLeft:
1360            case M4COMMON_kOrientationTopRight:
1361            case M4COMMON_kOrientationBottomRight:
1362            case M4COMMON_kOrientationBottomLeft:
1363                Params.m_outputSize.m_height = pImagePlanes->u_height;
1364                Params.m_outputSize.m_width = pImagePlanes->u_width;
1365                break;
1366            case M4COMMON_kOrientationLeftTop:
1367            case M4COMMON_kOrientationLeftBottom:
1368            case M4COMMON_kOrientationRightTop:
1369            case M4COMMON_kOrientationRightBottom:
1370                Params.m_outputSize.m_height = pImagePlanes->u_width;
1371                Params.m_outputSize.m_width = pImagePlanes->u_height;
1372                break;
1373            }
1374        }
1375
1376        /**
1377        Picture rendering: Cropping*/
1378        if(pC->m_mediaRendering == M4xVSS_kCropping)
1379        {
1380            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
1381                 /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
1382            {
1383                M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
1384                /*height will be cropped*/
1385                Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
1386                    * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
1387                Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
1388                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1389                {
1390                    Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
1391                        ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
1392                }
1393                else
1394                {
1395                    Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
1396                        ((tempHeight - Params.m_inputSize.m_height))>>1);
1397                }
1398            }
1399            else
1400            {
1401                M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
1402                /*width will be cropped*/
1403                Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
1404                    * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
1405                Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
1406                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1407                {
1408                    Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
1409                        ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
1410                }
1411                else
1412                {
1413                    Params.m_inputCoord.m_x += (M4OSA_Int32)\
1414                        (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
1415                }
1416            }
1417        }
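        /* Added worked example (illustrative numbers): with a 640x480 input area and a
         176x144 output, (144 * 640) / 176 = 523, which is not below 480, so the width is
         cropped: m_inputSize.m_width = (176 * 480) / 144 = 586 (already even), and the
         crop is centred with an x offset of (640 - 586) / 2 = 27. */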
1418
1419
1420
1421        /**
1422         * Call AIR functions */
1423        if(M4OSA_NULL == pC->m_air_context)
1424        {
1425            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
1426            if(err != M4NO_ERROR)
1427            {
1428                free(pC->m_pDecodedPlane[0].pac_data);
1429                free(pC->m_pDecodedPlane);
1430                pC->m_pDecodedPlane = M4OSA_NULL;
1431                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1432                     Error when initializing AIR: 0x%x", err);
1433                return err;
1434            }
1435        }
1436
1437        err = M4AIR_configure(pC->m_air_context, &Params);
1438        if(err != M4NO_ERROR)
1439        {
1440            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1441                 Error when configuring AIR: 0x%x", err);
1442            M4AIR_cleanUp(pC->m_air_context);
1443            free(pC->m_pDecodedPlane[0].pac_data);
1444            free(pC->m_pDecodedPlane);
1445            pC->m_pDecodedPlane = M4OSA_NULL;
1446            return err;
1447        }
1448
1449        err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
1450        if(err != M4NO_ERROR)
1451        {
1452            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
1453            M4AIR_cleanUp(pC->m_air_context);
1454            free(pC->m_pDecodedPlane[0].pac_data);
1455            free(pC->m_pDecodedPlane);
1456            pC->m_pDecodedPlane = M4OSA_NULL;
1457            return err;
1458        }
1459        pImagePlanes[0] = pImagePlanes1;
1460        pImagePlanes[1] = pImagePlanes2;
1461        pImagePlanes[2] = pImagePlanes3;
1462    }
1463
1464
1465    /**
1466     * Increment the image counter */
1467    pC->m_ImageCounter++;
1468
1469    /**
1470     * Check end of sequence */
1471    last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
1472
1473    /**
1474     * Keep the picture duration */
1475    *pPictureDuration = pC->m_timeDuration;
1476
1477    if (1 == last_frame_flag)
1478    {
1479        if(M4OSA_NULL != pC->m_air_context)
1480        {
1481            err = M4AIR_cleanUp(pC->m_air_context);
1482            if(err != M4NO_ERROR)
1483            {
1484                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
1485                return err;
1486            }
1487        }
1488        if(M4OSA_NULL != pC->m_pDecodedPlane)
1489        {
1490            free(pC->m_pDecodedPlane[0].pac_data);
1491            free(pC->m_pDecodedPlane);
1492            pC->m_pDecodedPlane = M4OSA_NULL;
1493        }
1494        return M4PTO3GPP_WAR_LAST_PICTURE;
1495    }
1496
1497    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
1498    return M4NO_ERROR;
1499}
1500
1501/**
1502 ******************************************************************************
1503 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1504 * @brief    This function initializes Pto3GPP with the given parameters
1505 * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
1506 *            context. This context contains a pointer on the current element
1507 *            of the chained list of Pto3GPP parameters.
1508 * @param    pContext    (IN) The integrator own context
1509 *
1510 * @return    M4NO_ERROR:    No error
1511 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
1512 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1513 ******************************************************************************
1514 */
1515M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1516{
1517    /************************************************************************/
1518    /* Definitions to generate dummy AMR file used to add AMR silence in files generated
1519     by Pto3GPP */
1520    #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
1521    /* This constant is defined in M4VSS3GPP_InternalConfig.h */
1522    extern const M4OSA_UInt8\
1523         M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
1524
1525    /* AMR file header ("#!AMR\n") written in front of the silence frame in the dummy AMR file */
1526    #define M4VSS3GPP_AMR_HEADER_SIZE 6
1527    const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_HEADER_SIZE] =
1528    { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
1529    /************************************************************************/
1530
1531    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1532    M4OSA_ERR err;
1533    M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
1534    M4PTO3GPP_Params Params;
1535     M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
1536    M4OSA_Bool cmpResult=M4OSA_FALSE;
1537    M4OSA_Context pDummyAMRFile;
1538    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1539    /*UTF conversion support*/
1540    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1541    M4OSA_UInt32 i;
1542
1543    /**
1544     * Create a M4PTO3GPP instance */
1545    err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
1546         xVSS_context->pFileWritePtr);
1547    if (err != M4NO_ERROR)
1548    {
1549        M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err);
1550        return err;
1551    }
1552
1553    /* replay recorded external encoder registrations on the PTO3GPP */
1554    for (i=0; i<M4VE_kEncoderType_NB; i++)
1555    {
1556        if (xVSS_context->registeredExternalEncs[i].registered)
1557        {
1558            err = M4PTO3GPP_RegisterExternalVideoEncoder(pM4PTO3GPP_Ctxt, i,
1559                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
1560                    xVSS_context->registeredExternalEncs[i].pUserData);
1561            if (M4NO_ERROR != err)
1562            {
1563                M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1564                     M4PTO3GPP_registerExternalVideoEncoder() returns 0x%x!", err);
1565                M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1566                return err;
1567            }
1568        }
1569    }
1570
1571    pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt),
1572         M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
1573    if(pCallBackCtxt == M4OSA_NULL)
1574    {
1575        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
1576        return M4ERR_ALLOC;
1577    }
1578
1579    Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
1580    Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
1581
1582    /**
1583     * Generate "dummy" amr file containing silence in temporary folder */
1584    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1585    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1586
1587    /**
1588     * UTF conversion: convert the temporary path into the customer format*/
1589    pDecodedPath = out_amr;
1590
1591    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1592            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1593    {
1594        M4OSA_UInt32 length = 0;
1595        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1596             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1597        if(err != M4NO_ERROR)
1598        {
1599            M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1600                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1601            return err;
1602        }
1603        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1604    }
1605
1606    /**
1607    * End of the conversion, now use the converted path*/
1608
1609    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
1610
1611    /*Commented because of the use of the UTF conversion see above*/
1612/*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
1613 */
1614    if(err != M4NO_ERROR)
1615    {
1616        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
1617             error: 0x%x\n",out_amr, err);
1618        return err;
1619    }
1620
1621    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1622        (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
1623    if(err != M4NO_ERROR)
1624    {
1625        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
1626             error: 0x%x\n",out_amr, err);
1627        return err;
1628    }
1629
1630    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1631         (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
1632    if(err != M4NO_ERROR)
1633    {
1634        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1635            Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
1636        return err;
1637    }
1638
1639    err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
1640    if(err != M4NO_ERROR)
1641    {
1642        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1643            Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
1644        return err;
1645    }
1646
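    /* Resulting dummy.amr layout (derived from the writes above):
         bytes 0..5  : "#!AMR\n"  -- the AMR-NB file magic
         bytes 6..18 : one 13-byte AMR-NB silence access unit
       Note that with the fix below the file is not used as the Pto3GPP audio track. */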
1647    /**
1648     * Fill parameters for Pto3GPP with the parameters contained in the current element of the
1649     * Pto3GPP parameters chained list and with default parameters */
1650/*+ New Encoder bitrates */
1651    if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
1652        Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
1653    }
1654    else {
1655          Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
1656    }
1657    M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
1658        Params.OutputVideoBitrate);
1659/*- New Encoder bitrates */
1660    Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
1661    Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
1662    Params.pPictureCallbackCtxt    = pCallBackCtxt;
1663    /*FB: changed to use the converted path (UTF conversion); see the conversion above*/
1664    /*Fix: when adding an audio track with an image as input, the audio track file is set to NULL*/
1665    Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
1666    Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
1667    Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
1668    Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
1669    Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
1670    /*+PR No:  blrnxpsw#223*/
1671    /*Increase the frame frequency: number of frames = duration / frame duration
      (the framerate field holds the duration of one frame) */
1672    /*Other changes are in M4xVSS_API.c, line 3841, in M4xVSS_SendCommand*/
1673    /*The 'if' check for PanZoom has been removed */
1674    Params.NbVideoFrames            = (M4OSA_UInt32)
1675        (xVSS_context->pPTo3GPPcurrentParams->duration \
1676            / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
1677    pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
1678    /*-PR No:  blrnxpsw#223*/
1679    pCallBackCtxt->m_ImageCounter    = 0;
1680    pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
1681    pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
1682    pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
1683    pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
1684    pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
1685    pCallBackCtxt->m_air_context    = M4OSA_NULL;
1686    pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
1687
1688    /**
1689     * Set the input and output files */
1690    err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
1691    if (err != M4NO_ERROR)
1692    {
1693        M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
1694        if(pCallBackCtxt != M4OSA_NULL)
1695        {
1696            free(pCallBackCtxt);
1697            pCallBackCtxt = M4OSA_NULL;
1698        }
1699        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1700        return err;
1701    }
1702
1703    /**
1704     * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
1705    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
1706    xVSS_context->pCallBackCtxt = pCallBackCtxt;
1707
1708    return M4NO_ERROR;
1709}
1710
1711/**
1712 ******************************************************************************
1713 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1714 * @brief    This function cleans up Pto3GPP
1715 * @note
1716 * @param    pContext    (IN) The integrator own context
1717 *
1718 * @return    M4NO_ERROR:    No error
1719 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1720 ******************************************************************************
1721 */
1722M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1723{
1724    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1725    M4OSA_ERR err;
1726    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1727    /*UTF conversion support*/
1728    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1729
1730    /**
1731    * Free the PTO3GPP callback context */
1732    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
1733    {
1734        free(xVSS_context->pCallBackCtxt);
1735        xVSS_context->pCallBackCtxt = M4OSA_NULL;
1736    }
1737
1738    /**
1739     * Finalize the output file */
1740    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
1741    if (err != M4NO_ERROR)
1742    {
1743        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
1744        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1745        return err;
1746    }
1747
1748    /**
1749     * Free this M4PTO3GPP instance */
1750    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1751    if (err != M4NO_ERROR)
1752    {
1753        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
1754        return err;
1755    }
1756
1757    /**
1758     * Remove dummy.amr file */
1759    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1760    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1761
1762    /**
1763     * UTF conversion: convert the temporary path into the customer format*/
1764    pDecodedPath = out_amr;
1765
1766    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1767            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1768    {
1769        M4OSA_UInt32 length = 0;
1770        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1771             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1772        if(err != M4NO_ERROR)
1773        {
1774            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
1775                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1776            return err;
1777        }
1778        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1779    }
1780    /**
1781    * End of the conversion, now use the decoded path*/
1782    remove((const char *)pDecodedPath);
1783
1784    /*Commented because of the use of the UTF conversion*/
1785/*    remove(out_amr);
1786 */
1787
1788    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
1789    xVSS_context->pCallBackCtxt = M4OSA_NULL;
1790
1791    return M4NO_ERROR;
1792}
1793
1794/**
1795 ******************************************************************************
1796 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1797 * @brief    This function converts an RGB565 plane to YUV420 planar
1798 * @note    It is used only for framing effect
1799 *            It allocates output YUV planes
1800 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
1801 *
1802 * @return    M4NO_ERROR:    No error
1803 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1804 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1805 ******************************************************************************
1806 */
1807M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1808{
1809    M4OSA_ERR err;
1810
1811    /**
1812     * Allocate output YUV planes */
1813    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
1814         M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
1815    if(framingCtx->FramingYuv == M4OSA_NULL)
1816    {
1817        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1818        return M4ERR_ALLOC;
1819    }
1820    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
1821    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
1822    framingCtx->FramingYuv[0].u_topleft = 0;
1823    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
1824    framingCtx->FramingYuv[0].pac_data =
1825         (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
1826            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
1827                "Alloc for the conversion output YUV");
1828    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
1829    {
1830        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1831        return M4ERR_ALLOC;
1832    }
1833    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
1834    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
1835    framingCtx->FramingYuv[1].u_topleft = 0;
1836    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1837    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
1838        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
1839    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
1840    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
1841    framingCtx->FramingYuv[2].u_topleft = 0;
1842    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1843    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
1844        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
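    /* Added note: the three planes share the single (width*height*3)/2 byte buffer allocated
     above for Y, laid out back to back:
         pac_data             : Y plane, width   x height
         pac_data + w*h       : U plane, width/2 x height/2
         pac_data + 5*w*h/4   : V plane, width/2 x height/2 */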
1845
1846    /**
1847     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
1848      effect */
1849    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
1850    if(err != M4NO_ERROR)
1851    {
1852        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
1853             error when converting from RGB to YUV: 0x%x\n", err);
1854    }
1855
1856    framingCtx->duration = 0;
1857    framingCtx->previousClipTime = -1;
1858    framingCtx->previewOffsetClipTime = -1;
1859
1860    /**
1861     * Only one element in the chained list (no animated image with RGB buffer...) */
1862    framingCtx->pCurrent = framingCtx;
1863    framingCtx->pNext = framingCtx;
1864
1865    return M4NO_ERROR;
1866}
1867
1868M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
1869{
1870    M4OSA_UInt32 i;
1871    M4OSA_UInt8* plane = planeIn;
1872    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1873    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
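    /* With TRANSPARENT_COLOR == 0x7E0 (pure green in RGB565: R=0, G=0x3F, B=0),
     transparent1 == 0x07 and transparent2 == 0xE0; each 16-bit pixel is written
     high byte first. */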
1874
1875    for(i=0; i<(size>>1); i++)
1876    {
1877        *plane++ = transparent1;
1878        *plane++ = transparent2;
1879    }
1880
1881    return M4NO_ERROR;
1882}
1883
1884
1885/**
1886 ******************************************************************************
1887 * prototype M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1888 *                                                M4VSS3GPP_EffectSettings* pEffect,
1889 *                                                M4xVSS_FramingStruct* framingCtx,
1890                                                  M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
1891 *
1892 * @brief    This function converts an ARGB8888 input file to YUV420 when used for the framing effect
1893 * @note    The input ARGB8888 file path is contained in the pEffect structure.
1894 *            If the ARGB8888 image must be resized to fit the output video size, this function
1895 *            will do it.
1896 * @param    pContext    (IN) The integrator own context
1897 * @param    pEffect        (IN) The effect structure containing all information about
1898 *                        the file to decode, resizing, ...
1899 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
1900 *
1901 * @return    M4NO_ERROR:    No error
1902 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1903 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1904 * @return    M4ERR_FILE_NOT_FOUND: File not found.
1905 ******************************************************************************
1906 */
1907
1908
1909M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1910                                                               M4VSS3GPP_EffectSettings* pEffect,
1911                                                               M4xVSS_FramingStruct* framingCtx,
1912                                                               M4VIDEOEDITING_VideoFrameSize\
1913                                                               OutputVideoResolution)
1914{
1915    M4OSA_ERR err = M4NO_ERROR;
1916    M4OSA_Context pARGBIn;
1917    M4OSA_UInt32 file_size;
1918    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1919    M4OSA_UInt32 width, height, width_out, height_out;
1920    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
1921    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1922    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1923    /*UTF conversion support*/
1924    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1925    M4OSA_UInt32 i = 0,j = 0;
1926    M4VIFI_ImagePlane rgbPlane;
1927    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
1928    M4OSA_UInt32 frameSize;
1929    M4OSA_UInt32 tempAlphaPercent = 0;
1930    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
1931    M4OSA_UInt16 *ptr = M4OSA_NULL;
1932    M4OSA_UInt32 z = 0;
1933
1934    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
1935
1936    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
1937        framingCtx->width,framingCtx->height);
1938
1939    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
1940        "Image argb data");
1941    if(pTmpData == M4OSA_NULL) {
1942        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1943        return M4ERR_ALLOC;
1944    }
1945    /**
1946     * UTF conversion: convert the file path into the customer format*/
1947    pDecodedPath = pFile;
1948
1949    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1950            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1951    {
1952        M4OSA_UInt32 length = 0;
1953        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
1954             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1955        if(err != M4NO_ERROR)
1956        {
1957            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
1958                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1959            free(pTmpData);
1960            pTmpData = M4OSA_NULL;
1961            return err;
1962        }
1963        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1964    }
1965
1966    /**
1967    * End of the conversion, now use the decoded path*/
1968
1969     /* Open input ARGB8888 file and store it into memory */
1970    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
1971
1972    if(err != M4NO_ERROR)
1973    {
1974        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1975        free(pTmpData);
1976        pTmpData = M4OSA_NULL;
1977        return err;
1978    }
1979
1980    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
1981    if(err != M4NO_ERROR)
1982    {
1983        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1984        free(pTmpData);
1985        pTmpData = M4OSA_NULL;
1986        return err;
1987    }
1988
1989
1990    err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
1991    if(err != M4NO_ERROR)
1992    {
1993        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
1994        free(pTmpData);
1995        pTmpData = M4OSA_NULL;
1996        return err;
1997    }
1998
1999
2000    rgbPlane.u_height = framingCtx->height;
2001    rgbPlane.u_width = framingCtx->width;
2002    rgbPlane.u_stride = rgbPlane.u_width*3;
2003    rgbPlane.u_topleft = 0;
2004
2005    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
2006    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
2007         M4VS, (M4OSA_Char*)"Image clip RGB888 data");
2008    if(rgbPlane.pac_data == M4OSA_NULL)
2009    {
2010        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
2011        free(pTmpData);
2012        return M4ERR_ALLOC;
2013    }
2014
2015    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2016          Remove the alpha channel  ");
2017
2018    /* Strip the alpha channel from the ARGB data; fully transparent pixels are flagged below
        by forcing their green component to 255 */
2019    for (i=0, j = 0; i < frameSize_argb; i += 4) {
2020        /* this is alpha value */
2021        if ((i % 4) == 0)
2022        {
2023            tempAlphaPercent = pTmpData[i];
2024        }
2025
2026        /* R */
2027        rgbPlane.pac_data[j] = pTmpData[i+1];
2028        j++;
2029
2030        /* G */
2031        if (tempAlphaPercent > 0) {
2032            rgbPlane.pac_data[j] = pTmpData[i+2];
2033            j++;
2034        } else {/* In case of alpha value 0, set GREEN to 255 */
2035            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
2036            j++;
2037        }
2038
2039        /* B */
2040        rgbPlane.pac_data[j] = pTmpData[i+3];
2041        j++;
2042    }
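    /* Added note: the input is read as A,R,G,B per 32-bit pixel, so after this loop
     j == 3 * (frameSize_argb / 4), i.e. the size in bytes of the RGB888 buffer. */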
2043
2044    free(pTmpData);
2045    pTmpData = M4OSA_NULL;
2046
2047    /* convert RGB888 to RGB565 */
2048
2049    /* allocate temp RGB 565 buffer */
2050    TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
2051                       (4 * (framingCtx->width + framingCtx->height + 1)),
2052                        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
2053    if (TempPacData == M4OSA_NULL) {
2054        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
2055        free(rgbPlane.pac_data);
2056        return M4ERR_ALLOC;
2057    }
2058
2059    ptr = (M4OSA_UInt16 *)TempPacData;
2060    z = 0;
2061
2062    for (i = 0; i < j ; i += 3)
2063    {
2064        ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
2065                                    rgbPlane.pac_data[i+1],
2066                                    rgbPlane.pac_data[i+2]);
2067    }
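    /* PACK_RGB565 is defined in M4VIFI_Defines.h; as a rough sketch (assumption, the real
     macro may handle its first argument differently), the 5-6-5 packing is:
         ptr[z] = (M4OSA_UInt16)(((R >> 3) << 11) | ((G >> 2) << 5) | (B >> 3));
     i.e. each RGB888 triplet becomes one 16-bit RGB565 sample. */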
2068
2069    /* free the RGB888 buffer and point rgbPlane at the RGB565 buffer */
2070    free(rgbPlane.pac_data);
2071    rgbPlane.pac_data = TempPacData;
2072
2073    /**
2074     * Check if output sizes are odd */
2075    if(rgbPlane.u_height % 2 != 0)
2076    {
2077        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
2078        M4OSA_UInt32 i;
2079        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2080             output height is odd  ");
2081        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
2082
2083        for(i=0;i<rgbPlane.u_width;i++)
2084        {
2085            *output_pac_data++ = transparent1;
2086            *output_pac_data++ = transparent2;
2087        }
2088
2089        /**
2090         * We just add one extra line filled with the transparency color */
2091        rgbPlane.u_height++;
2092    }
2093    if(rgbPlane.u_width % 2 != 0)
2094    {
2095        /**
2096         * We add a new column filled with the transparency color, which requires copying every RGB line ... */
2097        M4OSA_UInt32 i;
2098        M4VIFI_UInt8* newRGBpac_data;
2099        M4VIFI_UInt8* output_pac_data, *input_pac_data;
2100
2101        rgbPlane.u_width++;
2102        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2103             output width is odd  ");
2104        /**
2105         * We need to allocate a new RGB output buffer in which all decoded data
2106          + white line will be copied */
2107        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
2108            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");
2109
2110        if(newRGBpac_data == M4OSA_NULL)
2111        {
2112            M4OSA_TRACE1_0("Allocation error in \
2113                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2114            free(rgbPlane.pac_data);
2115            return M4ERR_ALLOC;
2116        }
2117
2118        output_pac_data= newRGBpac_data;
2119        input_pac_data = rgbPlane.pac_data;
2120
2121        for(i=0;i<rgbPlane.u_height;i++)
2122        {
2123            memcpy((void *)output_pac_data, (void *)input_pac_data,
2124                 (rgbPlane.u_width-1)*2);
2125
2126            output_pac_data += ((rgbPlane.u_width-1)*2);
2127            /* Put the pixel to transparency color */
2128            *output_pac_data++ = transparent1;
2129            *output_pac_data++ = transparent2;
2130
2131            input_pac_data += ((rgbPlane.u_width-1)*2);
2132        }
2133        free(rgbPlane.pac_data);
2134        rgbPlane.pac_data = newRGBpac_data;
2135    }
2136
2137    /* reset stride */
2138    rgbPlane.u_stride = rgbPlane.u_width*2;
2139
2140    /**
2141     * Initialize chained list parameters */
2142    framingCtx->duration = 0;
2143    framingCtx->previousClipTime = -1;
2144    framingCtx->previewOffsetClipTime = -1;
2145
2146    /**
2147     * Only one element in the chained list (no animated image ...) */
2148    framingCtx->pCurrent = framingCtx;
2149    framingCtx->pNext = framingCtx;
2150
2151    /**
2152     * Get output width/height */
2153     switch(OutputVideoResolution)
2154    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
2155    {
2156    case M4VIDEOEDITING_kSQCIF:
2157        width_out = 128;
2158        height_out = 96;
2159        break;
2160    case M4VIDEOEDITING_kQQVGA:
2161        width_out = 160;
2162        height_out = 120;
2163        break;
2164    case M4VIDEOEDITING_kQCIF:
2165        width_out = 176;
2166        height_out = 144;
2167        break;
2168    case M4VIDEOEDITING_kQVGA:
2169        width_out = 320;
2170        height_out = 240;
2171        break;
2172    case M4VIDEOEDITING_kCIF:
2173        width_out = 352;
2174        height_out = 288;
2175        break;
2176    case M4VIDEOEDITING_kVGA:
2177        width_out = 640;
2178        height_out = 480;
2179        break;
2180    case M4VIDEOEDITING_kWVGA:
2181        width_out = 800;
2182        height_out = 480;
2183        break;
2184    case M4VIDEOEDITING_kNTSC:
2185        width_out = 720;
2186        height_out = 480;
2187        break;
2188    case M4VIDEOEDITING_k640_360:
2189        width_out = 640;
2190        height_out = 360;
2191        break;
2192    case M4VIDEOEDITING_k854_480:
2193        // StageFright encoders require %16 resolution
2194        width_out = M4ENCODER_854_480_Width;
2195        height_out = 480;
2196        break;
2197    case M4VIDEOEDITING_kHD1280:
2198        width_out = 1280;
2199        height_out = 720;
2200        break;
2201    case M4VIDEOEDITING_kHD1080:
2202        // StageFright encoders require %16 resolution
2203        width_out = M4ENCODER_HD1080_Width;
2204        height_out = 720;
2205        break;
2206    case M4VIDEOEDITING_kHD960:
2207        width_out = 960;
2208        height_out = 720;
2209        break;
2210
2211    /**
2212     * If the output video size is not given, default to QCIF;
2213     * this should not happen because it is already checked in M4xVSS_sendCommand */
2214    default:
2215        width_out = 176;
2216        height_out = 144;
2217        break;
2218    }
2219
2220    /**
2221     * Allocate output planes structures */
2222    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
2223         (M4OSA_Char *)"Framing Output plane RGB");
2224    if(framingCtx->FramingRgb == M4OSA_NULL)
2225    {
2226        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2227        return M4ERR_ALLOC;
2228    }
2229    /**
2230     * Resize RGB if needed */
2231    if((pEffect->xVSS.bResize) &&
2232         (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
2233    {
2234        width = width_out;
2235        height = height_out;
2236
2237        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2238             New Width and height %d %d  ",width,height);
2239
2240        framingCtx->FramingRgb->u_height = height_out;
2241        framingCtx->FramingRgb->u_width = width_out;
2242        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2243        framingCtx->FramingRgb->u_topleft = 0;
2244
2245        framingCtx->FramingRgb->pac_data =
2246             (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
2247                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
2248                  (M4OSA_Char *)"Framing Output pac_data RGB");
2249
2250        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
2251        {
2252            M4OSA_TRACE1_0("Allocation error in \
2253                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2254            free(framingCtx->FramingRgb);
2255            free(rgbPlane.pac_data);
2256            return M4ERR_ALLOC;
2257        }
2258
2259        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
2260        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2261              rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
2262
2263        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2264        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2265
2266        if(err != M4NO_ERROR)
2267        {
2268            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
2269                when resizing RGB plane: 0x%x\n", err);
2270            return err;
2271        }
2272
2273        if(rgbPlane.pac_data != M4OSA_NULL)
2274        {
2275            free(rgbPlane.pac_data);
2276            rgbPlane.pac_data = M4OSA_NULL;
2277        }
2278    }
2279    else
2280    {
2281
2282        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2283              Resizing Not Needed ");
2284
2285        width = rgbPlane.u_width;
2286        height = rgbPlane.u_height;
2287        framingCtx->FramingRgb->u_height = height;
2288        framingCtx->FramingRgb->u_width = width;
2289        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2290        framingCtx->FramingRgb->u_topleft = 0;
2291        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
2292    }
2293
2294
2295    if(pEffect->xVSS.bResize)
2296    {
2297        /**
2298         * Force topleft to 0 for pure framing effect */
2299        framingCtx->topleft_x = 0;
2300        framingCtx->topleft_y = 0;
2301    }
2302
2303
2304    /**
2305     * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
2306     effect */
2307    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
2308         (M4OSA_Char *)"Framing Output plane YUV");
2309    if(framingCtx->FramingYuv == M4OSA_NULL)
2310    {
2311        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2312        free(framingCtx->FramingRgb->pac_data);
2313        return M4ERR_ALLOC;
2314    }
2315
2316    // Alloc for Y, U and V planes
2317    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
2318    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
2319    framingCtx->FramingYuv[0].u_topleft = 0;
2320    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
2321    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
2322        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
2323            (M4OSA_Char *)"Alloc for the output Y");
2324    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
2325    {
2326        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2327        free(framingCtx->FramingYuv);
2328        free(framingCtx->FramingRgb->pac_data);
2329        return M4ERR_ALLOC;
2330    }
2331    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
2332    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
2333    framingCtx->FramingYuv[1].u_topleft = 0;
2334    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
2335
2336
2337    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2338        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
2339        (M4OSA_Char *)"Alloc for the output U");
2340    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
2341        free(framingCtx->FramingYuv[0].pac_data);
2342        free(framingCtx->FramingYuv);
2343        free(framingCtx->FramingRgb->pac_data);
2344        return M4ERR_ALLOC;
2345    }
2346
2347    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
2348    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
2349    framingCtx->FramingYuv[2].u_topleft = 0;
2350    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
2351
2352
2353    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
2354        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[2].u_height, M4VS,
2355        (M4OSA_Char *)"Alloc for the output V");
2356    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
2357        free(framingCtx->FramingYuv[1].pac_data);
2358        free(framingCtx->FramingYuv[0].pac_data);
2359        free(framingCtx->FramingYuv);
2360        free(framingCtx->FramingRgb->pac_data);
2361        return M4ERR_ALLOC;
2362    }
2363
2364    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2365        convert RGB to YUV ");
2366
2367    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2368    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2369
2370    if (err != M4NO_ERROR)
2371    {
2372        M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
2373    }
2374    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
2375    return err;
2376}
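/* Note: the FramingRgb buffer and the three FramingYuv planes allocated above stay owned by
   framingCtx once this function succeeds; they are released later, when the effect settings
   are cleaned up in M4xVSS_freeSettings(). */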
2377
2378/**
2379 ******************************************************************************
2380 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2381 *
2382 * @brief    This function prepares VSS for editing
2383 * @note    It also sets the special xVSS effects as external effects for the VSS
2384 * @param    pContext    (IN) The integrator own context
2385 *
2386 * @return    M4NO_ERROR:    No error
2387 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2388 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2389 ******************************************************************************
2390 */
2391M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2392{
2393    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2394    M4VSS3GPP_EditContext pVssCtxt;
2395    M4OSA_UInt32 i,j;
2396    M4OSA_ERR err;
2397
2398    /**
2399     * Create a VSS 3GPP edition instance */
2400    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2401    if (err != M4NO_ERROR)
2402    {
2403        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
2404            err);
2405        M4VSS3GPP_editCleanUp(pVssCtxt);
2406        return err;
2407    }
2408
2409#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS
2410    /* replay recorded external decoder registrations on the VSS3GPP */
2411    for (i=0; i<M4VD_kVideoType_NB; i++)
2412    {
2413        if (xVSS_context->registeredExternalDecs[i].registered)
2414        {
2415            err = M4VSS3GPP_editRegisterExternalVideoDecoder(pVssCtxt, i,
2416                    xVSS_context->registeredExternalDecs[i].pDecoderInterface,
2417                    xVSS_context->registeredExternalDecs[i].pUserData);
2418            if (M4NO_ERROR != err)
2419            {
2420                M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: \
2421                    M4VSS3GPP_editRegisterExternalVideoDecoder() returns 0x%x!", err);
2422                M4VSS3GPP_editCleanUp(pVssCtxt);
2423                return err;
2424            }
2425        }
2426    }
2427#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */
2428
2429    /* replay recorded external encoder registrations on the VSS3GPP */
2430    for (i=0; i<M4VE_kEncoderType_NB; i++)
2431    {
2432        if (xVSS_context->registeredExternalEncs[i].registered)
2433        {
2434            err = M4VSS3GPP_editRegisterExternalVideoEncoder(pVssCtxt, i,
2435                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
2436                    xVSS_context->registeredExternalEncs[i].pUserData);
2437            if (M4NO_ERROR != err)
2438            {
2439                M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2440                     M4VSS3GPP_editRegisterExternalVideoEncoder() returns 0x%x!", err);
2441                M4VSS3GPP_editCleanUp(pVssCtxt);
2442                return err;
2443            }
2444        }
2445    }
2446
2447    /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
2448    if(xVSS_context->targetedBitrate != 0)
2449    {
2450        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2451
2452        pVSSContext->bIsMMS = M4OSA_TRUE;
2453        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
2454        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
2455    }
2456
2457    /*Warning: since the addition of the UTF conversion, pSettings has been replaced in the
2458    following part by pCurrentEditSettings (there is a dedicated current edit settings
2459     structure for the saving, as for the preview)*/
2460
2461    /**
2462     * Set the external video effect functions, for saving mode (to be moved to
2463      M4xVSS_saveStart() ?)*/
2464    for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2465    {
2466        for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
2467        {
2468            if (M4xVSS_kVideoEffectType_BlackAndWhite ==
2469            xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2470            {
2471                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2472                 M4VSS3GPP_externalVideoEffectColor;
2473                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2474                // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
2475                /*commented FB*/
2476                /**
2477                 * We do not need to set the color context, it is already set
2478                 during sendCommand function */
2479            }
2480            if (M4xVSS_kVideoEffectType_Pink ==
2481                xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2482            {
2483                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2484                 M4VSS3GPP_externalVideoEffectColor;
2485                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2486                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
2487                // use any function context */
2488                /*commented FB*/
2489                /**
2490                 * We do not need to set the color context,
2491                  it is already set during sendCommand function */
2492            }
2493            if (M4xVSS_kVideoEffectType_Green ==
2494                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2495            {
2496                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2497                    M4VSS3GPP_externalVideoEffectColor;
2498                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2499                    // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
2500                     /**< we don't use any function context */
2501                /*commented FB*/
2502                /**
2503                 * We do not need to set the color context, it is already set during
2504                  sendCommand function */
2505            }
2506            if (M4xVSS_kVideoEffectType_Sepia ==
2507                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2508            {
2509                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2510                 M4VSS3GPP_externalVideoEffectColor;
2511                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2512                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
2513                /**< we don't use any function context */
2514                /*commented FB*/
2515                /**
2516                 * We do not need to set the color context, it is already set during
2517                 sendCommand function */
2518            }
2519            if (M4xVSS_kVideoEffectType_Fifties ==
2520             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2521            {
2522                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2523                 M4VSS3GPP_externalVideoEffectFifties;
2524                /**
2525                 * We do not need to set the framing context, it is already set during
2526                 sendCommand function */
2527            }
2528            if (M4xVSS_kVideoEffectType_Negative ==
2529             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2530            {
2531                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2532                 M4VSS3GPP_externalVideoEffectColor;
2533                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2534                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
2535                 /**< we don't use any function context */
2536                /*commented FB*/
2537                /**
2538                 * We do not need to set the color context, it is already set during
2539                  sendCommand function */
2540            }
2541            if (M4xVSS_kVideoEffectType_Framing ==
2542             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2543            {
2544                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2545                 M4VSS3GPP_externalVideoEffectFraming;
2546                /**
2547                 * We do not need to set the framing context, it is already set during
2548                 sendCommand function */
2549            }
2550            if (M4xVSS_kVideoEffectType_ZoomIn ==
2551             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2552            {
2553                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2554                 M4VSS3GPP_externalVideoEffectZoom;
2555                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2556                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
2557                 function context */
2558            }
2559            if (M4xVSS_kVideoEffectType_ZoomOut ==
2560             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2561            {
2562                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2563                 M4VSS3GPP_externalVideoEffectZoom;
2564                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2565                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
2566                 function context */
2567            }
2568            if (M4xVSS_kVideoEffectType_ColorRGB16 ==
2569             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2570            {
2571                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2572                 M4VSS3GPP_externalVideoEffectColor;
2573                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2574                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2575                /**< we don't use any function context */
2576                /**
2577                 * We do not need to set the color context, it is already set during
2578                 sendCommand function */
2579            }
2580            if (M4xVSS_kVideoEffectType_Gradient ==
2581             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2582            {
2583                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2584                 M4VSS3GPP_externalVideoEffectColor;
2585                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2586                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2587                /**< we don't use any function context */
2588                /**
2589                 * We do not need to set the color context, it is already set during
2590                 sendCommand function */
2591            }
2592
2593        }
2594    }
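    /* Summary of the dispatch above: all colour-based effects (black & white, pink, green,
       sepia, negative, RGB16, gradient) share the same external function,
       M4VSS3GPP_externalVideoEffectColor, and are told apart by the colour context already
       attached during sendCommand. Framing and Fifties effects likewise reuse contexts built
       in sendCommand, while the zoom effects carry the effect type itself as their function
       context. */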
2595
2596    /**
2597     * Open the VSS 3GPP */
2598    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
2599    if (err != M4NO_ERROR)
2600    {
2601        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2602             M4VSS3GPP_editOpen returned 0x%x\n",err);
2603        M4VSS3GPP_editCleanUp(pVssCtxt);
2604        return err;
2605    }
2606
2607    /**
2608     * Save VSS context to be able to close / free VSS later */
2609    xVSS_context->pCurrentEditContext = pVssCtxt;
2610
2611    return M4NO_ERROR;
2612}
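/* Minimal usage sketch (illustration only, kept out of the build): it assumes the integrator
 * drives the edit from M4xVSS_step() with M4VSS3GPP_editStep() until the VSS reports that
 * editing is done, then closes everything with M4xVSS_internalCloseEditedFile(). The step
 * function, its progress parameter and the M4VSS3GPP_WAR_EDITING_DONE code come from the
 * VSS3GPP API and are assumptions as far as this sketch is concerned. */
#if 0
{
    M4OSA_UInt8 uiProgress = 0;
    M4OSA_ERR errSketch = M4xVSS_internalGenerateEditedFile(xVSS_context);

    while (errSketch == M4NO_ERROR)
    {
        /* One step produces a slice of the output; uiProgress is updated by the VSS */
        errSketch = M4VSS3GPP_editStep(xVSS_context->pCurrentEditContext, &uiProgress);
    }
    if (errSketch == M4VSS3GPP_WAR_EDITING_DONE)
    {
        errSketch = M4xVSS_internalCloseEditedFile(xVSS_context);
    }
}
#endif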
2613
2614/**
2615 ******************************************************************************
2616 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2617 *
2618 * @brief    This function cleans up VSS
2619 * @note
2620 * @param    pContext    (IN) The integrator own context
2621 *
2622 * @return    M4NO_ERROR:    No error
2623 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2624 ******************************************************************************
2625 */
2626M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2627{
2628    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2629    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
2630    M4OSA_ERR err;
2631
2632    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
2633    {
2634        /**
2635         * Close the VSS 3GPP */
2636        err = M4VSS3GPP_editClose(pVssCtxt);
2637        if (err != M4NO_ERROR)
2638        {
2639            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
2640                 M4VSS3GPP_editClose returned 0x%x\n",err);
2641            M4VSS3GPP_editCleanUp(pVssCtxt);
2642            return err;
2643        }
2644
2645        /**
2646         * Free this VSS3GPP edition instance */
2647        err = M4VSS3GPP_editCleanUp(pVssCtxt);
2648        if (err != M4NO_ERROR)
2649        {
2650            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
2651                M4VSS3GPP_editCleanUp returned 0x%x\n",err);
2652            return err;
2653        }
2654    }
2655
2656    return M4NO_ERROR;
2657}
2658
2659/**
2660 ******************************************************************************
2661 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2662 *
2663 * @brief    This function prepares VSS for audio mixing
2664 * @note    It takes its parameters from the BGM settings in the xVSS internal context
2665 * @param    pContext    (IN) The integrator own context
2666 *
2667 * @return    M4NO_ERROR:    No error
2668 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2669 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2670 ******************************************************************************
2671 */
2672/***
2673 * FB: the function has been modified because the structure used for saving is now
2674 *  pCurrentEditSettings instead of pSettings.
2675 * This change was added for UTF support.
2676 * The "xVSS_context->pSettings" accesses have been replaced by "xVSS_context->pCurrentEditSettings"
2677 ***/
2678M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2679{
2680    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2681    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
2682    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
2683    M4OSA_ERR err;
2684    M4VIDEOEDITING_ClipProperties fileProperties;
2685
2686    /**
2687     * Allocate audio mixing settings structure and fill it with BGM parameters */
2688    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
2689        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
2690    if(pAudioMixSettings == M4OSA_NULL)
2691    {
2692        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
2693        return M4ERR_ALLOC;
2694    }
2695
2696    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
2697         M4VIDEOEDITING_kFileType_3GPP)
2698    {
2699        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
2700             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
2701                 &fileProperties);
2702        if(err != M4NO_ERROR)
2703        {
2704            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2705                 impossible to retrieve audio BGM properties (err 0x%x) ->\
2706                     re-encoding audio background music", err);
2707            fileProperties.AudioStreamType =
2708                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
2709                  /* To force BGM encoding */
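                  /* Any value different from the requested output format is enough here:
                     the mismatch is presumably what makes the mixer re-encode the BGM
                     track instead of copying it as-is. */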
2710        }
2711    }
2712
2713    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
2714    pAudioMixSettings->AddedAudioFileType =
2715     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
2716    pAudioMixSettings->pAddedAudioTrackFile =
2717     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
2718    pAudioMixSettings->uiAddVolume =
2719     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
2720
2721    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
2722    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2723    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
2724    pAudioMixSettings->uiSamplingFrequency =
2725     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
2726    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
2727
2728    pAudioMixSettings->b_DuckingNeedeed =
2729     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
2730    pAudioMixSettings->fBTVolLevel =
2731     (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
2732    pAudioMixSettings->InDucking_threshold =
2733     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
2734    pAudioMixSettings->InDucking_lowVolume =
2735     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
2736    pAudioMixSettings->fPTVolLevel =
2737     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
2738    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
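    /* The volume fields above come in as percentages (0..100) in the xVSS settings and are
       scaled here to 0..1 float factors for the mixer, e.g. uiAddVolume = 75 gives
       fBTVolLevel = 0.75. */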
2739
2740    if(xVSS_context->pSettings->xVSS.bAudioMono)
2741    {
2742        pAudioMixSettings->outputNBChannels = 1;
2743    }
2744    else
2745    {
2746        pAudioMixSettings->outputNBChannels = 2;
2747    }
2748
2749    /**
2750     * Fill audio mix settings with BGM parameters */
2751    pAudioMixSettings->uiBeginLoop =
2752     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
2753    pAudioMixSettings->uiEndLoop =
2754     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
2755    pAudioMixSettings->uiAddCts =
2756     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
2757
2758    /**
2759     * Output file of the audio mixer will be the final file (audio mixing is the last step) */
2760    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
2761    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
2762
2763    /**
2764     * Input file of the audio mixer is a temporary file containing all audio/video editions */
2765    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
2766
2767    /**
2768     * Save audio mixing settings pointer to be able to free it in
2769     M4xVSS_internalCloseAudioMixedFile function */
2770    xVSS_context->pAudioMixSettings = pAudioMixSettings;
2771
2772    /**
2773     * Create a VSS 3GPP audio mixing instance */
2774    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
2775         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2776
2777    /**
2778     * Save audio mixing context to be able to call audio mixing step function in
2779      M4xVSS_step function */
2780    xVSS_context->pAudioMixContext = pAudioMixingCtxt;
2781
2782    if (err != M4NO_ERROR)
2783    {
2784        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2785             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
2786        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
2787        return err;
2788    }
2789
2790    return M4NO_ERROR;
2791}
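/* As with the edit instance above, the audio mixing context saved in pAudioMixContext is
   driven step by step from M4xVSS_step() and is released, together with pAudioMixSettings,
   in M4xVSS_internalCloseAudioMixedFile() below. */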
2792
2793/**
2794 ******************************************************************************
2795 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2796 *
2797 * @brief    This function cleans up VSS for audio mixing
2798 * @note
2799 * @param    pContext    (IN) The integrator own context
2800 *
2801 * @return    M4NO_ERROR:    No error
2802 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2803 ******************************************************************************
2804 */
2805M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2806{
2807    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2808    M4OSA_ERR err;
2809
2810    /**
2811     * Free this VSS3GPP audio mixing instance */
2812    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
2813    {
2814        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
2815        if (err != M4NO_ERROR)
2816        {
2817            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
2818                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
2819            return err;
2820        }
2821    }
2822
2823    /**
2824     * Free VSS audio mixing settings */
2825    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
2826    {
2827        free(xVSS_context->pAudioMixSettings);
2828        xVSS_context->pAudioMixSettings = M4OSA_NULL;
2829    }
2830
2831    return M4NO_ERROR;
2832}
2833
2834/**
2835 ******************************************************************************
2836 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2837 *
2838 * @brief    This function cleans up preview edition structure used to generate
2839 *            preview.3gp file given to the VPS
2840 * @note    It also frees the preview structure given to the VPS
2841 * @param    pContext    (IN) The integrator own context
2842 *
2843 * @return    M4NO_ERROR:    No error
2844 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2845 ******************************************************************************
2846 */
2847M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2848{
2849    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2850    M4OSA_UInt8 i;
2851
2852    /**
2853     * Free clip/transition settings */
2854    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2855    {
2856        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2857
2858        free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2859        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2860
2861        /**
2862         * Because there is 1 less transition than clip number */
2863        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2864        {
2865            free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2866            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2867        }
2868    }
2869
2870    /**
2871     * Free clip/transition list */
2872    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2873    {
2874        free((xVSS_context->pCurrentEditSettings->pClipList));
2875        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2876    }
2877    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2878    {
2879        free((xVSS_context->pCurrentEditSettings->pTransitionList));
2880        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2881    }
2882
2883    /**
2884     * Free output preview file path */
2885    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2886    {
2887        free(xVSS_context->pCurrentEditSettings->pOutputFile);
2888        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2889    }
2890
2891    /**
2892     * Free temporary preview file path */
2893    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2894    {
2895        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2896        free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2897        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2898    }
2899
2900    /**
2901     * Free "local" BGM settings */
2902    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2903    {
2904        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2905        {
2906            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2907            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2908        }
2909        free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2910        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2911    }
2912
2913    /**
2914     * Free current edit settings structure */
2915    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2916    {
2917        free(xVSS_context->pCurrentEditSettings);
2918        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2919    }
2920
2921    /**
2922     * Free preview effects given to application */
2923    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
2924    {
2925        free(xVSS_context->pPreviewSettings->Effects);
2926        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
2927        xVSS_context->pPreviewSettings->nbEffects = 0;
2928    }
2929
2930    return M4NO_ERROR;
2931}
2932
2933
2934/**
2935 ******************************************************************************
2936 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2937 *
2938 * @brief    This function cleans up saving edition structure used to generate
2939 *            output.3gp file given to the VPS
2940 * @note
2941 * @param    pContext    (IN) The integrator own context
2942 *
2943 * @return    M4NO_ERROR:    No error
2944 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2945 ******************************************************************************
2946 */
2947M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2948{
2949    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2950    M4OSA_UInt8 i;
2951
2952    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2953    {
2954        /**
2955         * Free clip/transition settings */
2956        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2957        {
2958            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2959
2960            free((xVSS_context->pCurrentEditSettings->pClipList[i]));
2961            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2962
2963            /**
2964             * Because there is 1 less transition than clip number */
2965            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2966            {
2967                free(\
2968                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2969                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2970            }
2971        }
2972
2973        /**
2974         * Free clip/transition list */
2975        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2976        {
2977            free((xVSS_context->pCurrentEditSettings->pClipList));
2978            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2979        }
2980        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2981        {
2982            free((xVSS_context->pCurrentEditSettings->pTransitionList));
2983            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2984        }
2985
2986        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
2987        {
2988            free((xVSS_context->pCurrentEditSettings->Effects));
2989            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
2990            xVSS_context->pCurrentEditSettings->nbEffects = 0;
2991        }
2992
2993        /**
2994         * Free output saving file path */
2995        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2996        {
2997            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2998            {
2999                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
3000                free(xVSS_context->pCurrentEditSettings->pOutputFile);
3001            }
3002            if(xVSS_context->pOutputFile != M4OSA_NULL)
3003            {
3004                free(xVSS_context->pOutputFile);
3005                xVSS_context->pOutputFile = M4OSA_NULL;
3006            }
3007            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3008            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
3009        }
3010
3011        /**
3012         * Free temporary saving file path */
3013        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
3014        {
3015            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
3016            free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
3017            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
3018        }
3019
3020        /**
3021         * Free "local" BGM settings */
3022        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
3023        {
3024            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3025            {
3026                free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
3027                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3028            }
3029            free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
3030            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
3031        }
3032
3033        /**
3034         * Free current edit settings structure */
3035        free(xVSS_context->pCurrentEditSettings);
3036        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
3037    }
3038
3039    return M4NO_ERROR;
3040}
3041
3042
3043/**
3044 ******************************************************************************
3045 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4OSA_Context pContext)
3046 *
3047 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
3048 * @note
3049 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
3050 *
3051 * @return    M4NO_ERROR:    No error
3052 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3053 ******************************************************************************
3054 */
3055M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
3056{
3057    M4OSA_UInt8 i,j;
3058
3059    /**
3060     * For each clip ... */
3061    for(i=0; i<pSettings->uiClipNumber; i++)
3062    {
3063        /**
3064         * ... free clip settings */
3065        if(pSettings->pClipList[i] != M4OSA_NULL)
3066        {
3067            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
3068
3069            free((pSettings->pClipList[i]));
3070            pSettings->pClipList[i] = M4OSA_NULL;
3071        }
3072
3073        /**
3074         * ... free transition settings */
3075        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
3076        {
3077            if(pSettings->pTransitionList[i] != M4OSA_NULL)
3078            {
3079                switch (pSettings->pTransitionList[i]->VideoTransitionType)
3080                {
3081                    case M4xVSS_kVideoTransitionType_AlphaMagic:
3082
3083                        /**
3084                         * In case of Alpha Magic transition,
3085                          some extra parameters need to be freed */
3086                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
3087                             != M4OSA_NULL)
3088                        {
3089                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3090                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3091                                    pPlane->pac_data));
3092                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
3093                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
3094
3095                            free((((M4xVSS_internal_AlphaMagicSettings*)\
3096                                pSettings->pTransitionList[i]->\
3097                                    pExtVideoTransitionFctCtxt)->pPlane));
3098                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
3099                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
3100
3101                            free((pSettings->pTransitionList[i]->\
3102                                pExtVideoTransitionFctCtxt));
3103                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3104
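                            /* Transitions that use the same alpha file share the decoded
                               pPlane released just above. The loop below therefore frees and
                               NULLs the context of every later transition referencing that
                               file, so the shared plane is not freed a second time when the
                               outer loop reaches it. */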
3105                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
3106                            {
3107                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
3108                                {
3109                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
3110                                     M4xVSS_kVideoTransitionType_AlphaMagic)
3111                                    {
3112                                        M4OSA_UInt32 pCmpResult=0;
3113                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
3114                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3115                                                pAlphaFilePath,
3116                                                (const char *)pSettings->pTransitionList[j]->\
3117                                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3118                                                pAlphaFilePath);
3119                                        if(pCmpResult == 0)
3120                                        {
3121                                            /* Free the extra internal alpha magic structure and set
3122                                            it to M4OSA_NULL so it is not freed a second time */
3123                                            free((pSettings->\
3124                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
3125                                            pSettings->pTransitionList[j]->\
3126                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
3127                                        }
3128                                    }
3129                                }
3130                            }
3131                        }
3132
3133                        if(pSettings->pTransitionList[i]->\
3134                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
3135                        {
3136                            if(pSettings->pTransitionList[i]->\
3137                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3138                                    pAlphaFilePath != M4OSA_NULL)
3139                            {
3140                                free(pSettings->\
3141                                    pTransitionList[i]->\
3142                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3143                                            pAlphaFilePath);
3144                                pSettings->pTransitionList[i]->\
3145                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
3146                                        pAlphaFilePath = M4OSA_NULL;
3147                            }
3148                            free(pSettings->pTransitionList[i]->\
3149                                xVSS.transitionSpecific.pAlphaMagicSettings);
3150                            pSettings->pTransitionList[i]->\
3151                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
3152
3153                        }
3154
3155                    break;
3156
3157
3158                    case M4xVSS_kVideoTransitionType_SlideTransition:
3159                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
3160                            xVSS.transitionSpecific.pSlideTransitionSettings)
3161                        {
3162                            free(pSettings->pTransitionList[i]->\
3163                                xVSS.transitionSpecific.pSlideTransitionSettings);
3164                            pSettings->pTransitionList[i]->\
3165                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
3166                        }
3167                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
3168                        {
3169                            free((pSettings->pTransitionList[i]->\
3170                                pExtVideoTransitionFctCtxt));
3171                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3172                        }
3173                    break;
3174                    default:
3175                    break;
3176
3177                }
3178                /**
3179                 * Free transition settings structure */
3180                free((pSettings->pTransitionList[i]));
3181                pSettings->pTransitionList[i] = M4OSA_NULL;
3182            }
3183        }
3184    }
3185
3186    /**
3187     * Free clip list */
3188    if(pSettings->pClipList != M4OSA_NULL)
3189    {
3190        free((pSettings->pClipList));
3191        pSettings->pClipList = M4OSA_NULL;
3192    }
3193
3194    /**
3195     * Free transition list */
3196    if(pSettings->pTransitionList != M4OSA_NULL)
3197    {
3198        free((pSettings->pTransitionList));
3199        pSettings->pTransitionList = M4OSA_NULL;
3200    }
3201
3202    /**
3203     * RC: Free effects list */
3204    if(pSettings->Effects != M4OSA_NULL)
3205    {
3206        for(i=0; i<pSettings->nbEffects; i++)
3207        {
3208            /**
3209             * For each clip, free framing structure if needed */
3210            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
3211                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
3212            {
3213#ifdef DECODE_GIF_ON_SAVING
3214                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3215#else
3216                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3217                M4xVSS_FramingStruct* framingCtx_save;
3218                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
3219#endif
3220
3221#ifdef DECODE_GIF_ON_SAVING
3222                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3223                 non-existent pointer */
3224                {
3225                    if(framingCtx->aFramingCtx != M4OSA_NULL)
3226                    {
3227                        {
3228                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
3229                            {
3230                                free(framingCtx->aFramingCtx->\
3231                                    FramingRgb->pac_data);
3232                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
3233                                free(framingCtx->aFramingCtx->FramingRgb);
3234                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
3235                            }
3236                        }
3237                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
3238                        {
3239                            free(framingCtx->aFramingCtx->\
3240                                FramingYuv[0].pac_data);
3241                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3242                           free(framingCtx->aFramingCtx->\
3243                                FramingYuv[1].pac_data);
3244                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
3245                           free(framingCtx->aFramingCtx->\
3246                                FramingYuv[2].pac_data);
3247                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
3248                            free(framingCtx->aFramingCtx->FramingYuv);
3249                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
3250                        }
3251                        free(framingCtx->aFramingCtx);
3252                        framingCtx->aFramingCtx = M4OSA_NULL;
3253                    }
3254                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
3255                    {
3256                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
3257                        {
3258                            free(framingCtx->aFramingCtx_last->\
3259                                FramingRgb->pac_data);
3260                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
3261                            free(framingCtx->aFramingCtx_last->\
3262                                FramingRgb);
3263                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
3264                        }
3265                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
3266                        {
3267                            free(framingCtx->aFramingCtx_last->\
3268                                FramingYuv[0].pac_data);
3269                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
3270                            free(framingCtx->aFramingCtx_last->FramingYuv);
3271                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
3272                        }
3273                        free(framingCtx->aFramingCtx_last);
3274                        framingCtx->aFramingCtx_last = M4OSA_NULL;
3275                    }
3276                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
3277                    {
3278                        free(framingCtx->pEffectFilePath);
3279                        framingCtx->pEffectFilePath = M4OSA_NULL;
3280                    }
3281                    /* In case it is still allocated */
3282                    if(framingCtx->pSPSContext != M4OSA_NULL)
3283                    {
3284                    //    M4SPS_destroy(framingCtx->pSPSContext);
3285                        framingCtx->pSPSContext = M4OSA_NULL;
3286                    }
3287                    /*Alpha blending structure*/
3288                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
3289                    {
3290                        free(framingCtx->alphaBlendingStruct);
3291                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
3292                    }
3293
3294                    free(framingCtx);
3295                    framingCtx = M4OSA_NULL;
3296                }
3297#else
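                /* In this build variant the per-frame framing structures form a circular list
                   linked through pNext; the loop below walks and frees each element until it
                   wraps back around to the first one. */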
3298                do
3299                {
3300                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3301                    non-existent pointer */
3302                    {
3303                        if(framingCtx->FramingRgb != M4OSA_NULL)
3304                        {
3305                            free(framingCtx->FramingRgb->pac_data);
3306                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
3307                            free(framingCtx->FramingRgb);
3308                            framingCtx->FramingRgb = M4OSA_NULL;
3309                        }
3310                        if(framingCtx->FramingYuv != M4OSA_NULL)
3311                        {
3312                            free(framingCtx->FramingYuv[0].pac_data);
3313                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3314                            free(framingCtx->FramingYuv);
3315                            framingCtx->FramingYuv = M4OSA_NULL;
3316                        }
3317                        framingCtx_save = framingCtx->pNext;
3318                        free(framingCtx);
3319                        framingCtx = M4OSA_NULL;
3320                        framingCtx = framingCtx_save;
3321                    }
3322                    else
3323                    {
3324                        /*FB: bug fix P4ME00003002*/
3325                        break;
3326                    }
3327                } while(framingCtx_first != framingCtx);
3328#endif
3329            }
3330            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
3331            {
3332                /* Free Fifties context */
3333                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3334
3335                if(FiftiesCtx != M4OSA_NULL)
3336                {
3337                    free(FiftiesCtx);
3338                    FiftiesCtx = M4OSA_NULL;
3339                }
3340
3341            }
3342            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
3343                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
3344                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
3345                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
3346                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
3347                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
3348                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
3349            {
3350                /* Free Color context */
3351                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3352
3353                if(ColorCtx != M4OSA_NULL)
3354                {
3355                    free(ColorCtx);
3356                    ColorCtx = M4OSA_NULL;
3357                }
3358            }
3359
3360            /* Free simple fields */
3361            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
3362            {
3363                free(pSettings->Effects[i].xVSS.pFramingFilePath);
3364                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
3365            }
3366            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
3367            {
3368                free(pSettings->Effects[i].xVSS.pFramingBuffer);
3369                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
3370            }
3371            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
3372            {
3373                free(pSettings->Effects[i].xVSS.pTextBuffer);
3374                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
3375            }
3376        }
3377        free(pSettings->Effects);
3378        pSettings->Effects = M4OSA_NULL;
3379    }
3380
3381    return M4NO_ERROR;
3382}
3383
3384M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
3385{
3386    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3387//    M4OSA_UInt8 i,j;
3388
3389    /* Free "local" BGM settings */
3390    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3391    {
3392        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3393        {
3394            free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
3395            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3396        }
3397        free(xVSS_context->pSettings->xVSS.pBGMtrack);
3398        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
3399    }
3400
3401    M4xVSS_freeSettings(xVSS_context->pSettings);
3402
3403    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
3404    {
3405        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
3406        M4xVSS_Pto3GPP_params* pParams_sauv;
3407
3408        while(pParams != M4OSA_NULL)
3409        {
3410            if(pParams->pFileIn != M4OSA_NULL)
3411            {
3412                free(pParams->pFileIn);
3413                pParams->pFileIn = M4OSA_NULL;
3414            }
3415            if(pParams->pFileOut != M4OSA_NULL)
3416            {
3417                /* Delete temporary file */
3418                remove((const char *)pParams->pFileOut);
3419                free(pParams->pFileOut);
3420                pParams->pFileOut = M4OSA_NULL;
3421            }
3422            if(pParams->pFileTemp != M4OSA_NULL)
3423            {
3424                /* Delete temporary file */
3425#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3426                remove((const char *)pParams->pFileTemp);
3427                free(pParams->pFileTemp);
3428#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3429                pParams->pFileTemp = M4OSA_NULL;
3430            }
3431            pParams_sauv = pParams;
3432            pParams = pParams->pNext;
3433            free(pParams_sauv);
3434            pParams_sauv = M4OSA_NULL;
3435        }
3436    }
3437
3438    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
3439    {
3440        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
3441        M4xVSS_MCS_params* pParams_sauv;
3442
3443        while(pParams != M4OSA_NULL)
3444        {
3445            if(pParams->pFileIn != M4OSA_NULL)
3446            {
3447                free(pParams->pFileIn);
3448                pParams->pFileIn = M4OSA_NULL;
3449            }
3450            if(pParams->pFileOut != M4OSA_NULL)
3451            {
3452                /* Delete temporary file */
3453                remove((const char *)pParams->pFileOut);
3454                free(pParams->pFileOut);
3455                pParams->pFileOut = M4OSA_NULL;
3456            }
3457            if(pParams->pFileTemp != M4OSA_NULL)
3458            {
3459                /* Delete temporary file */
3460#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3461                remove((const char *)pParams->pFileTemp);
3462                free(pParams->pFileTemp);
3463#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3464                pParams->pFileTemp = M4OSA_NULL;
3465            }
3466            pParams_sauv = pParams;
3467            pParams = pParams->pNext;
3468            free(pParams_sauv);
3469            pParams_sauv = M4OSA_NULL;
3470        }
3471    }
3472
3473    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
3474    {
3475        free(xVSS_context->pcmPreviewFile);
3476        xVSS_context->pcmPreviewFile = M4OSA_NULL;
3477    }
3478    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
3479        && xVSS_context->pOutputFile != M4OSA_NULL)
3480    {
3481        free(xVSS_context->pSettings->pOutputFile);
3482        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3483        xVSS_context->pOutputFile = M4OSA_NULL;
3484    }
3485
3486    /* Reinit all context variables */
3487    xVSS_context->previousClipNumber = 0;
3488    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
3489    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
3490    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
3491    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
3492    xVSS_context->pMCSparamsList = M4OSA_NULL;
3493    xVSS_context->pMCScurrentParams = M4OSA_NULL;
3494    xVSS_context->tempFileIndex = 0;
3495    xVSS_context->targetedTimescale = 0;
3496
3497    return M4NO_ERROR;
3498}
3499
3500/**
3501 ******************************************************************************
3502 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
3503 *                                    M4OSA_Char* pFile,
3504 *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
3505 *
3506 * @brief    This function retrieves the properties of an input 3GP file using MCS
3507 * @note
3508 * @param    pContext        (IN) The integrator own context
3509 * @param    pFile            (IN) 3GP file to analyse
3510 * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
3511 *                            the 3GP file properties
3512 *
3513 * @return    M4NO_ERROR:    No error
3514 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3515 ******************************************************************************
3516 */
3517M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
3518                                       M4VIDEOEDITING_ClipProperties *pFileProperties)
3519{
3520    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3521    M4OSA_ERR err;
3522    M4MCS_Context mcs_context;
3523
3524    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
3525    if(err != M4NO_ERROR)
3526    {
3527        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
3528        return err;
3529    }
3530
3531    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
3532    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
3533        M4OSA_NULL, M4OSA_NULL);
3534    if (err != M4NO_ERROR)
3535    {
3536        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
3537        M4MCS_abort(mcs_context);
3538        return err;
3539    }
3540
3541    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
3542    if(err != M4NO_ERROR)
3543    {
3544        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
3545        M4MCS_abort(mcs_context);
3546        return err;
3547    }
3548
3549    err = M4MCS_abort(mcs_context);
3550    if (err != M4NO_ERROR)
3551    {
3552        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
3553        return err;
3554    }
3555
3556    return M4NO_ERROR;
3557}
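/* Usage sketch (illustration only, not compiled): a hypothetical caller checking whether a
 * clip is an MPEG-4 video before reusing its timescale, in the same way
 * M4xVSS_internalGetTargetedTimeScale() below does. The file path is made up. */
#if 0
{
    M4VIDEOEDITING_ClipProperties clipProps;
    M4OSA_ERR errProps = M4xVSS_internalGetProperties(xVSS_context,
        (M4OSA_Char *)"/sdcard/example_clip.3gp", &clipProps);

    if (errProps == M4NO_ERROR && clipProps.VideoStreamType == M4VIDEOEDITING_kMPEG4)
    {
        /* clipProps now holds the exact clip properties reported by the MCS */
    }
}
#endif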
3558
3559
3560/**
3561 ******************************************************************************
3562 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
 *                                                M4VSS3GPP_EditSettings* pSettings,
3563 *                                                M4OSA_UInt32* pTargetedTimeScale)
3564 *
3565 * @brief    This function retrieves the targeted time scale
3566 * @note
3567 * @param    pContext            (IN)    The integrator own context
 * @param    pSettings            (IN)    Pointer on the edit settings (clip list)
3568 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
3569 *
3570 * @return    M4NO_ERROR:    No error
3571 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3572 ******************************************************************************
3573 */
3574M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3575                                                 M4VSS3GPP_EditSettings* pSettings,
3576                                                  M4OSA_UInt32* pTargetedTimeScale)
3577{
3578    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3579    M4OSA_ERR err;
3580    M4OSA_UInt32 totalDuration = 0;
3581    M4OSA_UInt8 i = 0;
3582    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
3583
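    /* Scan the clip list: keep the video timescale of the MPEG-4 clip with the longest
    (cut) duration; an ARGB8888 still forces the Pto3GPP timescale of 30, and a minimum
    of 30 is enforced after the loop. */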
3584    for(i=0;i<pSettings->uiClipNumber;i++)
3585    {
3586        /*search timescale only in mpeg4 case*/
3587        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
3588            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
3589            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
3590        {
3591            M4VIDEOEDITING_ClipProperties fileProperties;
3592
3593            /*UTF conversion support*/
3594            M4OSA_Char* pDecodedPath = M4OSA_NULL;
3595
3596            /**
3597            * UTF conversion: convert into the customer format, before being used*/
3598            pDecodedPath = pSettings->pClipList[i]->pFile;
3599
3600            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
3601                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
3602            {
3603                M4OSA_UInt32 length = 0;
3604                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
3605                     (M4OSA_Void*) pSettings->pClipList[i]->pFile,
3606                        (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
3607                             &length);
3608                if(err != M4NO_ERROR)
3609                {
3610                    M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3611                         M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
3612                    return err;
3613                }
3614                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
3615            }
3616
3617            /*End of the conversion: use the decoded path*/
3618            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
3619
3623            if(M4NO_ERROR != err)
3624            {
3625                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3626                     M4xVSS_internalGetProperties returned: 0x%x", err);
3627                return err;
3628            }
3629            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
3630            {
3631                if(pSettings->pClipList[i]->uiEndCutTime > 0)
3632                {
3633                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
3634                        - pSettings->pClipList[i]->uiBeginCutTime))
3635                    {
3636                        tempTimeScale = fileProperties.uiVideoTimeScale;
3637                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
3638                             - pSettings->pClipList[i]->uiBeginCutTime);
3639                    }
3640                }
3641                else
3642                {
3643                    if(tempDuration < (fileProperties.uiClipDuration\
3644                         - pSettings->pClipList[i]->uiBeginCutTime))
3645                    {
3646                        tempTimeScale = fileProperties.uiVideoTimeScale;
3647                        tempDuration = (fileProperties.uiClipDuration\
3648                             - pSettings->pClipList[i]->uiBeginCutTime);
3649                    }
3650                }
3651            }
3652        }
3653        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
3654        {
3655            /*the timescale is 30 for PTO3GP*/
3656            *pTargetedTimeScale = 30;
3657            return M4NO_ERROR;
3658
3659        }
3660    }
3661
3662    if(tempTimeScale >= 30)/*Enforce a minimum time scale: if the timescale is too low,
3663    the shell encoder can enter an infinite loop*/
3664    {
3665        *pTargetedTimeScale = tempTimeScale;
3666    }
3667    else
3668    {
3669        *pTargetedTimeScale = 30;
3670    }
3671
3672    return M4NO_ERROR;
3673}
3674
3675
3676/**
3677 ******************************************************************************
3678 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3679 *                                                    M4VIFI_ImagePlane *PlaneIn,
3680 *                                                    M4VIFI_ImagePlane *PlaneOut,
3681 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3682 *                                                    M4OSA_UInt32 uiEffectKind)
3683 *
3684 * @brief    This function applies a color effect on an input YUV420 planar frame
3685 * @note
3686 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
3687 * @param    PlaneIn            (IN) Input YUV420 planar
3688 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3689 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3690 * @param    uiEffectKind    (IN) Unused
3691 *
3692 * @return    M4VIFI_OK:    No error
3693 ******************************************************************************
3694 */
3695M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3696                                             M4VIFI_ImagePlane *PlaneIn,
3697                                             M4VIFI_ImagePlane *PlaneOut,
3698                                             M4VSS3GPP_ExternalProgress *pProgress,
3699                                             M4OSA_UInt32 uiEffectKind)
3700{
3701    M4VIFI_Int32 plane_number;
3702    M4VIFI_UInt32 i,j;
3703    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
3704    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
3705
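    /* Process the three YUV420 planes row by row: the chrominance planes are rewritten
    according to the selected effect, while the luminance plane is copied unchanged,
    except for the Negative effect which inverts it. */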
3706    for (plane_number = 0; plane_number < 3; plane_number++)
3707    {
3708        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
3709        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
3710        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
3711        {
3712            /**
3713             * Chrominance */
3714            if(plane_number==1 || plane_number==2)
3715            {
3716                //switch ((M4OSA_UInt32)pFunctionContext)
3717                // commented out because a structure now exists for the effect context
3718                switch (ColorContext->colorEffectType)
3719                {
3720                    case M4xVSS_kVideoEffectType_BlackAndWhite:
3721                        memset((void *)p_buf_dest,128,
3722                         PlaneIn[plane_number].u_width);
3723                        break;
3724                    case M4xVSS_kVideoEffectType_Pink:
3725                        memset((void *)p_buf_dest,255,
3726                         PlaneIn[plane_number].u_width);
3727                        break;
3728                    case M4xVSS_kVideoEffectType_Green:
3729                        memset((void *)p_buf_dest,0,
3730                         PlaneIn[plane_number].u_width);
3731                        break;
3732                    case M4xVSS_kVideoEffectType_Sepia:
3733                        if(plane_number==1)
3734                        {
3735                            memset((void *)p_buf_dest,117,
3736                             PlaneIn[plane_number].u_width);
3737                        }
3738                        else
3739                        {
3740                            memset((void *)p_buf_dest,139,
3741                             PlaneIn[plane_number].u_width);
3742                        }
3743                        break;
3744                    case M4xVSS_kVideoEffectType_Negative:
3745                        memcpy((void *)p_buf_dest,
3746                         (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3747                        break;
3748
3749                    case M4xVSS_kVideoEffectType_ColorRGB16:
3750                        {
3751                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3752
3753                            /*first get the r, g, b*/
3754                            b = (ColorContext->rgb16ColorData &  0x001f);
3755                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3756                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3757
3758                            /*keep y, but replace u and v*/
3759                            if(plane_number==1)
3760                            {
3761                                /*then convert to u*/
3762                                u = U16(r, g, b);
3763                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3764                                 PlaneIn[plane_number].u_width);
3765                            }
3766                            if(plane_number==2)
3767                            {
3768                                /*then convert to v*/
3769                                v = V16(r, g, b);
3770                                memset((void *)p_buf_dest, (M4OSA_UInt8)v,
3771                                 PlaneIn[plane_number].u_width);
3772                            }
3773                        }
3774                        break;
3775                    case M4xVSS_kVideoEffectType_Gradient:
3776                        {
3777                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3778
3779                            /*first get the r, g, b*/
3780                            b = (ColorContext->rgb16ColorData &  0x001f);
3781                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3782                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3783
3784                            /*for color gradation*/
3785                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
3786                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
3787                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
3788
3789                            /*keep y, but replace u and v*/
3790                            if(plane_number==1)
3791                            {
3792                                /*then convert to u*/
3793                                u = U16(r, g, b);
3794                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3795                                 PlaneIn[plane_number].u_width);
3796                            }
3797                            if(plane_number==2)
3798                            {
3799                                /*then convert to v*/
3800                                v = V16(r, g, b);
3801                                memset((void *)p_buf_dest,(M4OSA_UInt8)v,
3802                                 PlaneIn[plane_number].u_width);
3803                            }
3804                        }
3805                        break;
3806                    default:
3807                        break;
3808                }
3809            }
3810            /**
3811             * Luminance */
3812            else
3813            {
3814                //switch ((M4OSA_UInt32)pFunctionContext)
3815                // commented out because a structure now exists for the effect context
3816                switch (ColorContext->colorEffectType)
3817                {
3818                case M4xVSS_kVideoEffectType_Negative:
3819                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
3820                    {
3821                            p_buf_dest[j] = 255 - p_buf_src[j];
3822                    }
3823                    break;
3824                default:
3825                    memcpy((void *)p_buf_dest,
3826                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3827                    break;
3828                }
3829            }
3830            p_buf_src += PlaneIn[plane_number].u_stride;
3831            p_buf_dest += PlaneOut[plane_number].u_stride;
3832        }
3833    }
3834
3835    return M4VIFI_OK;
3836}
3837
3838/**
3839 ******************************************************************************
3840 * prototype    M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3841 *                                                    M4VIFI_ImagePlane *PlaneIn,
3842 *                                                    M4VIFI_ImagePlane *PlaneOut,
3843 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3844 *                                                    M4OSA_UInt32 uiEffectKind)
3845 *
3846 * @brief    This function adds a fixed or animated image onto an input YUV420 planar frame
3847 * @note
3848 * @param    userData        (IN) Framing context (overlay frame(s) to draw on top of the video)
3849 * @param    PlaneIn            (IN) Input YUV420 planar
3850 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3851 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3852 * @param    uiEffectKind    (IN) Unused
3853 *
3854 * @return    M4VIFI_OK:    No error
3855 ******************************************************************************
3856 */
3857M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3858                                                M4VIFI_ImagePlane PlaneIn[3],
3859                                                M4VIFI_ImagePlane *PlaneOut,
3860                                                M4VSS3GPP_ExternalProgress *pProgress,
3861                                                M4OSA_UInt32 uiEffectKind )
3862{
3863    M4VIFI_UInt32 x,y;
3864
3865    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
3866    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
3867    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
3868
3869    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
3870    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
3871    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
3872
3873    M4VIFI_UInt8 *p_out0;
3874    M4VIFI_UInt8 *p_out1;
3875    M4VIFI_UInt8 *p_out2;
3876
3877    M4VIFI_UInt32 topleft[2];
3878
3879    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
3880    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
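    /* TRANSPARENT_COLOR (0x07E0, pure green in RGB565) is split into its two bytes so that
    framing pixels can be compared byte by byte against the framing RGB buffer. */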
3881
3882#ifndef DECODE_GIF_ON_SAVING
3883    Framing = (M4xVSS_FramingStruct *)userData;
3884    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
3885    FramingRGB = Framing->FramingRgb->pac_data;
3886#endif /*DECODE_GIF_ON_SAVING*/
3887
3888    /*FB*/
3889#ifdef DECODE_GIF_ON_SAVING
3890    M4OSA_ERR err;
3891    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
3892    currentFraming = (M4xVSS_FramingStruct *)Framing;
3893    FramingRGB = Framing->FramingRgb->pac_data;
3894#endif /*DECODE_GIF_ON_SAVING*/
3895    /*end FB*/
3896
3897    /**
3898     * Initialize input / output plane pointers */
3899    p_in_Y += PlaneIn[0].u_topleft;
3900    p_in_U += PlaneIn[1].u_topleft;
3901    p_in_V += PlaneIn[2].u_topleft;
3902
3903    p_out0 = PlaneOut[0].pac_data;
3904    p_out1 = PlaneOut[1].pac_data;
3905    p_out2 = PlaneOut[2].pac_data;
3906
3907    /**
3908     * Depending on time, initialize Framing frame to use */
3909    if(Framing->previousClipTime == -1)
3910    {
3911        Framing->previousClipTime = pProgress->uiOutputTime;
3912    }
3913
3914    /**
3915     * If the current clip time has reached the duration of one frame of the framing picture,
3916     * we need to step to the next framing picture */
3917
3918    Framing->previousClipTime = pProgress->uiOutputTime;
3919    FramingRGB = currentFraming->FramingRgb->pac_data;
3920    topleft[0] = currentFraming->topleft_x;
3921    topleft[1] = currentFraming->topleft_y;
3922
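    /* For every pixel of the input frame: where the framing picture covers the frame, blend
    the framing YUV on top of the input using an alpha value derived from the fade-in/fade-out
    settings, except where the framing RGB buffer holds the transparent colour; everywhere
    else simply copy the input pixel to the output. */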
3923    for( x=0 ;x < PlaneIn[0].u_height ; x++)
3924    {
3925        for( y=0 ;y < PlaneIn[0].u_width ; y++)
3926        {
3927            /**
3928             * To handle framing with input size != output size
3929             * Framing is applied if the coordinates match between the framing/topleft and the input plane */
3930            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
3931                y >= topleft[0] &&
3932                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
3933                x >= topleft[1])
3934            {
3935                /*Alpha blending support*/
3936                M4OSA_Float alphaBlending = 1;
3937                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
3938                 (M4xVSS_internalEffectsAlphaBlending*)\
3939                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
3940
3941                if(alphaBlendingStruct != M4OSA_NULL)
3942                {
3943                    if(pProgress->uiProgress \
3944                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
3945                    {
3946                        if(alphaBlendingStruct->m_fadeInTime == 0) {
3947                            alphaBlending = alphaBlendingStruct->m_start / 100;
3948                        } else {
3949                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
3950                             - alphaBlendingStruct->m_start)\
3951                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
3952                            alphaBlending += alphaBlendingStruct->m_start;
3953                            alphaBlending /= 100;
3954                        }
3955                    }
3956                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
3957                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
3958                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
3959                    {
3960                        alphaBlending = (M4OSA_Float)\
3961                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
3962                    }
3963                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
3964                    (alphaBlendingStruct->m_fadeOutTime*10))
3965                    {
3966                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
3967                            alphaBlending = alphaBlendingStruct->m_end / 100;
3968                        } else {
3969                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
3970                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
3971                            /(alphaBlendingStruct->m_fadeOutTime*10);
3972                            alphaBlending += alphaBlendingStruct->m_end;
3973                            alphaBlending /= 100;
3974                        }
3975                    }
3976                }
3977                /**/
3978
3979                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
3980                {
3981                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
3982                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3983                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
3984                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
3985                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
3986                }
3987                else
3988                {
3989                    *( p_out0+y+x*PlaneOut[0].u_stride)=
3990                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
3991                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
3992                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
3993                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
3994                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
3995                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
3996                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
3997                                *alphaBlending;
3998                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
3999                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
4000                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
4001                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
4002                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
4003                                *alphaBlending;
4004                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
4005                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
4006                }
4007                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
4008                    y == PlaneIn[0].u_width-1)
4009                {
4010                    FramingRGB = FramingRGB + 2 \
4011                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
4012                            - PlaneIn[0].u_width + 1);
4013                }
4014                else
4015                {
4016                    FramingRGB = FramingRGB + 2;
4017                }
4018            }
4019            /**
4020             * Just copy input plane to output plane */
4021            else
4022            {
4023                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
4024                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
4025                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
4026                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
4027                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
4028            }
4029        }
4030    }
4031
4032
4033    return M4VIFI_OK;
4034}
4035
4036
4037/**
4038 ******************************************************************************
4039 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
4040 *                                                    M4VIFI_ImagePlane *PlaneIn,
4041 *                                                    M4VIFI_ImagePlane *PlaneOut,
4042 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4043 *                                                    M4OSA_UInt32 uiEffectKind)
4044 *
4045 * @brief    This function makes a video look as if it had been shot in the fifties
4046 * @note
4047 * @param    pUserData       (IN) Context
4048 * @param    pPlaneIn        (IN) Input YUV420 planar
4049 * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
4050 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4051 * @param    uiEffectKind    (IN) Unused
4052 *
4053 * @return    M4VIFI_OK:            No error
4054 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
4055 ******************************************************************************
4056 */
4057M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
4058                                                M4VIFI_ImagePlane *pPlaneIn,
4059                                                M4VIFI_ImagePlane *pPlaneOut,
4060                                                M4VSS3GPP_ExternalProgress *pProgress,
4061                                                M4OSA_UInt32 uiEffectKind )
4062{
4063    M4VIFI_UInt32 x, y, xShift;
4064    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
4065    M4VIFI_UInt8 *pOutY, *pInYbegin;
4066    M4VIFI_UInt8 *pInCr,* pOutCr;
4067    M4VIFI_Int32 plane_number;
4068
4069    /* Internal context*/
4070    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
4071
4072    /* Check the inputs (debug only) */
4073    M4OSA_DEBUG_IF2((p_FiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
4074         "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4075    M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
4076         "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4077    M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
4078        "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4079
4080    /* Initialize input / output plane pointers */
4081    pInY += pPlaneIn[0].u_topleft;
4082    pOutY = pPlaneOut[0].pac_data;
4083    pInYbegin  = pInY;
4084
4085    /* Initialize the random generator */
4086    if(p_FiftiesData->previousClipTime < 0)
4087    {
4088        M4OSA_randInit();
4089        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4090        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4091        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4092    }
4093
4094    /* Choose random values if we have reached the duration of a partial effect */
4095    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
4096         > p_FiftiesData->fiftiesEffectDuration)
4097    {
4098        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4099        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4100        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4101    }
4102
4103    /* Set the chrominance planes to sepia */
4104    for (plane_number = 1; plane_number < 3; plane_number++)
4105    {
4106        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
4107        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
4108
4109        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
4110        {
4111            if (1 == plane_number)
4112                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
4113            else
4114                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
4115
4116            pInCr  += pPlaneIn[plane_number].u_stride;
4117            pOutCr += pPlaneOut[plane_number].u_stride;
4118        }
4119    }
4120
4121    /* Compute the new luma values: each output row is taken from a randomly shifted input row,
    with dark separator lines between the two wrapped parts of the image and a random vertical stripe */
4122    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
4123    {
4124        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
4125
4126        /* Compute the xShift (random value) */
4127        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
4128            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
4129        else
4130            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
4131                % (pPlaneIn[0].u_height - 1);
4132
4133        /* Initialize the pointers */
4134        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
4135        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
4136
4137        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
4138        {
4139            /* Set Y value */
4140            if (xShift > (pPlaneIn[0].u_height - 4))
4141                *p_outYtmp = 40;        /* Add some horizontal black lines between the
4142                                        two parts of the image */
4143            else if ( y == p_FiftiesData->stripeRandomValue)
4144                *p_outYtmp = 90;        /* Add a random vertical stripe */
4145            else
4146                *p_outYtmp = *p_inYtmp;
4147
4148
4149            /* Go to the next pixel */
4150            p_outYtmp++;
4151            p_inYtmp++;
4152
4153            /* Restart at the beginning of the line for the last pixel*/
4154            if (y == (pPlaneIn[0].u_width - 2))
4155                p_outYtmp = pOutY;
4156        }
4157
4158        /* Go to the next line */
4159        pOutY += pPlaneOut[0].u_stride;
4160    }
4161
4162    return M4VIFI_OK;
4163}
4164
4165/**
4166 ******************************************************************************
4167 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
4168 * @brief    Zoom in/out video effect functions.
4169 * @note    The external video function is used only if VideoEffectType is set to
4170 * M4xVSS_kVideoEffectType_ZoomIn or M4xVSS_kVideoEffectType_ZoomOut.
4171 *
4172 * @param   pFunctionContext    (IN) The function context, previously set by the integrator
4173 * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
4174 *                                    image planes (Y, U and V)
4175 * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
4176 *                                        three valid image planes (Y, U and V)
4177 * @param    pProgress            (IN) Set of information about the video transition progress.
4178 * @return    M4NO_ERROR:            No error
4179 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
4180 ******************************************************************************
4181 */
4182
4183M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
4184    M4OSA_Void *pFunctionContext,
4185    M4VIFI_ImagePlane *pInputPlanes,
4186    M4VIFI_ImagePlane *pOutputPlanes,
4187    M4VSS3GPP_ExternalProgress *pProgress,
4188    M4OSA_UInt32 uiEffectKind
4189)
4190{
4191    M4OSA_UInt32 boxWidth;
4192    M4OSA_UInt32 boxHeight;
4193    M4OSA_UInt32 boxPosX;
4194    M4OSA_UInt32 boxPosY;
4195    M4OSA_UInt32 ratio = 0;
4196    /* consecutive entries differ by a factor of 2^(1/4) ~= 1.189207 */
4197    /* zoom between x1 and x16 */
4198    M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
4199                                6889,8192,9742,11585,13777,16384};
4200    M4OSA_UInt32 ik;
4201
4202    M4VIFI_ImagePlane boxPlane[3];
4203
4204    if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
4205    {
4206        //ratio = 16 - (15 * pProgress->uiProgress)/1000;
4207        ratio = 16 - pProgress->uiProgress / 66 ;
4208    }
4209    else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
4210    {
4211        //ratio = 1 + (15 * pProgress->uiProgress)/1000;
4212        ratio = 1 + pProgress->uiProgress / 66 ;
4213    }
4214
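    /* Build a centred crop window on each input plane: ratiotab holds the zoom factor in Q10
    fixed point (x1 to x16 in quarter-power-of-two steps), so the box size is the plane size
    divided by the current factor; the box is then scaled up to the full output size by the
    bilinear resize below. For example, with uiProgress = 500 during a zoom-out,
    ratio = 16 - 500/66 = 9 and the factor is ratiotab[9]/1024 = 4871/1024, roughly x4.8. */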
4215    for(ik=0;ik<3;ik++){
4216
4217        boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
4218        boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
4219
4220        boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
4221        boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
4222        boxPlane[ik].u_height = (boxHeight)&(~1);
4223        boxPlane[ik].u_width = (boxWidth)&(~1);
4224
4225        boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
4226        boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
4227        boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
4228    }
4229
4230    M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
4231
4232    /**
4233     * Return */
4234    return(M4NO_ERROR);
4235}
4236
4237/**
4238 ******************************************************************************
4239 * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
4240 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4241 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4242 *                                    M4VIFI_ImagePlane *PlaneOut,
4243 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4244 *                                    M4OSA_UInt32 uiTransitionKind)
4245 *
4246 * @brief    This function applies an alpha magic transition between two input YUV420 planar frames
4247 * @note
4248 * @param    userData        (IN) Contains a pointer on a settings structure
4249 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4250 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4251 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4252 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4253 * @param    uiTransitionKind(IN) Unused
4254 *
4255 * @return    M4VIFI_OK:    No error
4256 ******************************************************************************
4257 */
4258M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4259                             M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4260                             M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
4261{
4262
4263    M4OSA_ERR err;
4264
4265    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4266    M4VIFI_Int32 alphaProgressLevel;
4267
4268    M4VIFI_ImagePlane* planeswap;
4269    M4VIFI_UInt32 x,y;
4270
4271    M4VIFI_UInt8 *p_out0;
4272    M4VIFI_UInt8 *p_out1;
4273    M4VIFI_UInt8 *p_out2;
4274    M4VIFI_UInt8 *alphaMask;
4275    /* "Old image" */
4276    M4VIFI_UInt8 *p_in1_Y;
4277    M4VIFI_UInt8 *p_in1_U;
4278    M4VIFI_UInt8 *p_in1_V;
4279    /* "New image" */
4280    M4VIFI_UInt8 *p_in2_Y;
4281    M4VIFI_UInt8 *p_in2_U;
4282    M4VIFI_UInt8 *p_in2_V;
4283
4284    err = M4NO_ERROR;
4285
4286    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4287
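    /* Map the transition progress onto the 0-255 range of the alpha mask */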
4288    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4289
4290    if( alphaContext->isreverse != M4OSA_FALSE)
4291    {
4292        alphaProgressLevel = 255 - alphaProgressLevel;
4293        planeswap = PlaneIn1;
4294        PlaneIn1 = PlaneIn2;
4295        PlaneIn2 = planeswap;
4296    }
4297
4298    p_out0 = PlaneOut[0].pac_data;
4299    p_out1 = PlaneOut[1].pac_data;
4300    p_out2 = PlaneOut[2].pac_data;
4301
4302    alphaMask = alphaContext->pPlane->pac_data;
4303
4304    /* "Old image" */
4305    p_in1_Y = PlaneIn1[0].pac_data;
4306    p_in1_U = PlaneIn1[1].pac_data;
4307    p_in1_V = PlaneIn1[2].pac_data;
4308    /* "New image" */
4309    p_in2_Y = PlaneIn2[0].pac_data;
4310    p_in2_U = PlaneIn2[1].pac_data;
4311    p_in2_V = PlaneIn2[2].pac_data;
4312
4313     /**
4314     * For each row ... */
4315    for( y=0; y<PlaneOut->u_height; y++ )
4316    {
4317        /**
4318         * ... and each column of the alpha mask */
4319        for( x=0; x<PlaneOut->u_width; x++ )
4320        {
4321            /**
4322             * If the value of the current pixel of the alpha mask is greater than the current
4323             * progress level (normalized to [0-255]) */
4324            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
4325            {
4326                /* We keep "old image" in output plane */
4327                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4328                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4329                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4330                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4331                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4332            }
4333            else
4334            {
4335                /* We take "new image" in output plane */
4336                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4337                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4338                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4339                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4340                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4341            }
4342        }
4343    }
4344
4345    return(err);
4346}
4347
4348/**
4349 ******************************************************************************
4350 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
4351 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4352 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4353 *                                    M4VIFI_ImagePlane *PlaneOut,
4354 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4355 *                                    M4OSA_UInt32 uiTransitionKind)
4356 *
4357 * @brief    This function applies an alpha magic transition with blending between two input YUV420 planar frames
4358 * @note
4359 * @param    userData        (IN) Contains a pointer on a settings structure
4360 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4361 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4362 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4363 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4364 * @param    uiTransitionKind(IN) Unused
4365 *
4366 * @return    M4VIFI_OK:    No error
4367 ******************************************************************************
4368 */
4369M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4370                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4371                                     M4VSS3GPP_ExternalProgress *pProgress,
4372                                     M4OSA_UInt32 uiTransitionKind)
4373{
4374    M4OSA_ERR err;
4375
4376    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4377    M4VIFI_Int32 alphaProgressLevel;
4378    M4VIFI_Int32 alphaBlendLevelMin;
4379    M4VIFI_Int32 alphaBlendLevelMax;
4380    M4VIFI_Int32 alphaBlendRange;
4381
4382    M4VIFI_ImagePlane* planeswap;
4383    M4VIFI_UInt32 x,y;
4384    M4VIFI_Int32 alphaMaskValue;
4385
4386    M4VIFI_UInt8 *p_out0;
4387    M4VIFI_UInt8 *p_out1;
4388    M4VIFI_UInt8 *p_out2;
4389    M4VIFI_UInt8 *alphaMask;
4390    /* "Old image" */
4391    M4VIFI_UInt8 *p_in1_Y;
4392    M4VIFI_UInt8 *p_in1_U;
4393    M4VIFI_UInt8 *p_in1_V;
4394    /* "New image" */
4395    M4VIFI_UInt8 *p_in2_Y;
4396    M4VIFI_UInt8 *p_in2_U;
4397    M4VIFI_UInt8 *p_in2_V;
4398
4399
4400    err = M4NO_ERROR;
4401
4402    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4403
4404    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4405
4406    if( alphaContext->isreverse != M4OSA_FALSE)
4407    {
4408        alphaProgressLevel = 255 - alphaProgressLevel;
4409        planeswap = PlaneIn1;
4410        PlaneIn1 = PlaneIn2;
4411        PlaneIn2 = planeswap;
4412    }
4413
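    /* Pixels whose mask value lies within +/- blendingthreshold of the current progress level
    are linearly blended between the two frames; outside this band the pixel is taken entirely
    from the old or the new frame. */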
4414    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
4415
4416    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
4417
4418    alphaBlendRange = (alphaContext->blendingthreshold)*2;
4419
4420    p_out0 = PlaneOut[0].pac_data;
4421    p_out1 = PlaneOut[1].pac_data;
4422    p_out2 = PlaneOut[2].pac_data;
4423
4424    alphaMask = alphaContext->pPlane->pac_data;
4425
4426    /* "Old image" */
4427    p_in1_Y = PlaneIn1[0].pac_data;
4428    p_in1_U = PlaneIn1[1].pac_data;
4429    p_in1_V = PlaneIn1[2].pac_data;
4430    /* "New image" */
4431    p_in2_Y = PlaneIn2[0].pac_data;
4432    p_in2_U = PlaneIn2[1].pac_data;
4433    p_in2_V = PlaneIn2[2].pac_data;
4434
4435    /* apply Alpha Magic on each pixel */
4436    for( y=0; y<PlaneOut->u_height; y++ )
4437    {
4438        for( x=0; x<PlaneOut->u_width; x++ )
4439        {
4440            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
4441            if( alphaBlendLevelMax < alphaMaskValue )
4442            {
4443                /* We keep "old image" in output plane */
4444                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4445                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4446                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4447                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4448                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4449            }
4450            else if( (alphaBlendLevelMin < alphaMaskValue)&&
4451                    (alphaMaskValue <= alphaBlendLevelMax ) )
4452            {
4453                /* We blend "old and new image" in output plane */
4454                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
4455                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
4456                        +(alphaBlendLevelMax-alphaMaskValue)\
4457                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
4458
4459                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
4460                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
4461                        *PlaneIn1[1].u_stride))
4462                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
4463                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );
4464
4465                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4466                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
4467                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
4468                                +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
4469                                    *PlaneIn2[2].u_stride)) )/alphaBlendRange );
4470
4471            }
4472            else
4473            {
4474                /* We take "new image" in output plane */
4475                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4476                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4477                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4478                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4479                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4480            }
4481        }
4482    }
4483
4484    return(err);
4485}
4486
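/* Address of sample (x, y) inside a plane, taking the top-left offset and the stride into account */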
4487#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
4488     * (plane).u_stride + (x) )
4489
4490static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
4491{
4492    M4OSA_UInt32    height, width, sourceStride, destStride, y;
4493    M4OSA_MemAddr8    sourceWalk, destWalk;
4494
4495    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4496     recomputed from memory. */
4497    height = dest->u_height;
4498    width = dest->u_width;
4499
4500    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
4501    sourceStride = source->u_stride;
4502
4503    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
4504    destStride = dest->u_stride;
4505
4506    for (y=0; y<height; y++)
4507    {
4508        memcpy((void *)destWalk, (void *)sourceWalk, width);
4509        destWalk += destStride;
4510        sourceWalk += sourceStride;
4511    }
4512}
4513
4514static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
4515                                                M4VIFI_ImagePlane* bottomPlane,
4516                                                M4VIFI_ImagePlane *PlaneOut,
4517                                                M4OSA_UInt32    shiftUV)
4518{
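    /* shiftUV is the slide offset expressed in chroma rows (the luma plane uses twice that).
    The output is built from the last rows of the top source frame followed by the first rows
    of the bottom source frame, so the picture appears to slide upwards as shiftUV grows. */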
4519    M4OSA_UInt32 i;
4520
4521    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4522    "hot" at the same time (better for cache). */
4523    for (i=0; i<3; i++)
4524    {
4525        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
4526        M4OSA_MemAddr8    sourceWalk, destWalk;
4527
4528        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4529         recomputed from memory. */
4530        if (0 == i) /* Y plane */
4531        {
4532            bottomPartHeight = 2*shiftUV;
4533        }
4534        else /* U and V planes */
4535        {
4536            bottomPartHeight = shiftUV;
4537        }
4538        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
4539        width = PlaneOut[i].u_width;
4540
4541        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
4542        sourceStride = topPlane[i].u_stride;
4543
4544        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4545        destStride = PlaneOut[i].u_stride;
4546
4547        /* First the part from the top source clip frame. */
4548        for (y=0; y<topPartHeight; y++)
4549        {
4550            memcpy((void *)destWalk, (void *)sourceWalk, width);
4551            destWalk += destStride;
4552            sourceWalk += sourceStride;
4553        }
4554
4555        /* and now change the vars to copy the part from the bottom source clip frame. */
4556        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
4557        sourceStride = bottomPlane[i].u_stride;
4558
4559        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
4560
4561        for (y=0; y<bottomPartHeight; y++)
4562        {
4563            memcpy((void *)destWalk, (void *)sourceWalk, width);
4564            destWalk += destStride;
4565            sourceWalk += sourceStride;
4566        }
4567    }
4568    return M4NO_ERROR;
4569}
4570
4571static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
4572                                                  M4VIFI_ImagePlane* rightPlane,
4573                                                  M4VIFI_ImagePlane *PlaneOut,
4574                                                  M4OSA_UInt32    shiftUV)
4575{
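    /* shiftUV is the slide offset expressed in chroma columns (the luma plane uses twice that).
    The output is built from the rightmost columns of the left source frame followed by the
    leftmost columns of the right source frame, so the picture slides to the left as shiftUV
    grows. */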
4576    M4OSA_UInt32 i, y;
4577    /* If we shifted by exactly 0, or by the width of the target image, then we would get the left
4578    frame or the right frame, respectively. These cases aren't handled well by the general
4579    code path, since they result in 0-size memcopies, so we might as well handle them separately. */
4580
4581    if (0 == shiftUV)    /* output left frame */
4582    {
4583        for (i = 0; i<3; i++) /* for each YUV plane */
4584        {
4585            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
4586        }
4587
4588        return M4NO_ERROR;
4589    }
4590
4591    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
4592    {
4593        for (i = 0; i<3; i++) /* for each YUV plane */
4594        {
4595            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
4596        }
4597
4598        return M4NO_ERROR;
4599    }
4600
4601
4602    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4603    "hot" at the same time (better for cache). */
4604    for (i=0; i<3; i++)
4605    {
4606        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
4607        M4OSA_UInt32    leftStride,    rightStride,    destStride;
4608        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
4609
4610        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
4611        and recomputed from memory. */
4612        height = PlaneOut[i].u_height;
4613
4614        if (0 == i) /* Y plane */
4615        {
4616            rightPartWidth = 2*shiftUV;
4617        }
4618        else /* U and V planes */
4619        {
4620            rightPartWidth = shiftUV;
4621        }
4622        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
4623
4624        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
4625        leftStride = leftPlane[i].u_stride;
4626
4627        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
4628        rightStride = rightPlane[i].u_stride;
4629
4630        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4631        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
4632        destStride = PlaneOut[i].u_stride;
4633
4634        for (y=0; y<height; y++)
4635        {
4636            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
4637            leftWalk += leftStride;
4638
4639            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
4640            rightWalk += rightStride;
4641
4642            destWalkLeft += destStride;
4643            destWalkRight += destStride;
4644        }
4645    }
4646
4647    return M4NO_ERROR;
4648}
4649
4650
4651M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4652                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4653                                  M4VSS3GPP_ExternalProgress *pProgress,
4654                                  M4OSA_UInt32 uiTransitionKind)
4655{
4656    M4xVSS_internal_SlideTransitionSettings* settings =
4657         (M4xVSS_internal_SlideTransitionSettings*)userData;
4658    M4OSA_UInt32    shiftUV;
4659
4660    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
4661    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4662        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
4663    {
4664        /* horizontal slide */
4665        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
4666        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
4667            shiftUV,pProgress->uiProgress );
4668        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4669        {
4670            /* Put the previous clip frame on the right and the next clip frame on the left,
4671            and reverse shiftUV (since it is a shift from the left frame) so that we start out
4672            on the right frame, which comes from the previous clip. */
4674            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4675                 (PlaneOut[1]).u_width - shiftUV);
4676        }
4677        else /* Left out, right in*/
4678        {
4679            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4680        }
4681    }
4682    else
4683    {
4684        /* vertical slide */
4685        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
4686        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
4687            pProgress->uiProgress );
4688        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
4689        {
4690            /* Put the previous clip frame top, the next clip frame bottom. */
4691            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4692        }
4693        else /* Bottom out, top in */
4694        {
4695            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4696                (PlaneOut[1]).u_height - shiftUV);
4697        }
4698    }
4699
4700    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
4701    at least in debug mode. */
4702}
4703
4704
4705/**
4706 ******************************************************************************
4707 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *userData,
4708 *                                                    M4VIFI_ImagePlane PlaneIn1[3],
 *                                                    M4VIFI_ImagePlane PlaneIn2[3],
4709 *                                                    M4VIFI_ImagePlane *PlaneOut,
4710 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4711 *                                                    M4OSA_UInt32 uiTransitionKind)
4712 *
4713 * @brief    This function applies a fade to black and then a fade from black
4714 * @note
4715 * @param    userData        (IN) Unused
4716 * @param    PlaneIn1        (IN) Input YUV420 planar from the outgoing clip
 * @param    PlaneIn2        (IN) Input YUV420 planar from the incoming clip
4717 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4718 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4719 * @param    uiTransitionKind(IN) Unused
4720 *
4721 * @return    M4VIFI_OK:    No error
4722 ******************************************************************************
4723 */
4724M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4725                                     M4VIFI_ImagePlane PlaneIn2[3],
4726                                     M4VIFI_ImagePlane *PlaneOut,
4727                                     M4VSS3GPP_ExternalProgress *pProgress,
4728                                     M4OSA_UInt32 uiTransitionKind)
4729{
4730    M4OSA_Int32 tmp = 0;
4731    M4OSA_ERR err = M4NO_ERROR;
4732
4733
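    /* The first half of the transition fades the outgoing clip down to black, the second half
    fades the incoming clip up from black. */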
4734    if((pProgress->uiProgress) < 500)
4735    {
4736        /**
4737         * Compute where we are in the effect (scale is 0->1024) */
4738        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
4739
4740        /**
4741         * Apply the fade to black (decreasing luma scale) on the outgoing frame */
4742        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
4743             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4744        if (M4NO_ERROR != err)
4745        {
4746            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
4747                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4748            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4749        }
4750    }
4751    else
4752    {
4753        /**
4754         * Compute where we are in the effect (scale is 0->1024). */
4755        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
4756
4757        /**
4758         * Apply the fade from black (increasing luma scale) on the incoming frame */
4759        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
4760             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4761        if (M4NO_ERROR != err)
4762        {
4763            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
4764                 M4VFL_modifyLumaWithScale returns error 0x%x,\
4765                     returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4766            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4767        }
4768    }
4769
4770
4771    return M4VIFI_OK;
4772}
4773
4774
4775/**
4776 ******************************************************************************
4777 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
4778 *                                                        M4OSA_Void* pBufferIn,
4779 *                                                        M4OSA_Void* pBufferOut,
4780 *                                                        M4OSA_UInt32* convertedSize)
4781 *
4782 * @brief    This function converts from the customer format to UTF8
4783 * @note
4784 * @param    pContext        (IN)    The integrator own context
4785 * @param    pBufferIn        (IN)    Buffer to convert
4786 * @param    pBufferOut        (OUT)    Converted buffer
4787 * @param    convertedSize    (OUT)    Size of the converted buffer
4788 *
4789 * @return    M4NO_ERROR:    No error
4790 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4791 ******************************************************************************
4792 */
4793M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4794                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4795{
4796    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4797    M4OSA_ERR err;
4798
4799    pBufferOut = pBufferIn;
4800    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
4801        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4802    {
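        /* A customer conversion function is installed: convert into the shared temporary buffer
        and, if the first attempt reports that the buffer is too small, reallocate it to the
        required size and retry. */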
4803        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4804
4805        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
4806            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
4807
4808        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4809            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4810                 (M4OSA_UInt32*)&ConvertedSize);
4811        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4812        {
4813            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4814
4815            /* free the too small buffer; ConvertedSize now holds the size required
               by the conversion function */
4816            free(xVSS_context->\
4817                UTFConversionContext.pTempOutConversionBuffer);
4818
4819            /*re-allocate the buffer*/
4820            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4821                 (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4822                     (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
4823            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4824            {
4825                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
4826                return M4ERR_ALLOC;
4827            }
4828            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4829
4830            memset((void *)xVSS_context->\
4831                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4832                    UTFConversionContext.m_TempOutConversionSize);
4833
4834            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
4835                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4836                    (M4OSA_UInt32*)&ConvertedSize);
4837            if(err != M4NO_ERROR)
4838            {
4839                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4840                return err;
4841            }
4842        }
4843        else if(err != M4NO_ERROR)
4844        {
4845            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
4846            return err;
4847        }
4848        /*decoded path*/
4849        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4850        (*convertedSize) = ConvertedSize;
4851    }
4852    return M4NO_ERROR;
4853}
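
/*
 * Illustrative caller sketch (kept under #if 0, not compiled). Because
 * pBufferOut is passed by value, the assignments made to it above are not
 * visible to the caller: when a conversion function is installed, the UTF8
 * string has to be read back from the context's temporary conversion buffer.
 * The function name, pClipPath and pDecodedPath below are hypothetical.
 */
#if 0
static M4OSA_ERR exampleUseConvertedPath(M4xVSS_Context* xVSS_context,
                                         M4OSA_Void* pClipPath)
{
    M4OSA_ERR err;
    M4OSA_UInt32 length = 0;
    M4OSA_Void* pDecodedPath = pClipPath;

    err = M4xVSS_internalConvertToUTF8(xVSS_context, pClipPath,
        (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
        &length);
    if (M4NO_ERROR != err)
    {
        return err;
    }
    if (xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* A conversion took place: the converted string is in the temp buffer */
        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
    }
    /* ... hand pDecodedPath (and length) over to the file/clip API ... */
    (void)pDecodedPath;
    (void)length;
    return M4NO_ERROR;
}
#endif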
4854
4855
4856/**
4857 ******************************************************************************
4858 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn, M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4859 *
4860 * @brief    This function converts from UTF8 to the customer format
4861 * @note
4862 * @param    pContext    (IN) The integrator's own context
4863 * @param    pBufferIn        (IN)    Buffer to convert
4864 * @param    pBufferOut        (OUT)    Converted buffer
4865 * @param    convertedSize    (OUT)    Size of the converted buffer
4866 *
4867 * @return    M4NO_ERROR:    No error
4868 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4869 ******************************************************************************
4870 */
4871M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4872                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4873{
4874    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4875    M4OSA_ERR err;
4876
4877    pBufferOut = pBufferIn; /* note: only the local copy of pBufferOut is updated (pointer passed
                                   by value); the converted string, if any, is left in the context's
                                   pTempOutConversionBuffer */
4878    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
4879        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4880    {
4881        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4882
4883        memset((void *)xVSS_context->\
4884            UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4885                UTFConversionContext.m_TempOutConversionSize);
4886
4887        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
4888            ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
4889                UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
4890        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
4891        {
4892            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4893
4894            /* free the too small buffer; ConvertedSize now holds the size required
               by the conversion function */
4895            free(xVSS_context->\
4896                UTFConversionContext.pTempOutConversionBuffer);
4897
4898            /*re-allocate the buffer*/
4899            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
4900                (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
4901                     (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
4902            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
4903            {
4904                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
4905                return M4ERR_ALLOC;
4906            }
4907            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
4908
4909            memset((void *)xVSS_context->\
4910                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
4911                    UTFConversionContext.m_TempOutConversionSize);
4912
4913            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
4914                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
4915                     (M4OSA_UInt32*)&ConvertedSize);
4916            if(err != M4NO_ERROR)
4917            {
4918                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4919                return err;
4920            }
4921        }
4922        else if(err != M4NO_ERROR)
4923        {
4924            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
4925            return err;
4926        }
4927        /*decoded path*/
4928        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
4929        (*convertedSize) = ConvertedSize;
4930    }
4931
4932
4933    return M4NO_ERROR;
4934}
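
/*
 * Illustrative sketch (kept under #if 0, not compiled) of the contract the
 * pConvToUTF8Fct / pConvFromUTF8Fct callbacks above rely on: the callback
 * receives the available output size through the size pointer and either
 * fills the buffer and writes back the converted size, or returns
 * M4xVSSWAR_BUFFER_OUT_TOO_SMALL with the required size so that the callers
 * above can reallocate pTempOutConversionBuffer and retry. The Latin-1
 * example and its name are hypothetical.
 */
#if 0
static M4OSA_ERR exampleConvFromLatin1ToUTF8(M4OSA_Void* pBufferIn,
                                             M4OSA_Void* pBufferOut,
                                             M4OSA_UInt32* pSize)
{
    /* Worst case: every Latin-1 byte expands to two UTF8 bytes, plus '\0' */
    M4OSA_UInt32 required = (M4OSA_UInt32)strlen((const char*)pBufferIn) * 2 + 1;

    if (required > *pSize)
    {
        *pSize = required;                      /* report the needed size */
        return M4xVSSWAR_BUFFER_OUT_TOO_SMALL;  /* caller grows the buffer and retries */
    }

    /* ... perform the actual Latin-1 -> UTF8 conversion into pBufferOut ... */

    *pSize = required;                          /* report the converted size */
    return M4NO_ERROR;
}
#endif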
4935