M4xVSS_internal.c revision 32ed3f4dad00f8a65f7e6b38402c70d5341c57eb
1/*
2 * Copyright (C) 2004-2011 NXP Software
3 * Copyright (C) 2011 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17/**
18 ******************************************************************************
19 * @file    M4xVSS_internal.c
20 * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
21 * @note
22 ******************************************************************************
23 */
24#include "M4OSA_Debug.h"
25#include "M4OSA_CharStar.h"
26#include "M4OSA_FileExtra.h"
27
28#include "NXPSW_CompilerSwitches.h"
29
30#include "M4VSS3GPP_API.h"
31#include "M4VSS3GPP_ErrorCodes.h"
32
33#include "M4xVSS_API.h"
34#include "M4xVSS_Internal.h"
35
36/*for rgb16 color effect*/
37#include "M4VIFI_Defines.h"
38#include "M4VIFI_Clip.h"
39
40/**
41 * component includes */
42#include "M4VFL_transition.h"            /**< video effects */
43
44/* Internal header file of VSS is included because of MMS use case */
45#include "M4VSS3GPP_InternalTypes.h"
46
47/*Exif header files to add image rendering support (cropping, black borders)*/
48#include "M4EXIFC_CommonAPI.h"
49// StageFright encoders require the frame width and height to be multiples of 16
50#include "M4ENCODER_common.h"
51
52#define TRANSPARENT_COLOR 0x7E0
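/* 0x7E0 is pure green in RGB565 (R = 0, G = 0x3F, B = 0); it is the chroma-key used to mark
   transparent pixels, which is why the dedicated RGB565-to-YUV420 conversion declared below
   is needed to avoid a green cast around keyed areas. */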
53
54/* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */
55M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
56                                        M4VIFI_ImagePlane *pPlaneOut);
57
58
59/*special MCS function used only in VideoArtist and VideoStudio to open the media in the normal
60 mode. That way the media duration is accurate*/
61extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
62                                         M4VIDEOEDITING_FileType InputFileType,
63                                         M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
64
65
66/**
67 ******************************************************************************
68 * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
69 * @brief        This function initializes MCS (3GP transcoder) with the given
70 *                parameters
71 * @note        The transcoding parameters are given by the internal xVSS context.
72 *                This context contains a pointer on the current element of the
73 *                chained list of MCS parameters.
74 *
75 * @param    pContext            (IN) Pointer on the xVSS edit context
76 * @return    M4NO_ERROR:            No error
77 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
78 * @return    M4ERR_ALLOC:        Memory allocation has failed
79 ******************************************************************************
80 */
81M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
82{
83    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
84    M4OSA_ERR err;
85    M4MCS_Context mcs_context;
86    M4MCS_OutputParams Params;
87    M4MCS_EncodingParams Rates;
88    M4OSA_UInt32 i;
89
90    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
91    if(err != M4NO_ERROR)
92    {
93        M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
94        return err;
95    }
96
97#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS
98    /* replay recorded external decoder registrations on the MCS */
99    for (i=0; i<M4VD_kVideoType_NB; i++)
100    {
101        if (xVSS_context->registeredExternalDecs[i].registered)
102        {
103            err = M4MCS_registerExternalVideoDecoder(mcs_context, i,
104                    xVSS_context->registeredExternalDecs[i].pDecoderInterface,
105                    xVSS_context->registeredExternalDecs[i].pUserData);
106            if (M4NO_ERROR != err)
107            {
108                M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\
109                     M4MCS_registerExternalVideoDecoder() returns 0x%x!", err);
110                M4MCS_abort(mcs_context);
111                return err;
112            }
113        }
114    }
115#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */
116
117    /* replay recorded external encoder registrations on the MCS */
118    for (i=0; i<M4VE_kEncoderType_NB; i++)
119    {
120        if (xVSS_context->registeredExternalEncs[i].registered)
121        {
122            err = M4MCS_registerExternalVideoEncoder(mcs_context, i,
123                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
124                    xVSS_context->registeredExternalEncs[i].pUserData);
125            if (M4NO_ERROR != err)
126            {
127                M4OSA_TRACE1_1("M4xVSS_internalStartTranscoding:\
128                     M4MCS_registerExternalVideoEncoder() returns 0x%x!", err);
129                M4MCS_abort(mcs_context);
130                return err;
131            }
132        }
133    }
134
135    err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
136         xVSS_context->pMCScurrentParams->InputFileType,
137             xVSS_context->pMCScurrentParams->pFileOut,
138             xVSS_context->pMCScurrentParams->pFileTemp);
139    if (err != M4NO_ERROR)
140    {
141        M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
142        M4MCS_abort(mcs_context);
143        return err;
144    }
145
146    /**
147     * Fill MCS parameters with the parameters contained in the current element of the
148       MCS parameters chained list */
149    Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
150    Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
151    Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
152    Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
153    Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
154    Params.OutputAudioSamplingFrequency =
155         xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
156    Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
157    Params.pOutputPCMfile = M4OSA_NULL;
158    /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
159    switch(xVSS_context->pMCScurrentParams->MediaRendering)
160    {
161    case M4xVSS_kResizing:
162        Params.MediaRendering = M4MCS_kResizing;
163        break;
164    case M4xVSS_kCropping:
165        Params.MediaRendering = M4MCS_kCropping;
166        break;
167    case M4xVSS_kBlackBorders:
168        Params.MediaRendering = M4MCS_kBlackBorders;
169        break;
170    default:
171        break;
172    }
173    /**/
174#ifdef TIMESCALE_BUG
175    Params.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
176#endif
177    // new params after integrating MCS 2.0
178    // Set the number of audio effects; 0 for now.
179    Params.nbEffects = 0;
180
181    // Set the audio effects pointer; none for now.
182    Params.pEffects = NULL;
183
184    // Keep the EXIF metadata; do not discard it.
185    Params.bDiscardExif = M4OSA_FALSE;
186
187    // Do not adjust the picture orientation.
188    Params.bAdjustOrientation = M4OSA_FALSE;
189    // new params after integrating MCS 2.0
190
191    /**
192     * Set output parameters */
193    err = M4MCS_setOutputParams(mcs_context, &Params);
194    if (err != M4NO_ERROR)
195    {
196        M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
197        M4MCS_abort(mcs_context);
198        return err;
199    }
200
201    Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
202    Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
203    Rates.BeginCutTime = 0;
204    Rates.EndCutTime = 0;
205    Rates.OutputFileSize = 0;
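    /* Default values; the cut times are overwritten just below with the per-part cut times
       taken from the current element of the MCS parameters list. */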
206
207    /*FB: transcoding per parts*/
208    Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
209    Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
210    Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
211
212    err = M4MCS_setEncodingParams(mcs_context, &Rates);
213    if (err != M4NO_ERROR)
214    {
215        M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
216        M4MCS_abort(mcs_context);
217        return err;
218    }
219
220    err = M4MCS_checkParamsAndStart(mcs_context);
221    if (err != M4NO_ERROR)
222    {
223        M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
224        M4MCS_abort(mcs_context);
225        return err;
226    }
227
228    /**
229     * Save MCS context to be able to call MCS step function in M4xVSS_step function */
230    xVSS_context->pMCS_Ctxt = mcs_context;
231
232    return M4NO_ERROR;
233}
234
235/**
236 ******************************************************************************
237 * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
238 * @brief        This function cleans up MCS (3GP transcoder)
239 * @note
240 *
241 * @param    pContext            (IN) Pointer on the xVSS edit context
242 * @return    M4NO_ERROR:            No error
243 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
244 * @return    M4ERR_ALLOC:        Memory allocation has failed
245 ******************************************************************************
246 */
247M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
248{
249    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
250    M4OSA_ERR err;
251
252    err = M4MCS_close(xVSS_context->pMCS_Ctxt);
253    if (err != M4NO_ERROR)
254    {
255        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
256        M4MCS_abort(xVSS_context->pMCS_Ctxt);
257        return err;
258    }
259
260    /**
261     * Free this MCS instance */
262    err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
263    if (err != M4NO_ERROR)
264    {
265        M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
266        return err;
267    }
268
269    xVSS_context->pMCS_Ctxt = M4OSA_NULL;
270
271    return M4NO_ERROR;
272}
273
274/**
275 ******************************************************************************
276 * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
277 *                                             M4OSA_FileReadPointer* pFileReadPtr,
278 *                                                M4VIFI_ImagePlane* pImagePlanes,
279 *                                                 M4OSA_UInt32 width,
280 *                                                M4OSA_UInt32 height);
281 * @brief    Converts and resizes an ARGB8888 image to YUV420
282 * @note
283 * @param    pFileIn            (IN) The Image input file
284 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
285 * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user
286 *                            ARGB8888 image  will be converted and resized  to output
287 *                             YUV420 plane size
288 * @param    width        (IN) width of the ARGB8888
289 * @param    height            (IN) height of the ARGB8888
290 * @return    M4NO_ERROR:    No error
291 * @return    M4ERR_ALLOC: memory error
292 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
293 ******************************************************************************
294 */
295
296M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
297                                                          M4OSA_FileReadPointer* pFileReadPtr,
298                                                          M4VIFI_ImagePlane* pImagePlanes,
299                                                          M4OSA_UInt32 width,M4OSA_UInt32 height)
300{
301    M4OSA_Context pARGBIn;
302    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
303    M4OSA_UInt32 frameSize_argb=(width * height * 4);
304    M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
305    M4OSA_UInt32 i = 0,j= 0;
306    M4OSA_ERR err=M4NO_ERROR;
307
308
309    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb,
310         M4VS, (M4OSA_Char*)"Image argb data");
311        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
312    if(pTmpData == M4OSA_NULL) {
313        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
314            Failed to allocate memory for Image clip");
315        return M4ERR_ALLOC;
316    }
317
318    M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
319        width ,height);
320    /* Open the input ARGB8888 file for reading */
321    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
322    if(err != M4NO_ERROR)
323    {
324        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
325            Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
326        M4OSA_free((M4OSA_MemAddr32)pTmpData);
327        pTmpData = M4OSA_NULL;
328        goto cleanup;
329    }
330
331    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
332    if(err != M4NO_ERROR)
333    {
334        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't read ARGB8888\
335             file %s, error: 0x%x\n",pFileIn, err);
336        pFileReadPtr->closeRead(pARGBIn);
337        M4OSA_free((M4OSA_MemAddr32)pTmpData);
338        pTmpData = M4OSA_NULL;
339        goto cleanup;
340    }
341
342    err = pFileReadPtr->closeRead(pARGBIn);
343    if(err != M4NO_ERROR)
344    {
345        M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
346             file %s, error: 0x%x\n",pFileIn, err);
347        M4OSA_free((M4OSA_MemAddr32)pTmpData);
348        pTmpData = M4OSA_NULL;
349        goto cleanup;
350    }
351
352    rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS,
353         (M4OSA_Char*)"Image clip RGB888 data");
354    if(rgbPlane1.pac_data == M4OSA_NULL)
355    {
356        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
357            Failed to allocate memory for Image clip");
358        M4OSA_free((M4OSA_MemAddr32)pTmpData);
359        return M4ERR_ALLOC;
360    }
361
362        rgbPlane1.u_height = height;
363        rgbPlane1.u_width = width;
364        rgbPlane1.u_stride = width*3;
365        rgbPlane1.u_topleft = 0;
366
367
368    /** Remove the alpha channel */
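    /* The input buffer is assumed to hold 4 bytes per pixel in A,R,G,B order: skipping every
       byte whose offset is a multiple of 4 drops the alpha byte and packs the remaining
       R,G,B bytes contiguously into the RGB888 plane. */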
369    for (i=0, j = 0; i < frameSize_argb; i++) {
370        if ((i % 4) == 0) continue;
371        rgbPlane1.pac_data[j] = pTmpData[i];
372        j++;
373    }
374        M4OSA_free((M4OSA_MemAddr32)pTmpData);
375
376    /* To Check if resizing is required with color conversion */
377    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
378    {
379        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
380        frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
381        rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(frameSize, M4VS,
382             (M4OSA_Char*)"Image clip RGB888 data");
383        if(rgbPlane2.pac_data == M4OSA_NULL)
384        {
385            M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
386            M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data); /* pTmpData was already freed above */
387            return M4ERR_ALLOC;
388        }
389            rgbPlane2.u_height =  pImagePlanes->u_height;
390            rgbPlane2.u_width = pImagePlanes->u_width;
391            rgbPlane2.u_stride = pImagePlanes->u_width*3;
392            rgbPlane2.u_topleft = 0;
393
394        /* Resizing RGB888 to RGB888 */
395        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
396        if(err != M4NO_ERROR)
397        {
398            M4OSA_TRACE1_1("error when resizing RGB888 to RGB888: 0x%x\n", err);
399            M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data);
400            M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data);
401            return err;
402        }
403        /*Converting Resized RGB888 to YUV420 */
404        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
405        if(err != M4NO_ERROR)
406        {
407            M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
408            M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data);
409            M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data);
410            return err;
411        }
412            M4OSA_free((M4OSA_MemAddr32)rgbPlane2.pac_data);
413            M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data);
414
415            M4OSA_TRACE1_0("RGB to YUV done");
416
417
418    }
419    else
420    {
421        M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
422        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
423        if(err != M4NO_ERROR)
424        {
425            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
426        }
427            M4OSA_free((M4OSA_MemAddr32)rgbPlane1.pac_data);
428
429            M4OSA_TRACE1_0("RGB to YUV done");
430    }
431cleanup:
432    M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
433    return err;
434}
435
436/**
437 ******************************************************************************
438 * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
439 *                                             M4OSA_FileReadPointer* pFileReadPtr,
440 *                                                M4VIFI_ImagePlane** pImagePlanes,
441 *                                                 M4OSA_UInt32 width,
442 *                                                M4OSA_UInt32 height);
443 * @brief    Converts an ARGB8888 image to YUV420
444 * @note
445 * @param    pFileIn            (IN) The Image input file
446 * @param    pFileReadPtr    (IN) Pointer on filesystem functions
447 * @param    pImagePlanes    (OUT) Address at which the pointer to the YUV420 planes is
448 *                            returned; the planes are allocated by this function at the
449 *                            ARGB8888 image size (no resizing is performed)
450 * @param    width        (IN) width of the ARGB8888
451 * @param    height            (IN) height of the ARGB8888
452 * @return    M4NO_ERROR:    No error
453 * @return    M4ERR_ALLOC: memory error
454 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
455 ******************************************************************************
456 */
457
458M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
459                                                 M4OSA_FileReadPointer* pFileReadPtr,
460                                                 M4VIFI_ImagePlane** pImagePlanes,
461                                                 M4OSA_UInt32 width,M4OSA_UInt32 height)
462{
463    M4OSA_ERR err = M4NO_ERROR;
464    M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
465
466    yuvPlane = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane),
467                M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
468    if(yuvPlane == M4OSA_NULL) {
469        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :\
470            Failed to allocate memory for Image clip");
471        return M4ERR_ALLOC;
472    }
473    yuvPlane[0].u_height = height;
474    yuvPlane[0].u_width = width;
475    yuvPlane[0].u_stride = width;
476    yuvPlane[0].u_topleft = 0;
477    yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(yuvPlane[0].u_height \
478        * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
479
480    yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
481    yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
482    yuvPlane[1].u_stride = yuvPlane[1].u_width;
483    yuvPlane[1].u_topleft = 0;
484    yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
485        * yuvPlane[0].u_width);
486
487    yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
488    yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
489    yuvPlane[2].u_stride = yuvPlane[2].u_width;
490    yuvPlane[2].u_topleft = 0;
491    yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
492        * yuvPlane[1].u_width);
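    /* The three planes share the single buffer allocated for the Y plane: Y (width x height
       bytes) is followed by U and then V, each a quarter of the Y size, hence the 1.5 factor
       above. Freeing yuvPlane[0].pac_data therefore releases the whole picture. */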
493    err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
494                                                          yuvPlane, width, height);
495    if(err != M4NO_ERROR)
496    {
497        M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err);
498        M4OSA_free((M4OSA_MemAddr32)yuvPlane);
499        return err;
500    }
501
502        *pImagePlanes = yuvPlane;
503
504    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
505    return err;
506
507}
508
509/**
510 ******************************************************************************
511 * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
512 *                                        M4VIFI_ImagePlane* pImagePlanes,
513 *                                        M4OSA_Double* pPictureDuration);
514 * @brief    It feeds the PTO3GPP with YUV420 pictures.
515 * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
516 * @param    pPictureCtxt    (IN) The integrator's own context
517 * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
518 * @param    pPictureDuration(OUT) Duration of the returned picture
519 *
520 * @return    M4NO_ERROR:    No error
521 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
522 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
523 ******************************************************************************
524 */
525M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
526                                     M4OSA_Double* pPictureDuration)
527{
528    M4OSA_ERR err = M4NO_ERROR;
529    M4OSA_UInt8    last_frame_flag = 0;
530    M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
531
532    /*Used for pan&zoom*/
533    M4OSA_UInt8 tempPanzoomXa = 0;
534    M4OSA_UInt8 tempPanzoomXb = 0;
535    M4AIR_Params Params;
536    /**/
537
538    /*Used for cropping and black borders*/
539    M4OSA_Context    pPictureContext = M4OSA_NULL;
540    M4OSA_FilePosition    pictureSize = 0 ;
541    M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
542    //M4EXIFC_Context pExifContext = M4OSA_NULL;
543    M4EXIFC_BasicTags pBasicTags;
544    M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
545    M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
546    M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
547    /**/
548
549    /**
550     * Check input parameters */
551    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
552         "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
553    M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
554         "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
555    M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
556         "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
557    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
558    /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
559    if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
560    {
561        pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
562    }
563
564    /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
565    if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
566    {
567        /**
568         * Convert and resize input ARGB8888 file to YUV420 */
569        /*To support ARGB8888 : */
570        M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and height %d %d",
571            pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
572        err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
573             pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
574                pC->m_pPto3GPPparams->height);
575        if(err != M4NO_ERROR)
576        {
577            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting the ARGB8888 image: 0x%x\n", err);
578            return err;
579        }
580    }
581    /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
582    else
583    {
584        /**
585         * Computes ratios */
586        if(pC->m_pDecodedPlane == M4OSA_NULL)
587        {
588            /**
589             * Convert input ARGB8888 file to YUV420 */
590             M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and height %d %d",
591                pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
592            err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
593                &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
594            if(err != M4NO_ERROR)
595            {
596                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when converting the ARGB8888 image: 0x%x\n", err);
597                if(pC->m_pDecodedPlane != M4OSA_NULL)
598                {
599                    /* YUV420 planar is returned but allocation is made only once
600                        (contiguous planes in memory) */
601                    if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
602                    {
603                        M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane->pac_data);
604                    }
605                    M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane);
606                    pC->m_pDecodedPlane = M4OSA_NULL;
607                }
608                return err;
609            }
610        }
611
612        /*Initialize AIR Params*/
613        Params.m_inputCoord.m_x = 0;
614        Params.m_inputCoord.m_y = 0;
615        Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
616        Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
617        Params.m_outputSize.m_width = pImagePlanes->u_width;
618        Params.m_outputSize.m_height = pImagePlanes->u_height;
619        Params.m_bOutputStripe = M4OSA_FALSE;
620        Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
621
622        /*Initialize Exif params structure*/
623        pBasicTags.orientation = M4COMMON_kOrientationUnknown;
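        /* The input is a raw ARGB8888 buffer rather than a JPEG file, so no EXIF data is read
           here: the orientation stays "unknown" and the rendering code below takes the default
           (no rotation) branch. */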
624
625        /**
626        Pan&zoom params*/
627        if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
628        {
629            /*Save ratio values, they can be reused if the new ratios are 0*/
630            tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
631            tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
632#if 0
633            /**
634             * Check size of output JPEG is compatible with pan & zoom parameters
635               First, check final (b) parameters */
636            if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 100 )
637            {
638                M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings !!!\
639                    New final Zoom ratio is: %d", (100 - pC->m_pPto3GPPparams->PanZoomTopleftXb));
640                /* We do not change the topleft parameter as it may correspond to a precise area
641                of the picture -> only the zoom ratio is modified */
642                pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
643            }
644
645            if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 100 )
646            {
647                M4OSA_TRACE1_1("WARNING : Bad final Pan & Zoom settings \
648                    !!! New final Zoom ratio is: %d",
649                    (100 - pC->m_pPto3GPPparams->PanZoomTopleftYb));
650                /* We do not change the topleft parameter as it may correspond to a
651                precise area of the picture -> only the zoom ratio is modified */
652                pC->m_pPto3GPPparams->PanZoomXb = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
653            }
654
655            /**
656             * Then, check initial (a) parameters */
657            if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 100 )
658            {
659                M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! \
660                    New initial Zoom ratio is: %d",(100 - pC->m_pPto3GPPparams->PanZoomTopleftXa));
661                /* We do not change the topleft parameter as it may correspond to a precise
662                area of the picture-> only the zoom ratio is modified */
663                pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
664            }
665
666            if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 100 )
667            {
668                M4OSA_TRACE1_1("WARNING : Bad initial Pan & Zoom settings !!! New initial\
669                     Zoom ratio is: %d", (100 - pC->m_pPto3GPPparams->PanZoomTopleftYa));
670                /* We do not change the topleft parameter as it may correspond to a precise
671                area of the picture-> only the zoom ratio is modified */
672                pC->m_pPto3GPPparams->PanZoomXa = 100 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
673            }
674#endif
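            /* The pan & zoom ratios and top-left offsets are expressed in thousandths (0..1000)
               of the picture dimensions. The checks below handle a zero zoom ratio and clamp
               "topleft + ratio" to 1000 so that the pan & zoom window stays inside the picture. */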
675            /*Check that the ratio is not 0*/
676            /*Check (a) parameters*/
677            if(pC->m_pPto3GPPparams->PanZoomXa == 0)
678            {
679                M4OSA_UInt8 maxRatio = 0;
680                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
681                     pC->m_pPto3GPPparams->PanZoomTopleftYa)
682                {
683                    /*The ratio is 0, that means the area of the picture defined with (a)
684                    parameters is bigger than the image size*/
685                    if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
686                    {
687                        /*The oversize is maxRatio*/
688                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
689                    }
690                }
691                else
692                {
693                    /*The ratio is 0, that means the area of the picture defined with (a)
694                     parameters is bigger than the image size*/
695                    if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
696                    {
697                        /*The oversize is maxRatio*/
698                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
699                    }
700                }
701                /*Modify the (a) parameters:*/
702                if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
703                {
704                    /*The (a) topleft parameters can be moved to keep the same area size*/
705                    pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
706                }
707                else
708                {
709                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
710                    modified to match the image size*/
711                    pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
712                }
713                if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
714                {
715                    /*The (a) topleft parameters can be moved to keep the same area size*/
716                    pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
717                }
718                else
719                {
720                    /*Move the (a) topleft parameter to 0 but the ratio will be also further
721                     modified to match the image size*/
722                    pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
723                }
724                /*The new ratio is the original one*/
725                pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
726                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
727                {
728                    /*Change the ratio if the area of the picture defined with (a) parameters is
729                    bigger than the image size*/
730                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
731                }
732                if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
733                {
734                    /*Change the ratio if the area of the picture defined with (a) parameters is
735                    bigger than the image size*/
736                    pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
737                }
738            }
739            /*Check (b) parameters*/
740            if(pC->m_pPto3GPPparams->PanZoomXb == 0)
741            {
742                M4OSA_UInt8 maxRatio = 0;
743                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
744                     pC->m_pPto3GPPparams->PanZoomTopleftYb)
745                {
746                    /*The ratio is 0, that means the area of the picture defined with (b)
747                     parameters is bigger than the image size*/
748                    if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
749                    {
750                        /*The oversize is maxRatio*/
751                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
752                    }
753                }
754                else
755                {
756                    /*The ratio is 0, that means the area of the picture defined with (b)
757                     parameters is bigger than the image size*/
758                    if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
759                    {
760                        /*The oversize is maxRatio*/
761                        maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
762                    }
763                }
764                /*Modify the (b) parameters:*/
765                if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
766                {
767                    /*The (b) topleft parameters can be moved to keep the same area size*/
768                    pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
769                }
770                else
771                {
772                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
773                     modified to match the image size*/
774                    pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
775                }
776                if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
777                {
778                    /*The (b) topleft parameters can be moved to keep the same area size*/
779                    pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
780                }
781                else
782                {
783                    /*Move the (b) topleft parameter to 0 but the ratio will be also further
784                    modified to match the image size*/
785                    pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
786                }
787                /*The new ratio is the original one*/
788                pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
789                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
790                {
791                    /*Change the ratio if the area of the picture defined with (b) parameters is
792                    bigger than the image size*/
793                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
794                }
795                if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
796                {
797                    /*Change the ratio if the area of the picture defined with (b) parameters is
798                    bigger than the image size*/
799                    pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
800                }
801            }
802
803            /**
804             * Computes AIR parameters */
805/*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
806            (pC->m_pPto3GPPparams->PanZoomTopleftXa +
807            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
808                - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
809            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
810        Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
811            (pC->m_pPto3GPPparams->PanZoomTopleftYa +
812            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
813                 - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
814            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
815
816        Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
817            (pC->m_pPto3GPPparams->PanZoomXa +
818            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
819            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
820
821        Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
822            (pC->m_pPto3GPPparams->PanZoomXa +
823            (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
824            pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
825 */
826            // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter
827            // will be x-1 max for x no. of frames
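            /* The AIR input window is linearly interpolated between the start (a) and end (b)
               pan & zoom settings as the image counter advances, then converted from thousandths
               of the decoded picture dimensions into pixel coordinates. */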
828            Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
829                (pC->m_pPto3GPPparams->PanZoomTopleftXa +
830                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
831                     - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
832                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
833            Params.m_inputCoord.m_y =
834                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
835                (pC->m_pPto3GPPparams->PanZoomTopleftYa +
836                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
837                     - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
838                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
839
840            Params.m_inputSize.m_width =
841                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
842                (pC->m_pPto3GPPparams->PanZoomXa +
843                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
844                     - pC->m_pPto3GPPparams->PanZoomXa) *
845                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
846
847            Params.m_inputSize.m_height =
848                 (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
849                (pC->m_pPto3GPPparams->PanZoomXa +
850                (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
851                    - pC->m_pPto3GPPparams->PanZoomXa) *
852                pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
853
854            if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
855                 > pC->m_pDecodedPlane->u_width)
856            {
857                Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
858                    - Params.m_inputCoord.m_x;
859            }
860
861            if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
862                 > pC->m_pDecodedPlane->u_height)
863            {
864                Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
865                     - Params.m_inputCoord.m_y;
866            }
867
868
869
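            /* Round the interpolated input size down to even values: YUV420 chroma is
               subsampled by two in each direction, so odd widths or heights would misalign
               the U and V planes. */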
870            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
871            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
872        }
873
874
875
876    /**
877        Picture rendering: Black borders*/
878
879        if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
880        {
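            /* Black-border rendering: fill the whole output planes with the border colour first,
               then shrink the writable window of each plane (u_topleft / u_width / u_height) so
               that the AIR writes the rescaled picture centred in the frame and the untouched
               margins remain as borders. */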
881            memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
882                (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
883            memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
884                (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
885            memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
886                (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
887
888            /**
889            First without pan&zoom*/
890            if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
891            {
892                switch(pBasicTags.orientation)
893                {
894                default:
895                case M4COMMON_kOrientationUnknown:
896                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
897                case M4COMMON_kOrientationTopLeft:
898                case M4COMMON_kOrientationTopRight:
899                case M4COMMON_kOrientationBottomRight:
900                case M4COMMON_kOrientationBottomLeft:
901                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
902                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
903                         //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
904                    {
905                        /*it is height so black borders will be on the top and on the bottom side*/
906                        Params.m_outputSize.m_width = pImagePlanes->u_width;
907                        Params.m_outputSize.m_height =
908                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
909                                * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
910                        /*number of lines at the top*/
911                        pImagePlanes[0].u_topleft =
912                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
913                                -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
914                        pImagePlanes[0].u_height = Params.m_outputSize.m_height;
915                        pImagePlanes[1].u_topleft =
916                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
917                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
918                        pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
919                        pImagePlanes[2].u_topleft =
920                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
921                                -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
922                        pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
923                    }
924                    else
925                    {
926                        /*it is width so black borders will be on the left and right side*/
927                        Params.m_outputSize.m_height = pImagePlanes->u_height;
928                        Params.m_outputSize.m_width =
929                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
930                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
931
932                        pImagePlanes[0].u_topleft =
933                            (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
934                                -Params.m_outputSize.m_width)>>1));
935                        pImagePlanes[0].u_width = Params.m_outputSize.m_width;
936                        pImagePlanes[1].u_topleft =
937                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
938                                -(Params.m_outputSize.m_width>>1)))>>1);
939                        pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
940                        pImagePlanes[2].u_topleft =
941                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
942                                -(Params.m_outputSize.m_width>>1)))>>1);
943                        pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
944                    }
945                    break;
946                case M4COMMON_kOrientationLeftTop:
947                case M4COMMON_kOrientationLeftBottom:
948                case M4COMMON_kOrientationRightTop:
949                case M4COMMON_kOrientationRightBottom:
950                        if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
951                             /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
952                             //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
953                        {
954                            /*it is height so black borders will be on the top and on
955                             the bottom side*/
956                            Params.m_outputSize.m_height = pImagePlanes->u_width;
957                            Params.m_outputSize.m_width =
958                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
959                                    * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
960                            /*number of lines at the top*/
961                            pImagePlanes[0].u_topleft =
962                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
963                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
964                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
965                            pImagePlanes[1].u_topleft =
966                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
967                                    -(Params.m_outputSize.m_width>>1)))>>1)\
968                                        *pImagePlanes[1].u_stride)+1;
969                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
970                            pImagePlanes[2].u_topleft =
971                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
972                                    -(Params.m_outputSize.m_width>>1)))>>1)\
973                                        *pImagePlanes[2].u_stride)+1;
974                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
975                        }
976                        else
977                        {
978                            /*it is width so black borders will be on the left and right side*/
979                            Params.m_outputSize.m_width = pImagePlanes->u_height;
980                            Params.m_outputSize.m_height =
981                                 (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
982                                     * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
983
984                            pImagePlanes[0].u_topleft =
985                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
986                                    -Params.m_outputSize.m_height))>>1))+1;
987                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
988                            pImagePlanes[1].u_topleft =
989                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
990                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
991                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
992                            pImagePlanes[2].u_topleft =
993                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
994                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
995                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
996                        }
997                    break;
998                }
999            }
1000
1001            /**
1002            Secondly with pan&zoom*/
1003            else
1004            {
1005                switch(pBasicTags.orientation)
1006                {
1007                default:
1008                case M4COMMON_kOrientationUnknown:
1009                    Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
1010                case M4COMMON_kOrientationTopLeft:
1011                case M4COMMON_kOrientationTopRight:
1012                case M4COMMON_kOrientationBottomRight:
1013                case M4COMMON_kOrientationBottomLeft:
1014                    /*NO ROTATION*/
1015                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
1016                         /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
1017                            //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
1018                    {
1019                        /*Black borders will be on the top and bottom of the output video*/
1020                        /*Maximum output height if the input image aspect ratio is kept and if
1021                        the output width is the screen width*/
1022                        M4OSA_UInt32 tempOutputSizeHeight =
1023                            (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
1024                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
1025                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1026                        M4OSA_UInt32 tempFinalInputHeight = 0;
1027                        /*The output width is the screen width*/
1028                        Params.m_outputSize.m_width = pImagePlanes->u_width;
1029                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1030
1031                        /*Maximum input height according to the maximum output height
1032                        (proportional to the maximum output height)*/
1033                        tempInputSizeHeightMax = (pImagePlanes->u_height\
1034                            *Params.m_inputSize.m_height)/tempOutputSizeHeight;
1035                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1036
1037                        /*Check if the maximum possible input height is contained into the
1038                        input image height*/
1039                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
1040                        {
1041                            /*The maximum possible input height is contained in the input
1042                            image height,
1043                            that means no black borders, the input pan zoom area will be extended
1044                            so that the input AIR height will be the maximum possible*/
1045                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
1046                                 <= Params.m_inputCoord.m_y
1047                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
1048                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
1049                                         + Params.m_inputSize.m_height))
1050                            {
1051                                /*The input pan zoom area can be extended symmetrically on the
1052                                top and bottom side*/
1053                                Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
1054                                    - Params.m_inputSize.m_height)>>1);
1055                            }
1056                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1057                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1058                            {
1059                                /*There is not enough place above the input pan zoom area to
1060                                extend it symmetrically,
1061                                so extend it to the maximum on the top*/
1062                                Params.m_inputCoord.m_y = 0;
1063                            }
1064                            else
1065                            {
1066                                /*There is not enough place below the input pan zoom area to
1067                                extend it symmetrically,
1068                                so extend it to the maximum on the bottom*/
1069                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
1070                                    - tempInputSizeHeightMax;
1071                            }
1072                            /*The input height of the AIR is the maximum possible height*/
1073                            Params.m_inputSize.m_height = tempInputSizeHeightMax;
1074                        }
1075                        else
1076                        {
1077                            /*The maximum possible input height is greater than the input
1078                            image height,
1079                            that means black borders are necessary to keep aspect ratio
1080                            The input height of the AIR is all the input image height*/
1081                            Params.m_outputSize.m_height =
1082                                (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
1083                                    /Params.m_inputSize.m_height;
1084                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1085                            Params.m_inputCoord.m_y = 0;
1086                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1087                            pImagePlanes[0].u_topleft =
1088                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1089                                    -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
1090                            pImagePlanes[0].u_height = Params.m_outputSize.m_height;
1091                            pImagePlanes[1].u_topleft =
1092                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1093                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1094                                        *pImagePlanes[1].u_stride);
1095                            pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
1096                            pImagePlanes[2].u_topleft =
1097                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1098                                    -(Params.m_outputSize.m_height>>1)))>>1)\
1099                                        *pImagePlanes[2].u_stride);
1100                            pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
1101                        }
1102                    }
1103                    else
1104                    {
1105                        /*Black borders will be on the left and right side of the output video*/
1106                        /*Maximum output width if the input image aspect ratio is kept and if the
1107                         output height is the screen height*/
1108                        M4OSA_UInt32 tempOutputSizeWidth =
1109                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
1110                                * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
1111                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1112                        M4OSA_UInt32 tempFinalInputWidth = 0;
1113                        /*The output height is the screen height*/
1114                        Params.m_outputSize.m_height = pImagePlanes->u_height;
1115                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1116
1117                        /*Maximum input width according to the maximum output width
1118                        (proportional to the maximum output width)*/
1119                        tempInputSizeWidthMax =
1120                             (pImagePlanes->u_width*Params.m_inputSize.m_width)\
1121                                /tempOutputSizeWidth;
1122                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1123
1124                        /*Check if the maximum possible input width is contained into the input
1125                         image width*/
1126                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
1127                        {
1128                            /*The maximum possible input width is contained in the input
1129                            image width,
1130                            that means no black borders, the input pan zoom area will be extended
1131                            so that the input AIR width will be the maximum possible*/
1132                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
1133                                <= Params.m_inputCoord.m_x
1134                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
1135                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1136                                        + Params.m_inputSize.m_width))
1137                            {
1138                                /*The input pan zoom area can be extended symmetrically on the
1139                                     right and left side*/
1140                                Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
1141                                     - Params.m_inputSize.m_width)>>1);
1142                            }
1143                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1144                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1145                            {
1146                                /*There is not enough place to the left of the input pan zoom
1147                                    area to extend it symmetrically,
1148                                so extend it to the maximum on the left*/
1149                                Params.m_inputCoord.m_x = 0;
1150                            }
1151                            else
1152                            {
1153                                /*There is not enough place to the right of the input pan zoom
1154                                    area to extend it symmetrically,
1155                                so extend it to the maximum on the right*/
1156                                Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
1157                                    - tempInputSizeWidthMax;
1158                            }
1159                            /*The input width of the AIR is the maximum possible width*/
1160                            Params.m_inputSize.m_width = tempInputSizeWidthMax;
1161                        }
1162                        else
1163                        {
1164                            /*The maximum possible input width is greater than the input
1165                            image width,
1166                            that means black borders are necessary to keep aspect ratio
1167                            The input width of the AIR is all the input image width*/
1168                            Params.m_outputSize.m_width =\
1169                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
1170                                    /Params.m_inputSize.m_width;
1171                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1172                            Params.m_inputCoord.m_x = 0;
1173                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1174                            pImagePlanes[0].u_topleft =
1175                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1176                                    -Params.m_outputSize.m_width)>>1));
1177                            pImagePlanes[0].u_width = Params.m_outputSize.m_width;
1178                            pImagePlanes[1].u_topleft =
1179                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1180                                    -(Params.m_outputSize.m_width>>1)))>>1);
1181                            pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
1182                            pImagePlanes[2].u_topleft =
1183                                 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1184                                    -(Params.m_outputSize.m_width>>1)))>>1);
1185                            pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
1186                        }
1187                    }
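                    /* Worked example with hypothetical numbers, assuming no pan & zoom (the input
                       area is the whole 240x320 decoded picture) and a 320x240 output plane:
                       tempOutputSizeWidth = 240*240/320 = 180 and tempInputSizeWidthMax =
                       320*240/180 = 426 > 240, so black borders are needed: the output width
                       becomes 180, pImagePlanes[0].u_topleft = (320-180)/2 = 70 and
                       pImagePlanes[0].u_width = 180, i.e. a 180x240 picture centred in the
                       320x240 plane with 70-pixel borders on each side. */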
1188                    break;
1189                case M4COMMON_kOrientationLeftTop:
1190                case M4COMMON_kOrientationLeftBottom:
1191                case M4COMMON_kOrientationRightTop:
1192                case M4COMMON_kOrientationRightBottom:
1193                    /*ROTATION*/
1194                    if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1195                         /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
1196                         //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
1197                    {
1198                        /*Black borders will be on the left and right side of the output video*/
1199                        /*Maximum output height if the input image aspect ratio is kept and if
1200                        the output height is the screen width*/
1201                        M4OSA_UInt32 tempOutputSizeHeight =
1202                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
1203                             /pC->m_pDecodedPlane->u_height);
1204                        M4OSA_UInt32 tempInputSizeHeightMax = 0;
1205                        M4OSA_UInt32 tempFinalInputHeight = 0;
1206                        /*Rotation: output width = screen height, output height = screen width*/
1207                        Params.m_outputSize.m_height = pImagePlanes->u_width;
1208                        Params.m_outputSize.m_width= pImagePlanes->u_height;
1209                        tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
1210
1211                        /*Maximum input height according to the maximum output height
1212                             (proportional to the maximum output height)*/
1213                        tempInputSizeHeightMax =
1214                            (pImagePlanes->u_height*Params.m_inputSize.m_width)\
1215                                /tempOutputSizeHeight;
1216                        tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
1217
1218                        /*Check if the maximum possible input height is contained into the
1219                             input image width (rotation included)*/
1220                        if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
1221                        {
1222                            /*The maximum possible input height is contained in the input
1223                            image width (rotation included),
1224                            that means no black borders, the input pan zoom area will be extended
1225                            so that the input AIR width will be the maximum possible*/
1226                            if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
1227                                <= Params.m_inputCoord.m_x
1228                                && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
1229                                     <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
1230                                        + Params.m_inputSize.m_width))
1231                            {
1232                                /*The input pan zoom area can be extended symmetrically on the
1233                                 right and left side*/
1234                                Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
1235                                    - Params.m_inputSize.m_width)>>1);
1236                            }
1237                            else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
1238                                -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
1239                            {
1240                                /*There is not enough place on the left of the input pan
1241                                zoom area to extend it symmetrically,
1242                                so extend it to the maximum on the left*/
1243                                Params.m_inputCoord.m_x = 0;
1244                            }
1245                            else
1246                            {
1247                                /*There is not enough place on the right of the input pan zoom
1248                                 area to extend it symmetrically,
1249                                so extend it to the maximum on the right*/
1250                                Params.m_inputCoord.m_x =
1251                                     pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
1252                            }
1253                            /*The input width of the AIR is the maximum possible width*/
1254                            Params.m_inputSize.m_width = tempInputSizeHeightMax;
1255                        }
1256                        else
1257                        {
1258                            /*The maximum possible input height is greater than the input
1259                            image width (rotation included),
1260                            that means black borders are necessary to keep aspect ratio
1261                            The input width of the AIR is all the input image width*/
1262                            Params.m_outputSize.m_width =
1263                            (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
1264                                /Params.m_inputSize.m_width;
1265                            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1266                            Params.m_inputCoord.m_x = 0;
1267                            Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
1268                            pImagePlanes[0].u_topleft =
1269                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
1270                                    -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
1271                            pImagePlanes[0].u_height = Params.m_outputSize.m_width;
1272                            pImagePlanes[1].u_topleft =
1273                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
1274                                -(Params.m_outputSize.m_width>>1)))>>1)\
1275                                    *pImagePlanes[1].u_stride)+1;
1276                            pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
1277                            pImagePlanes[2].u_topleft =
1278                            ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
1279                                -(Params.m_outputSize.m_width>>1)))>>1)\
1280                                    *pImagePlanes[2].u_stride)+1;
1281                            pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
1282                        }
1283                    }
1284                    else
1285                    {
1286                        /*Black borders will be on the top and bottom of the output video*/
1287                        /*Maximum output width if the input image aspect ratio is kept and if
1288                         the output width is the screen height*/
1289                        M4OSA_UInt32 tempOutputSizeWidth =
1290                        (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
1291                             /pC->m_pDecodedPlane->u_width);
1292                        M4OSA_UInt32 tempInputSizeWidthMax = 0;
1293                        M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0;
1294                        /*Rotation: output width = screen height, output height = screen width*/
1295                        Params.m_outputSize.m_width = pImagePlanes->u_height;
1296                        Params.m_outputSize.m_height= pImagePlanes->u_width;
1297                        tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
1298
1299                        /*Maximum input width according to the maximum output width
1300                         (proportional to the maximum output width)*/
1301                        tempInputSizeWidthMax =
1302                        (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
1303                        tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
1304
1305                        /*Check if the maximum possible input width is contained into the input
1306                         image height (rotation included)*/
1307                        if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
1308                        {
1309                            /*The maximum possible input width is contained in the input
1310                             image height (rotation included),
1311                            that means no black borders, the input pan zoom area will be extended
1312                            so that the input AIR height will be the maximum possible*/
1313                            if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
1314                                <= Params.m_inputCoord.m_y
1315                                && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
1316                                     <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
1317                                        + Params.m_inputSize.m_height))
1318                            {
1319                                /*The input pan zoom area can be extended symmetrically on
1320                                the right and left side*/
1321                                Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
1322                                    - Params.m_inputSize.m_height)>>1);
1323                            }
1324                            else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
1325                                -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
1326                            {
1327                                /*There is not enough place on the top of the input pan zoom
1328                                area to extend it symmetrically,
1329                                so extend it to the maximum on the top*/
1330                                Params.m_inputCoord.m_y = 0;
1331                            }
1332                            else
1333                            {
1334                                /*There is not enough place on the bottom of the input pan zoom
1335                                 area to extend it symmetrically,
1336                                so extend it to the maximum on the bottom*/
1337                                Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
1338                                     - tempInputSizeWidthMax;
1339                            }
1340                            /*The input height of the AIR is the maximum possible height*/
1341                            Params.m_inputSize.m_height = tempInputSizeWidthMax;
1342                        }
1343                        else
1344                        {
1345                            /*The maximum possible input width is greater than the input\
1346                             image height (rotation included),
1347                            that means black borders are necessary to keep aspect ratio
1348                            The input height of the AIR is all the input image height*/
1349                            Params.m_outputSize.m_height =
1350                                (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
1351                                    /Params.m_inputSize.m_height;
1352                            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1353                            Params.m_inputCoord.m_y = 0;
1354                            Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
1355                            pImagePlanes[0].u_topleft =
1356                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
1357                                    -Params.m_outputSize.m_height))>>1))+1;
1358                            pImagePlanes[0].u_width = Params.m_outputSize.m_height;
1359                            pImagePlanes[1].u_topleft =
1360                                ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
1361                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1362                            pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
1363                            pImagePlanes[2].u_topleft =
1364                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
1365                                    -(Params.m_outputSize.m_height>>1)))>>1))+1;
1366                            pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
1367                        }
1368                    }
1369                    break;
1370                }
1371            }
1372
1373            /*Width and height have to be even*/
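            /* (x>>1)<<1 clears the least significant bit, i.e. rounds down to the nearest even
               value (e.g. 175 becomes 174); even dimensions are required by the YUV 4:2:0
               chroma subsampling. */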
1374            Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
1375            Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
1376            Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
1377            Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
1378            pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
1379            pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
1380            pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
1381            pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
1382            pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
1383            pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
1384
1385            /*Check that values are coherent*/
1386            if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
1387            {
1388                Params.m_inputSize.m_width = Params.m_outputSize.m_width;
1389            }
1390            else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
1391            {
1392                Params.m_inputSize.m_height = Params.m_outputSize.m_height;
1393            }
1394        }
1395
1396        /**
1397        Picture rendering: Resizing and Cropping*/
1398        if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
1399        {
1400            switch(pBasicTags.orientation)
1401            {
1402            default:
1403            case M4COMMON_kOrientationUnknown:
1404                Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
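                /* no break: an unknown orientation is treated as TopLeft and falls through to
                   set the output size */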
1405            case M4COMMON_kOrientationTopLeft:
1406            case M4COMMON_kOrientationTopRight:
1407            case M4COMMON_kOrientationBottomRight:
1408            case M4COMMON_kOrientationBottomLeft:
1409                Params.m_outputSize.m_height = pImagePlanes->u_height;
1410                Params.m_outputSize.m_width = pImagePlanes->u_width;
1411                break;
1412            case M4COMMON_kOrientationLeftTop:
1413            case M4COMMON_kOrientationLeftBottom:
1414            case M4COMMON_kOrientationRightTop:
1415            case M4COMMON_kOrientationRightBottom:
1416                Params.m_outputSize.m_height = pImagePlanes->u_width;
1417                Params.m_outputSize.m_width = pImagePlanes->u_height;
1418                break;
1419            }
1420        }
1421
1422        /**
1423        Picture rendering: Cropping*/
1424        if(pC->m_mediaRendering == M4xVSS_kCropping)
1425        {
1426            if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
1427                 /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
1428            {
1429                M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
1430                /*height will be cropped*/
1431                Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
1432                    * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
1433                Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
1434                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1435                {
1436                    Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
1437                        ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
1438                }
1439                else
1440                {
1441                    Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
1442                        ((tempHeight - Params.m_inputSize.m_height))>>1);
1443                }
1444            }
1445            else
1446            {
1447                M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
1448                /*width will be cropped*/
1449                Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
1450                    * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
1451                Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
1452                if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
1453                {
1454                    Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
1455                        ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
1456                }
1457                else
1458                {
1459                    Params.m_inputCoord.m_x += (M4OSA_Int32)\
1460                        (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
1461                }
1462            }
1463        }
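        /* Worked cropping example with hypothetical numbers: for a 240x320 input area and a
           320x240 output, (240*240)/320 = 180 < 320 so the height is cropped to 180 and, without
           pan & zoom, m_inputCoord.m_y = (320-180)/2 = 70: the central 240x180 region (the same
           4:3 aspect ratio as the output) is kept. */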
1464
1465
1466
1467        /**
1468         * Call AIR functions */
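        /* AIR call sequence used below: the context is created once (M4AIR_create), re-configured
           for every picture with the crop/scale parameters computed above (M4AIR_configure), and
           M4AIR_get then produces the resized/cropped YUV420 planes into pImagePlanes. */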
1469        if(M4OSA_NULL == pC->m_air_context)
1470        {
1471            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
1472            if(err != M4NO_ERROR)
1473            {
1474                M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data);
1475                M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane);
1476                pC->m_pDecodedPlane = M4OSA_NULL;
1477                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1478                     Error when initializing AIR: 0x%x", err);
1479                return err;
1480            }
1481        }
1482
1483        err = M4AIR_configure(pC->m_air_context, &Params);
1484        if(err != M4NO_ERROR)
1485        {
1486            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
1487                 Error when configuring AIR: 0x%x", err);
1488            M4AIR_cleanUp(pC->m_air_context);
1489            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data);
1490            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane);
1491            pC->m_pDecodedPlane = M4OSA_NULL;
1492            return err;
1493        }
1494
1495        err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
1496        if(err != M4NO_ERROR)
1497        {
1498            M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
1499            M4AIR_cleanUp(pC->m_air_context);
1500            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data);
1501            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane);
1502            pC->m_pDecodedPlane = M4OSA_NULL;
1503            return err;
1504        }
1505        pImagePlanes[0] = pImagePlanes1;
1506        pImagePlanes[1] = pImagePlanes2;
1507        pImagePlanes[2] = pImagePlanes3;
1508    }
1509
1510
1511    /**
1512     * Increment the image counter */
1513    pC->m_ImageCounter++;
1514
1515    /**
1516     * Check end of sequence */
1517    last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
1518
1519    /**
1520     * Keep the picture duration */
1521    *pPictureDuration = pC->m_timeDuration;
1522
1523    if (1 == last_frame_flag)
1524    {
1525        if(M4OSA_NULL != pC->m_air_context)
1526        {
1527            err = M4AIR_cleanUp(pC->m_air_context);
1528            if(err != M4NO_ERROR)
1529            {
1530                M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
1531                return err;
1532            }
1533        }
1534        if(M4OSA_NULL != pC->m_pDecodedPlane)
1535        {
1536            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane[0].pac_data);
1537            M4OSA_free((M4OSA_MemAddr32)pC->m_pDecodedPlane);
1538            pC->m_pDecodedPlane = M4OSA_NULL;
1539        }
1540        return M4PTO3GPP_WAR_LAST_PICTURE;
1541    }
1542
1543    M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
1544    return M4NO_ERROR;
1545}
1546
1547/**
1548 ******************************************************************************
1549 * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1550 * @brief    This function initializes Pto3GPP with the given parameters
1551 * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
1552 *            context. This context contains a pointer on the current element
1553 *            of the chained list of Pto3GPP parameters.
1554 * @param    pContext    (IN) The integrator own context
1555 *
1556 * @return    M4NO_ERROR:    No error
1557 * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
1558 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1559 ******************************************************************************
1560 */
1561M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
1562{
1563    /************************************************************************/
1564    /* Definitions to generate dummy AMR file used to add AMR silence in files generated
1565     by Pto3GPP */
1566    #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
1567    /* This constant is defined in M4VSS3GPP_InternalConfig.h */
1568    extern const M4OSA_UInt8\
1569         M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
1570
1571    /* AMR file header ("#!AMR\n") written before the silence frame */
1572    #define M4VSS3GPP_AMR_HEADER_SIZE 6
1573    const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_HEADER_SIZE] =
1574    { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
1575    /************************************************************************/
1576
1577    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1578    M4OSA_ERR err;
1579    M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
1580    M4PTO3GPP_Params Params;
1581     M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
1582    M4OSA_Bool cmpResult=M4OSA_FALSE;
1583    M4OSA_Context pDummyAMRFile;
1584    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1585    /*UTF conversion support*/
1586    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1587    M4OSA_UInt32 i;
1588
1589    /**
1590     * Create a M4PTO3GPP instance */
1591    err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
1592         xVSS_context->pFileWritePtr);
1593    if (err != M4NO_ERROR)
1594    {
1595        M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err);
1596        return err;
1597    }
1598
1599    /* replay recorded external encoder registrations on the PTO3GPP */
1600    for (i=0; i<M4VE_kEncoderType_NB; i++)
1601    {
1602        if (xVSS_context->registeredExternalEncs[i].registered)
1603        {
1604            err = M4PTO3GPP_RegisterExternalVideoEncoder(pM4PTO3GPP_Ctxt, i,
1605                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
1606                    xVSS_context->registeredExternalEncs[i].pUserData);
1607            if (M4NO_ERROR != err)
1608            {
1609                M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1610                     M4PTO3GPP_RegisterExternalVideoEncoder() returns 0x%x!", err);
1611                M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1612                return err;
1613            }
1614        }
1615    }
1616
1617    pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_malloc(sizeof(M4xVSS_PictureCallbackCtxt),
1618         M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
1619    if(pCallBackCtxt == M4OSA_NULL)
1620    {
1621        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt); /* do not leak the M4PTO3GPP instance created above */
1622        return M4ERR_ALLOC;
1623    }
1624
1625    Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
1626    Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
1627
1628    /**
1629     * Generate "dummy" amr file containing silence in temporary folder */
1630    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1631    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1632
1633    /**
1634     * UTF conversion: convert the temporary path into the customer format*/
1635    pDecodedPath = out_amr;
1636
1637    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1638            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1639    {
1640        M4OSA_UInt32 length = 0;
1641        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1642             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1643        if(err != M4NO_ERROR)
1644        {
1645            M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
1646                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1647            return err;
1648        }
1649        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1650    }
1651
1652    /**
1653    * End of the conversion, now use the converted path*/
1654
1655    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
1656
1657    /*Commented because of the use of the UTF conversion see above*/
1658/*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
1659 */
1660    if(err != M4NO_ERROR)
1661    {
1662        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
1663             error: 0x%x\n",out_amr, err);
1664        return err;
1665    }
1666
1667    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1668        (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
1669    if(err != M4NO_ERROR)
1670    {
1671        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
1672             error: 0x%x\n",out_amr, err);
1673        return err;
1674    }
1675
1676    err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
1677         (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
1678    if(err != M4NO_ERROR)
1679    {
1680        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1681            Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
1682        return err;
1683    }
1684
1685    err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
1686    if(err != M4NO_ERROR)
1687    {
1688        M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
1689            Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
1690        return err;
1691    }
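    /* At this point the temporary dummy.amr file contains the 6-byte AMR magic ("#!AMR\n")
       followed by a single 13-byte AMR-NB silence frame. */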
1692
1693    /**
1694     * Fill parameters for Pto3GPP with the parameters contained in the current element of the
1695     * Pto3GPP parameters chained list and with default parameters */
1696/*+ New Encoder bitrates */
1697    if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
1698        Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
1699    }
1700    else {
1701          Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
1702    }
1703    M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
1704        Params.OutputVideoBitrate);
1705/*- New Encoder bitrates */
1706    Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
1707    Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
1708    Params.pPictureCallbackCtxt    = pCallBackCtxt;
1709    /*FB: change to use the converted path (UTF conversion) see the conversion above*/
1710    /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */
1711    Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
1712    Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
1713    Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
1714    Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
1715    Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
1716    /*+PR No:  blrnxpsw#223*/
1717    /*Compute the number of frames: clip duration / duration of one frame
       (the 'framerate' field holds the per-frame duration)*/
1718    /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/
1719    /*If case check for PanZoom removed */
1720    Params.NbVideoFrames            = (M4OSA_UInt32)
1721        (xVSS_context->pPTo3GPPcurrentParams->duration \
1722            / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
1723    pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
1724    /*-PR No:  blrnxpsw#223*/
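    /* Example with hypothetical values, assuming both fields are in milliseconds: a 7000 ms clip
       with a 1500 ms per-picture duration gives NbVideoFrames = 7000 / 1500 = 4 (integer
       division), and each generated frame is reported with a 1500 ms duration through
       pCallBackCtxt->m_timeDuration. */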
1725    pCallBackCtxt->m_ImageCounter    = 0;
1726    pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
1727    pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
1728    pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
1729    pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
1730    pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
1731    pCallBackCtxt->m_air_context    = M4OSA_NULL;
1732    pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
1733
1734    /**
1735     * Set the input and output files */
1736    err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
1737    if (err != M4NO_ERROR)
1738    {
1739        M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
1740        if(pCallBackCtxt != M4OSA_NULL)
1741        {
1742            M4OSA_free((M4OSA_MemAddr32)pCallBackCtxt);
1743            pCallBackCtxt = M4OSA_NULL;
1744        }
1745        M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
1746        return err;
1747    }
1748
1749    /**
1750     * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
1751    xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
1752    xVSS_context->pCallBackCtxt = pCallBackCtxt;
1753
1754    return M4NO_ERROR;
1755}
1756
1757/**
1758 ******************************************************************************
1759 * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1760 * @brief    This function cleans up Pto3GPP
1761 * @note
1762 * @param    pContext    (IN) The integrator own context
1763 *
1764 * @return    M4NO_ERROR:    No error
1765 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1766 ******************************************************************************
1767 */
1768M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
1769{
1770    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1771    M4OSA_ERR err;
1772    M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
1773    /*UTF conversion support*/
1774    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1775
1776    /**
1777    * Free the PTO3GPP callback context */
1778    if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
1779    {
1780        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCallBackCtxt);
1781        xVSS_context->pCallBackCtxt = M4OSA_NULL;
1782    }
1783
1784    /**
1785     * Finalize the output file */
1786    err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
1787    if (err != M4NO_ERROR)
1788    {
1789        M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
1790        M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1791        return err;
1792    }
1793
1794    /**
1795     * Free this M4PTO3GPP instance */
1796    err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
1797    if (err != M4NO_ERROR)
1798    {
1799        M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
1800        return err;
1801    }
1802
1803    /**
1804     * Remove dummy.amr file */
1805    M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
1806    strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
1807
1808    /**
1809     * UTF conversion: convert the temporary path into the customer format*/
1810    pDecodedPath = out_amr;
1811
1812    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1813            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1814    {
1815        M4OSA_UInt32 length = 0;
1816        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
1817             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
1818        if(err != M4NO_ERROR)
1819        {
1820            M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
1821                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
1822            return err;
1823        }
1824        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
1825    }
1826    /**
1827    * End of the conversion, now use the decoded path*/
1828    remove((const char *)pDecodedPath);
1829
1830    /*Commented because of the use of the UTF conversion*/
1831/*    remove(out_amr);
1832 */
1833
1834    xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
1835    xVSS_context->pCallBackCtxt = M4OSA_NULL;
1836
1837    return M4NO_ERROR;
1838}
1839
1840/**
1841 ******************************************************************************
1842 * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1843 * @brief    This function converts an RGB565 plane to YUV420 planar
1844 * @note    It is used only for framing effect
1845 *            It allocates output YUV planes
1846 * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
1847 *
1848 * @return    M4NO_ERROR:    No error
1849 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1850 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1851 ******************************************************************************
1852 */
1853M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
1854{
1855    M4OSA_ERR err;
1856
1857    /**
1858     * Allocate output YUV planes */
1859    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane),
1860         M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
1861    if(framingCtx->FramingYuv == M4OSA_NULL)
1862    {
1863        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1864        return M4ERR_ALLOC;
1865    }
1866    framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
1867    framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
1868    framingCtx->FramingYuv[0].u_topleft = 0;
1869    framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
1870    framingCtx->FramingYuv[0].pac_data =
1871         (M4VIFI_UInt8*)M4OSA_malloc((framingCtx->FramingYuv[0].u_width\
1872            *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
1873                "Alloc for the conversion output YUV");
1874    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
1875    {
1876        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
1877        return M4ERR_ALLOC;
1878    }
1879    framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
1880    framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
1881    framingCtx->FramingYuv[1].u_topleft = 0;
1882    framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1883    framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
1884        + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
1885    framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
1886    framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
1887    framingCtx->FramingYuv[2].u_topleft = 0;
1888    framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
1889    framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
1890        + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
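    /* The three planes share the single buffer allocated above: Y (w*h bytes) is followed by
       U (w/2 * h/2 bytes) and then V (w/2 * h/2 bytes), i.e. the (w*h*3)>>1 bytes requested
       from M4OSA_malloc. */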
1891
1892    /**
1893     * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
1894      effect */
1895    err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
1896    if(err != M4NO_ERROR)
1897    {
1898        M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
1899             error when converting from RGB to YUV: 0x%x\n", err);
1900    }
1901
1902    framingCtx->duration = 0;
1903    framingCtx->previousClipTime = -1;
1904    framingCtx->previewOffsetClipTime = -1;
1905
1906    /**
1907     * Only one element in the chained list (no animated image with RGB buffer...) */
1908    framingCtx->pCurrent = framingCtx;
1909    framingCtx->pNext = framingCtx;
1910
1911    return M4NO_ERROR;
1912}
1913
1914M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
1915{
1916    M4OSA_UInt32 i;
1917    M4OSA_UInt8* plane = planeIn;
1918    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1919    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1920
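    /* TRANSPARENT_COLOR (0x7E0) is pure green in RGB565 (R=0, G=63, B=0); the plane is filled
       high byte first, two bytes per 16-bit pixel, so 'size' is expected to be the buffer size
       in bytes. */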
1921    for(i=0; i<(size>>1); i++)
1922    {
1923        *plane++ = transparent1;
1924        *plane++ = transparent2;
1925    }
1926
1927    return M4NO_ERROR;
1928}
1929
1930
1931/**
1932 ******************************************************************************
1933 * prototype M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1934 *                                                M4VSS3GPP_EffectSettings* pEffect,
1935 *                                                M4xVSS_FramingStruct* framingCtx,
1936                                                  M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
1937 *
1938 * @brief    This function converts an ARGB8888 input file to YUV420 when used for the framing effect
1939 * @note    The input ARGB8888 file path is contained in the pEffect structure
1940 *            If the ARGB8888 must be resized to fit output video size, this function
1941 *            will do it.
1942 * @param    pContext    (IN) The integrator own context
1943 * @param    pEffect        (IN) The effect structure containing all information about
1944 *                        the file to decode, resizing ...
1945 * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
1946 *
1947 * @return    M4NO_ERROR:    No error
1948 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
1949 * @return    M4ERR_ALLOC: Allocation error (no more memory)
1950 * @return    M4ERR_FILE_NOT_FOUND: File not found.
1951 ******************************************************************************
1952 */
1953
1954
1955M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
1956                                                               M4VSS3GPP_EffectSettings* pEffect,
1957                                                               M4xVSS_FramingStruct* framingCtx,
1958                                                               M4VIDEOEDITING_VideoFrameSize\
1959                                                               OutputVideoResolution)
1960{
1961    M4OSA_ERR err = M4NO_ERROR;
1962    M4OSA_Context pARGBIn;
1963    M4OSA_UInt32 file_size;
1964    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
1965    M4OSA_UInt32 width, height, width_out, height_out;
1966    M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
1967    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
1968    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
1969    /*UTF conversion support*/
1970    M4OSA_Char* pDecodedPath = M4OSA_NULL;
1971    M4OSA_UInt32 i = 0,j = 0;
1972    M4VIFI_ImagePlane rgbPlane;
1973    M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
1974    M4OSA_UInt32 frameSize;
1975    M4OSA_UInt32 tempAlphaPercent = 0;
1976    M4VIFI_UInt8* TempPacData = M4OSA_NULL;
1977    M4OSA_UInt16 *ptr = M4OSA_NULL;
1978    M4OSA_UInt32 z = 0;
1979
1980    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
1981
1982    M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
1983        framingCtx->width,framingCtx->height);
1984
1985    M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_malloc(frameSize_argb, M4VS, (M4OSA_Char*)\
1986        "Image argb data");
1987    if(pTmpData == M4OSA_NULL) {
1988        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
1989        return M4ERR_ALLOC;
1990    }
1991    /**
1992     * UTF conversion: convert the file path into the customer format*/
1993    pDecodedPath = pFile;
1994
1995    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
1996            && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
1997    {
1998        M4OSA_UInt32 length = 0;
1999        err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
2000             (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
2001        if(err != M4NO_ERROR)
2002        {
2003            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2004                 M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
2005            M4OSA_free((M4OSA_MemAddr32)pTmpData);
2006            pTmpData = M4OSA_NULL;
2007            return err;
2008        }
2009        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
2010    }
2011
2012    /**
2013    * End of the conversion, now use the decoded path*/
2014
2015     /* Open input ARGB8888 file and store it into memory */
2016    err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
2017
2018    if(err != M4NO_ERROR)
2019    {
2020        M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
2021        M4OSA_free((M4OSA_MemAddr32)pTmpData);
2022        pTmpData = M4OSA_NULL;
2023        return err;
2024    }
2025
2026    err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
2027    if(err != M4NO_ERROR)
2028    {
2029        xVSS_context->pFileReadPtr->closeRead(pARGBIn);
2030        M4OSA_free((M4OSA_MemAddr32)pTmpData);
2031        pTmpData = M4OSA_NULL;
2032        return err;
2033    }
2034
2035
2036    err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
2037    if(err != M4NO_ERROR)
2038    {
2039        M4OSA_TRACE1_2("Can't close input ARGB8888 file %s, error: 0x%x\n",pFile, err);
2040        M4OSA_free((M4OSA_MemAddr32)pTmpData);
2041        pTmpData = M4OSA_NULL;
2042        return err;
2043    }
2044
2045
2046    rgbPlane.u_height = framingCtx->height;
2047    rgbPlane.u_width = framingCtx->width;
2048    rgbPlane.u_stride = rgbPlane.u_width*3;
2049    rgbPlane.u_topleft = 0;
2050
2051    frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
2052    rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_malloc(((frameSize)+ (2 * framingCtx->width)),
2053         M4VS, (M4OSA_Char*)"Image clip RGB888 data");
2054    if(rgbPlane.pac_data == M4OSA_NULL)
2055    {
2056        M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
2057        M4OSA_free((M4OSA_MemAddr32)pTmpData);
2058        return M4ERR_ALLOC;
2059    }
2060
2061    M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2062          Remove the alpha channel  ");
2063
2064    /* Drop the alpha channel (ARGB8888 -> RGB888); fully transparent pixels get green
       forced to 255 so they map to the green transparency key after RGB565 packing */
2065    for (i=0, j = 0; i < frameSize_argb; i += 4) {
2066        /* this is alpha value */
2067        if ((i % 4) == 0)
2068        {
2069            tempAlphaPercent = pTmpData[i];
2070        }
2071
2072        /* R */
2073        rgbPlane.pac_data[j] = pTmpData[i+1];
2074        j++;
2075
2076        /* G */
2077        if (tempAlphaPercent > 0) {
2078            rgbPlane.pac_data[j] = pTmpData[i+2];
2079            j++;
2080        } else {/* In case of alpha value 0, set GREEN to 255 */
2081            rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
2082            j++;
2083        }
2084
2085        /* B */
2086        rgbPlane.pac_data[j] = pTmpData[i+3];
2087        j++;
2088    }
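    /* Example with a hypothetical pixel: the ARGB bytes 0x00,0x10,0x20,0x30 (fully transparent)
       become the RGB bytes 0x10,0xFF,0x30, since an alpha of 0 forces the green component to 255
       so the pixel can later be recognised as transparent. */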
2089
2090    M4OSA_free((M4OSA_MemAddr32)pTmpData);
2091    pTmpData = M4OSA_NULL;
2092
2093    /* convert RGB888 to RGB565 */
2094
2095    /* allocate temp RGB 565 buffer */
2096    TempPacData = (M4VIFI_UInt8*)M4OSA_malloc(frameSize +
2097                       (4 * (framingCtx->width + framingCtx->height + 1)),
2098                        M4VS, (M4OSA_Char*)"Image clip RGB565 data");
2099    if (TempPacData == M4OSA_NULL) {
2100        M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
2101        M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2102        return M4ERR_ALLOC;
2103    }
2104
2105    ptr = (M4OSA_UInt16 *)TempPacData;
2106    z = 0;
2107
2108    for (i = 0; i < j ; i += 3)
2109    {
2110        ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
2111                                    rgbPlane.pac_data[i+1],
2112                                    rgbPlane.pac_data[i+2]);
2113    }
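    /* Each 3-byte RGB888 triple is packed into one 16-bit RGB565 pixel, so 'z' counts pixels
       while 'i' steps through bytes; PACK_RGB565 (defined in the VIFI headers) is assumed to
       build the 5-6-5 value from the three 8-bit components. */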
2114
2115    /* free the RBG888 and assign RGB565 */
2116    M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2117    rgbPlane.pac_data = TempPacData;
2118
2119    /**
2120     * Check if output sizes are odd */
2121    if(rgbPlane.u_height % 2 != 0)
2122    {
2123        M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
2124        M4OSA_UInt32 i;
2125        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2126             output height is odd  ");
2127        output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
2128
2129        for(i=0;i<rgbPlane.u_width;i++)
2130        {
2131            *output_pac_data++ = transparent1;
2132            *output_pac_data++ = transparent2;
2133        }
2134
2135        /**
2136         * Add one extra line filled with the transparency color at the bottom of the image */
2137        rgbPlane.u_height++;
2138    }
2139    if(rgbPlane.u_width % 2 != 0)
2140    {
2141        /**
2142         * Add a new column filled with the transparency color, which means copying every RGB line ... */
2143        M4OSA_UInt32 i;
2144        M4VIFI_UInt8* newRGBpac_data;
2145        M4VIFI_UInt8* output_pac_data, *input_pac_data;
2146
2147        rgbPlane.u_width++;
2148        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2149             output width is odd  ");
2150        /**
2151         * We need to allocate a new RGB output buffer in which all decoded data
2152          + white line will be copied */
2153        newRGBpac_data = (M4VIFI_UInt8*)M4OSA_malloc(rgbPlane.u_height*rgbPlane.u_width*2\
2154            *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");
2155
2156        if(newRGBpac_data == M4OSA_NULL)
2157        {
2158            M4OSA_TRACE1_0("Allocation error in \
2159                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2160            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2161            return M4ERR_ALLOC;
2162        }
2163
2164        output_pac_data= newRGBpac_data;
2165        input_pac_data = rgbPlane.pac_data;
2166
2167        for(i=0;i<rgbPlane.u_height;i++)
2168        {
2169            memcpy((void *)output_pac_data, (void *)input_pac_data,
2170                 (rgbPlane.u_width-1)*2);
2171
2172            output_pac_data += ((rgbPlane.u_width-1)*2);
2173            /* Put the pixel to transparency color */
2174            *output_pac_data++ = transparent1;
2175            *output_pac_data++ = transparent2;
2176
2177            input_pac_data += ((rgbPlane.u_width-1)*2);
2178        }
2179        M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2180        rgbPlane.pac_data = newRGBpac_data;
2181    }
2182
2183    /* reset stride */
2184    rgbPlane.u_stride = rgbPlane.u_width*2;
2185
2186    /**
2187     * Initialize chained list parameters */
2188    framingCtx->duration = 0;
2189    framingCtx->previousClipTime = -1;
2190    framingCtx->previewOffsetClipTime = -1;
2191
2192    /**
2193     * Only one element in the chained list (no animated image ...) */
2194    framingCtx->pCurrent = framingCtx;
2195    framingCtx->pNext = framingCtx;
2196
2197    /**
2198     * Get output width/height */
2199     switch(OutputVideoResolution)
2200    //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
2201    {
2202    case M4VIDEOEDITING_kSQCIF:
2203        width_out = 128;
2204        height_out = 96;
2205        break;
2206    case M4VIDEOEDITING_kQQVGA:
2207        width_out = 160;
2208        height_out = 120;
2209        break;
2210    case M4VIDEOEDITING_kQCIF:
2211        width_out = 176;
2212        height_out = 144;
2213        break;
2214    case M4VIDEOEDITING_kQVGA:
2215        width_out = 320;
2216        height_out = 240;
2217        break;
2218    case M4VIDEOEDITING_kCIF:
2219        width_out = 352;
2220        height_out = 288;
2221        break;
2222    case M4VIDEOEDITING_kVGA:
2223        width_out = 640;
2224        height_out = 480;
2225        break;
2226    case M4VIDEOEDITING_kWVGA:
2227        width_out = 800;
2228        height_out = 480;
2229        break;
2230    case M4VIDEOEDITING_kNTSC:
2231        width_out = 720;
2232        height_out = 480;
2233        break;
2234    case M4VIDEOEDITING_k640_360:
2235        width_out = 640;
2236        height_out = 360;
2237        break;
2238    case M4VIDEOEDITING_k854_480:
2239        // StageFright encoders require %16 resolution
2240        width_out = M4ENCODER_854_480_Width;
2241        height_out = 480;
2242        break;
2243    case M4VIDEOEDITING_kHD1280:
2244        width_out = 1280;
2245        height_out = 720;
2246        break;
2247    case M4VIDEOEDITING_kHD1080:
2248        // StageFright encoders require %16 resolution
2249        width_out = M4ENCODER_HD1080_Width;
2250        height_out = 720;
2251        break;
2252    case M4VIDEOEDITING_kHD960:
2253        width_out = 960;
2254        height_out = 720;
2255        break;
2256
2257    /**
2258     * If output video size is not given, we take QCIF size,
2259     * should not happen, because already done in M4xVSS_sendCommand */
2260    default:
2261        width_out = 176;
2262        height_out = 144;
2263        break;
2264    }
2265
2266    /**
2267     * Allocate output planes structures */
2268    framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_malloc(sizeof(M4VIFI_ImagePlane), M4VS,
2269         (M4OSA_Char *)"Framing Output plane RGB");
2270    if(framingCtx->FramingRgb == M4OSA_NULL)
2271    {
2272        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2273        return M4ERR_ALLOC;
2274    }
2275    /**
2276     * Resize RGB if needed */
2277    if((pEffect->xVSS.bResize) &&
2278         (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
2279    {
2280        width = width_out;
2281        height = height_out;
2282
2283        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
2284             New Width and height %d %d  ",width,height);
2285
2286        framingCtx->FramingRgb->u_height = height_out;
2287        framingCtx->FramingRgb->u_width = width_out;
2288        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2289        framingCtx->FramingRgb->u_topleft = 0;
2290
2291        framingCtx->FramingRgb->pac_data =
2292             (M4VIFI_UInt8*)M4OSA_malloc(framingCtx->FramingRgb->u_height*framingCtx->\
2293                FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
2294                  (M4OSA_Char *)"Framing Output pac_data RGB");
2295
2296        if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
2297        {
2298            M4OSA_TRACE1_0("Allocation error in \
2299                M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2300            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
2301            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2302            return M4ERR_ALLOC;
2303        }
2304
2305        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
2306        M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2307              rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
2308
2309        //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2310        err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
2311
2312        if(err != M4NO_ERROR)
2313        {
2314            M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
2315                when resizing RGB plane: 0x%x\n", err);
            /* Free the planes allocated above so this error path does not leak them */
            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2316            return err;
2317        }
2318
2319        if(rgbPlane.pac_data != M4OSA_NULL)
2320        {
2321            M4OSA_free((M4OSA_MemAddr32)rgbPlane.pac_data);
2322            rgbPlane.pac_data = M4OSA_NULL;
2323        }
2324    }
2325    else
2326    {
2327
2328        M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2329              Resizing Not Needed ");
2330
2331        width = rgbPlane.u_width;
2332        height = rgbPlane.u_height;
2333        framingCtx->FramingRgb->u_height = height;
2334        framingCtx->FramingRgb->u_width = width;
2335        framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
2336        framingCtx->FramingRgb->u_topleft = 0;
2337        framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
2338    }
2339
2340
2341    if(pEffect->xVSS.bResize)
2342    {
2343        /**
2344         * Force topleft to 0 for pure framing effect */
2345        framingCtx->topleft_x = 0;
2346        framingCtx->topleft_y = 0;
2347    }
2348
2349
2350    /**
2351     * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
2352     effect */
2353    framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_malloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
2354         (M4OSA_Char *)"Framing Output plane YUV");
2355    if(framingCtx->FramingYuv == M4OSA_NULL)
2356    {
2357        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2358        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
2359        return M4ERR_ALLOC;
2360    }
2361
2362    // Alloc for Y, U and V planes
2363    framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
2364    framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
2365    framingCtx->FramingYuv[0].u_topleft = 0;
2366    framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
2367    framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_malloc
2368        ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
2369            (M4OSA_Char *)"Alloc for the output Y");
2370    if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
2371    {
2372        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
2373        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
2374        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
2375        return M4ERR_ALLOC;
2376    }
2377    framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
2378    framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
2379    framingCtx->FramingYuv[1].u_topleft = 0;
2380    framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
2381
2382
2383    framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(
2384        framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
2385        (M4OSA_Char *)"Alloc for the output U");
2386    if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
2387        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[0].pac_data);
2388        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
2389        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
2390        return M4ERR_ALLOC;
2391    }
2392
2393    framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
2394    framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
2395    framingCtx->FramingYuv[2].u_topleft = 0;
2396    framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
2397
2398
2399    framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_malloc(
2400        framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[2].u_height, M4VS,
2401        (M4OSA_Char *)"Alloc for the output V");
2402    if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
2403        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[1].pac_data);
2404        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[0].pac_data);
2405        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
2406        M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
2407        return M4ERR_ALLOC;
2408    }
2409
2410    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
2411        convert RGB to YUV ");
2412
2413    //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2414    err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
2415
2416    if (err != M4NO_ERROR)
2417    {
2418        M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
2419    }
2420    M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
2421    return err;
2422}
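/* Editor's illustrative note (not part of the original sources): the framing planes
 * allocated above follow the usual YUV420 planar layout -- the Y plane covers the full
 * (even-rounded) frame while the U and V planes each cover a quarter of it.  A minimal
 * sketch of the sizing arithmetic used by the allocation code, for a hypothetical
 * 176x144 framing image:
 *
 *     M4OSA_UInt32 w      = ((176 + 1) >> 1) << 1;     // width rounded up to even: 176
 *     M4OSA_UInt32 h      = ((144 + 1) >> 1) << 1;     // height rounded up to even: 144
 *     M4OSA_UInt32 ySize  = w * h;                     // 25344 bytes
 *     M4OSA_UInt32 uvSize = (w >> 1) * (h >> 1);       // 6336 bytes for U, same for V
 */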
2423
2424/**
2425 ******************************************************************************
2426 * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2427 *
2428 * @brief    This function prepares VSS for editing
2429 * @note    It also sets the special xVSS effects as external effects for the VSS
2430 * @param    pContext    (IN) The integrator's own context
2431 *
2432 * @return    M4NO_ERROR:    No error
2433 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2434 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2435 ******************************************************************************
2436 */
2437M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
2438{
2439    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2440    M4VSS3GPP_EditContext pVssCtxt;
2441    M4OSA_UInt32 i,j;
2442    M4OSA_ERR err;
2443
2444    /**
2445     * Create a VSS 3GPP edition instance */
2446    err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2447    if (err != M4NO_ERROR)
2448    {
2449        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
2450            err);
2451        M4VSS3GPP_editCleanUp(pVssCtxt);
2452        return err;
2453    }
2454
2455#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS
2456    /* replay recorded external decoder registrations on the VSS3GPP */
2457    for (i=0; i<M4VD_kVideoType_NB; i++)
2458    {
2459        if (xVSS_context->registeredExternalDecs[i].registered)
2460        {
2461            err = M4VSS3GPP_editRegisterExternalVideoDecoder(pVssCtxt, i,
2462                    xVSS_context->registeredExternalDecs[i].pDecoderInterface,
2463                    xVSS_context->registeredExternalDecs[i].pUserData);
2464            if (M4NO_ERROR != err)
2465            {
2466                M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: \
2467                    M4VSS3GPP_editRegisterExternalVideoDecoder() returns 0x%x!", err);
2468                M4VSS3GPP_editCleanUp(pVssCtxt);
2469                return err;
2470            }
2471        }
2472    }
2473#endif /* M4VSS_ENABLE_EXTERNAL_DECODERS */
2474
2475    /* replay recorded external encoder registrations on the VSS3GPP */
2476    for (i=0; i<M4VE_kEncoderType_NB; i++)
2477    {
2478        if (xVSS_context->registeredExternalEncs[i].registered)
2479        {
2480            err = M4VSS3GPP_editRegisterExternalVideoEncoder(pVssCtxt, i,
2481                    xVSS_context->registeredExternalEncs[i].pEncoderInterface,
2482                    xVSS_context->registeredExternalEncs[i].pUserData);
2483            if (M4NO_ERROR != err)
2484            {
2485                M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2486                     M4VSS3GPP_editRegisterExternalVideoEncoder() returns 0x%x!", err);
2487                M4VSS3GPP_editCleanUp(pVssCtxt);
2488                return err;
2489            }
2490        }
2491    }
2492
2493    /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
2494    if(xVSS_context->targetedBitrate != 0)
2495    {
2496        M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
2497
2498        pVSSContext->bIsMMS = M4OSA_TRUE;
2499        pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
2500        pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
2501    }
2502
2503    /* Warning: since the addition of the UTF conversion, pSettings has been replaced in the
2504     following section by pCurrentEditSettings (there is a specific current edit settings
2505     structure for saving, as for the preview) */
2506
2507    /**
2508     * Set the external video effect functions, for saving mode (to be moved to
2509      M4xVSS_saveStart() ?)*/
2510    for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2511    {
2512        for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
2513        {
2514            if (M4xVSS_kVideoEffectType_BlackAndWhite ==
2515            xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2516            {
2517                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2518                 M4VSS3GPP_externalVideoEffectColor;
2519                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2520                // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
2521                /*commented FB*/
2522                /**
2523                 * We do not need to set the color context, it is already set
2524                 during sendCommand function */
2525            }
2526            if (M4xVSS_kVideoEffectType_Pink ==
2527                xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2528            {
2529                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2530                 M4VSS3GPP_externalVideoEffectColor;
2531                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2532                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
2533                // use any function context */
2534                /*commented FB*/
2535                /**
2536                 * We do not need to set the color context,
2537                  it is already set during sendCommand function */
2538            }
2539            if (M4xVSS_kVideoEffectType_Green ==
2540                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2541            {
2542                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2543                    M4VSS3GPP_externalVideoEffectColor;
2544                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2545                    // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
2546                     /**< we don't use any function context */
2547                /*commented FB*/
2548                /**
2549                 * We do not need to set the color context, it is already set during
2550                  sendCommand function */
2551            }
2552            if (M4xVSS_kVideoEffectType_Sepia ==
2553                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2554            {
2555                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2556                 M4VSS3GPP_externalVideoEffectColor;
2557                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2558                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
2559                /**< we don't use any function context */
2560                /*commented FB*/
2561                /**
2562                 * We do not need to set the color context, it is already set during
2563                 sendCommand function */
2564            }
2565            if (M4xVSS_kVideoEffectType_Fifties ==
2566             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2567            {
2568                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2569                 M4VSS3GPP_externalVideoEffectFifties;
2570                /**
2571                 * We do not need to set the framing context, it is already set during
2572                 sendCommand function */
2573            }
2574            if (M4xVSS_kVideoEffectType_Negative ==
2575             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2576            {
2577                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2578                 M4VSS3GPP_externalVideoEffectColor;
2579                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2580                // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
2581                 /**< we don't use any function context */
2582                /*commented FB*/
2583                /**
2584                 * We do not need to set the color context, it is already set during
2585                  sendCommand function */
2586            }
2587            if (M4xVSS_kVideoEffectType_Framing ==
2588             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2589            {
2590                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2591                 M4VSS3GPP_externalVideoEffectFraming;
2592                /**
2593                 * We do not need to set the framing context, it is already set during
2594                 sendCommand function */
2595            }
2596            if (M4xVSS_kVideoEffectType_ZoomIn ==
2597             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2598            {
2599                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2600                 M4VSS3GPP_externalVideoEffectZoom;
2601                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2602                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
2603                 function context */
2604            }
2605            if (M4xVSS_kVideoEffectType_ZoomOut ==
2606             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2607            {
2608                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2609                 M4VSS3GPP_externalVideoEffectZoom;
2610                xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
2611                 (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
2612                 function context */
2613            }
2614            if (M4xVSS_kVideoEffectType_ColorRGB16 ==
2615             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2616            {
2617                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2618                 M4VSS3GPP_externalVideoEffectColor;
2619                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2620                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2621                /**< we don't use any function context */
2622                /**
2623                 * We do not need to set the color context, it is already set during
2624                 sendCommand function */
2625            }
2626            if (M4xVSS_kVideoEffectType_Gradient ==
2627             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
2628            {
2629                xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
2630                 M4VSS3GPP_externalVideoEffectColor;
2631                //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
2632                // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
2633                /**< we don't use any function context */
2634                /**
2635                 * We do not need to set the color context, it is already set during
2636                 sendCommand function */
2637            }
2638
2639        }
2640    }
2641
2642    /**
2643     * Open the VSS 3GPP */
2644    err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
2645    if (err != M4NO_ERROR)
2646    {
2647        M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
2648             M4VSS3GPP_editOpen returned 0x%x\n",err);
2649        M4VSS3GPP_editCleanUp(pVssCtxt);
2650        return err;
2651    }
2652
2653    /**
2654     * Save VSS context to be able to close / free VSS later */
2655    xVSS_context->pCurrentEditContext = pVssCtxt;
2656
2657    return M4NO_ERROR;
2658}
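/* Editor's illustrative note (not part of the original sources):
 * M4xVSS_internalGenerateEditedFile only performs the init / register / configure / open
 * part of the edition; the encoding itself is driven step by step from M4xVSS_step, and
 * the close / clean-up half is handled by M4xVSS_internalCloseEditedFile below.  A
 * minimal sketch of the overall VSS3GPP lifecycle, assuming the per-iteration call is
 * the M4VSS3GPP_editStep() function of the VSS3GPP API:
 *
 *     M4VSS3GPP_EditContext ctx;
 *     err = M4VSS3GPP_editInit(&ctx, pFileReadPtr, pFileWritePtr);
 *     // ... register external decoders/encoders, set ExtVideoEffectFct pointers ...
 *     err = M4VSS3GPP_editOpen(ctx, pCurrentEditSettings);
 *     // while the edition is not finished: err = M4VSS3GPP_editStep(ctx, &progress);
 *     err = M4VSS3GPP_editClose(ctx);
 *     err = M4VSS3GPP_editCleanUp(ctx);
 */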
2659
2660/**
2661 ******************************************************************************
2662 * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2663 *
2664 * @brief    This function cleans up VSS
2665 * @note
2666 * @param    pContext    (IN) The integrator's own context
2667 *
2668 * @return    M4NO_ERROR:    No error
2669 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2670 ******************************************************************************
2671 */
2672M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
2673{
2674    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2675    M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
2676    M4OSA_ERR err;
2677
2678    if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
2679    {
2680        /**
2681         * Close the VSS 3GPP */
2682        err = M4VSS3GPP_editClose(pVssCtxt);
2683        if (err != M4NO_ERROR)
2684        {
2685            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
2686                 M4VSS3GPP_editClose returned 0x%x\n",err);
2687            M4VSS3GPP_editCleanUp(pVssCtxt);
2688            return err;
2689        }
2690
2691        /**
2692         * Free this VSS3GPP edition instance */
2693        err = M4VSS3GPP_editCleanUp(pVssCtxt);
2694        if (err != M4NO_ERROR)
2695        {
2696            M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
2697                M4VSS3GPP_editCleanUp returned 0x%x\n",err);
2698            return err;
2699        }
2700    }
2701
2702    return M4NO_ERROR;
2703}
2704
2705/**
2706 ******************************************************************************
2707 * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2708 *
2709 * @brief    This function prepares VSS for audio mixing
2710 * @note    It takes its parameters from the BGM settings in the xVSS internal context
2711 * @param    pContext    (IN) The integrator's own context
2712 *
2713 * @return    M4NO_ERROR:    No error
2714 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2715 * @return    M4ERR_ALLOC: Allocation error (no more memory)
2716 ******************************************************************************
2717 */
2718/***
2719 * FB: the function has been modified because the structure used for saving is now
2720 *  pCurrentEditSettings and no longer pSettings
2721 * This change was added for the UTF support
2722 * All the "xVSS_context->pSettings" accesses have been replaced by "xVSS_context->pCurrentEditSettings"
2723 ***/
2724M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
2725{
2726    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2727    M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
2728    M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
2729    M4OSA_ERR err;
2730    M4VIDEOEDITING_ClipProperties fileProperties;
2731
2732    /**
2733     * Allocate audio mixing settings structure and fill it with BGM parameters */
2734    pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_malloc
2735        (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
2736    if(pAudioMixSettings == M4OSA_NULL)
2737    {
2738        M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
2739        return M4ERR_ALLOC;
2740    }
2741
2742    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
2743         M4VIDEOEDITING_kFileType_3GPP)
2744    {
2745        err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
2746             (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
2747                 &fileProperties);
2748        if(err != M4NO_ERROR)
2749        {
2750            M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2751                 impossible to retrieve audio BGM properties (err 0x%x) ->\
2752                     re-encoding audio background music", err);
2753            fileProperties.AudioStreamType =
2754                 xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
2755                  /* To force BGM encoding */
2756        }
2757    }
2758
2759    pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
2760    pAudioMixSettings->AddedAudioFileType =
2761     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
2762    pAudioMixSettings->pAddedAudioTrackFile =
2763     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
2764    pAudioMixSettings->uiAddVolume =
2765     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
2766
2767    pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
2768    pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
2769    pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
2770    pAudioMixSettings->uiSamplingFrequency =
2771     xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
2772    pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
2773
2774    pAudioMixSettings->b_DuckingNeedeed =
2775     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
2776    pAudioMixSettings->fBTVolLevel =
2777     (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
2778    pAudioMixSettings->InDucking_threshold =
2779     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
2780    pAudioMixSettings->InDucking_lowVolume =
2781     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
2782    pAudioMixSettings->fPTVolLevel =
2783     (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
2784    pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
2785
2786    if(xVSS_context->pSettings->xVSS.bAudioMono)
2787    {
2788        pAudioMixSettings->outputNBChannels = 1;
2789    }
2790    else
2791    {
2792        pAudioMixSettings->outputNBChannels = 2;
2793    }
2794
2795    /**
2796     * Fill audio mix settings with BGM parameters */
2797    pAudioMixSettings->uiBeginLoop =
2798     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
2799    pAudioMixSettings->uiEndLoop =
2800     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
2801    pAudioMixSettings->uiAddCts =
2802     xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
2803
2804    /**
2805     * The output file of the audio mixer will be the final file (audio mixing is the last step) */
2806    pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
2807    pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
2808
2809    /**
2810     * Input file of the audio mixer is a temporary file containing all audio/video editions */
2811    pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
2812
2813    /**
2814     * Save audio mixing settings pointer to be able to free it in
2815     M4xVSS_internalCloseAudioMixedFile function */
2816    xVSS_context->pAudioMixSettings = pAudioMixSettings;
2817
2818    /**
2819     * Create a VSS 3GPP audio mixing instance */
2820    err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
2821         xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
2822
2823    /**
2824     * Save audio mixing context to be able to call audio mixing step function in
2825      M4xVSS_step function */
2826    xVSS_context->pAudioMixContext = pAudioMixingCtxt;
2827
2828    if (err != M4NO_ERROR)
2829    {
2830        M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
2831             M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
2832        //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
2833        return err;
2834    }
2835
2836    return M4NO_ERROR;
2837}
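/* Editor's illustrative note (not part of the original sources): the mixing settings
 * filled in above convert the integer BGM volume (uiAddVolume, a percentage) and the
 * primary-track volume (PTVolLevel) into float levels between 0.0 and 1.0, and select
 * mono or stereo output from the bAudioMono flag.  A minimal worked example, assuming a
 * hypothetical BGM volume of 40% and a primary-track volume of 100%:
 *
 *     M4OSA_UInt32 uiAddVolume      = 40;
 *     M4OSA_Float  fBTVolLevel      = (M4OSA_Float)uiAddVolume / 100;   // 0.40f
 *     M4OSA_Float  fPTVolLevel      = (M4OSA_Float)100 / 100;           // 1.00f
 *     M4OSA_UInt8  outputNBChannels = bAudioMono ? 1 : 2;               // 1 = mono
 */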
2838
2839/**
2840 ******************************************************************************
2841 * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2842 *
2843 * @brief    This function cleans up VSS for audio mixing
2844 * @note
2845 * @param    pContext    (IN) The integrator's own context
2846 *
2847 * @return    M4NO_ERROR:    No error
2848 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2849 ******************************************************************************
2850 */
2851M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
2852{
2853    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2854    M4OSA_ERR err;
2855
2856    /**
2857     * Free this VSS3GPP audio mixing instance */
2858    if(xVSS_context->pAudioMixContext != M4OSA_NULL)
2859    {
2860        err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
2861        if (err != M4NO_ERROR)
2862        {
2863            M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
2864                 M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
2865            return err;
2866        }
2867    }
2868
2869    /**
2870     * Free VSS audio mixing settings */
2871    if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
2872    {
2873        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pAudioMixSettings);
2874        xVSS_context->pAudioMixSettings = M4OSA_NULL;
2875    }
2876
2877    return M4NO_ERROR;
2878}
2879
2880/**
2881 ******************************************************************************
2882 * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2883 *
2884 * @brief    This function cleans up the preview edition structure used to generate
2885 *            the preview.3gp file given to the VPS
2886 * @note    It also frees the preview structure given to the VPS
2887 * @param    pContext    (IN) The integrator's own context
2888 *
2889 * @return    M4NO_ERROR:    No error
2890 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2891 ******************************************************************************
2892 */
2893M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
2894{
2895    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2896    M4OSA_UInt8 i;
2897
2898    /**
2899     * Free clip/transition settings */
2900    for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
2901    {
2902        M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
2903
2904        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i]));
2905        xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
2906
2907        /**
2908         * Because there is 1 less transition than clip number */
2909        if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
2910        {
2911            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList[i]));
2912            xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
2913        }
2914    }
2915
2916    /**
2917     * Free clip/transition list */
2918    if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
2919    {
2920        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList));
2921        xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
2922    }
2923    if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
2924    {
2925        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList));
2926        xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
2927    }
2928
2929    /**
2930     * Free output preview file path */
2931    if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
2932    {
2933        M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile);
2934        xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
2935    }
2936
2937    /**
2938     * Free temporary preview file path */
2939    if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
2940    {
2941        remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
2942        M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
2943        xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
2944    }
2945
2946    /**
2947     * Free "local" BGM settings */
2948    if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
2949    {
2950        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
2951        {
2952            M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
2953            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
2954        }
2955        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
2956        xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
2957    }
2958
2959    /**
2960     * Free current edit settings structure */
2961    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2962    {
2963        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings);
2964        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
2965    }
2966
2967    /**
2968     * Free preview effects given to application */
2969    if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
2970    {
2971        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pPreviewSettings->Effects);
2972        xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
2973        xVSS_context->pPreviewSettings->nbEffects = 0;
2974    }
2975
2976    return M4NO_ERROR;
2977}
2978
2979
2980/**
2981 ******************************************************************************
2982 * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2983 *
2984 * @brief    This function cleans up the saving edition structure used to generate
2985 *            the output.3gp file given to the VPS
2986 * @note
2987 * @param    pContext    (IN) The integrator's own context
2988 *
2989 * @return    M4NO_ERROR:    No error
2990 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
2991 ******************************************************************************
2992 */
2993M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
2994{
2995    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
2996    M4OSA_UInt8 i;
2997
2998    if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
2999    {
3000        /**
3001         * Free clip/transition settings */
3002        for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
3003        {
3004            M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
3005
3006            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList[i]));
3007            xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
3008
3009            /**
3010             * Because there is 1 less transition than clip number */
3011            if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
3012            {
3013                M4OSA_free((M4OSA_MemAddr32)\
3014                    (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
3015                xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
3016            }
3017        }
3018
3019        /**
3020         * Free clip/transition list */
3021        if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
3022        {
3023            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pClipList));
3024            xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
3025        }
3026        if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
3027        {
3028            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->pTransitionList));
3029            xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
3030        }
3031
3032        if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
3033        {
3034            M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pCurrentEditSettings->Effects));
3035            xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
3036            xVSS_context->pCurrentEditSettings->nbEffects = 0;
3037        }
3038
3039        /**
3040         * Free output saving file path */
3041        if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
3042        {
3043            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
3044            {
3045                remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
3046                M4OSA_free(xVSS_context->pCurrentEditSettings->pOutputFile);
3047            }
3048            if(xVSS_context->pOutputFile != M4OSA_NULL)
3049            {
3050                M4OSA_free((M4OSA_MemAddr32)xVSS_context->pOutputFile);
3051                xVSS_context->pOutputFile = M4OSA_NULL;
3052            }
3053            xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3054            xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
3055        }
3056
3057        /**
3058         * Free temporary saving file path */
3059        if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
3060        {
3061            remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
3062            M4OSA_free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
3063            xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
3064        }
3065
3066        /**
3067         * Free "local" BGM settings */
3068        if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
3069        {
3070            if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3071            {
3072                M4OSA_free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
3073                xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3074            }
3075            M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
3076            xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
3077        }
3078
3079        /**
3080         * Free current edit settings structure */
3081        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pCurrentEditSettings);
3082        xVSS_context->pCurrentEditSettings = M4OSA_NULL;
3083    }
3084
3085    return M4NO_ERROR;
3086}
3087
3088
3089/**
3090 ******************************************************************************
3091 * prototype    M4OSA_ERR M4xVSS_freeSettings(M4OSA_Context pContext)
3092 *
3093 * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
3094 * @note
3095 * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
3096 *
3097 * @return    M4NO_ERROR:    No error
3098 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3099 ******************************************************************************
3100 */
3101M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
3102{
3103    M4OSA_UInt8 i,j;
3104
3105    /**
3106     * For each clip ... */
3107    for(i=0; i<pSettings->uiClipNumber; i++)
3108    {
3109        /**
3110         * ... free clip settings */
3111        if(pSettings->pClipList[i] != M4OSA_NULL)
3112        {
3113            M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
3114
3115            M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList[i]));
3116            pSettings->pClipList[i] = M4OSA_NULL;
3117        }
3118
3119        /**
3120         * ... free transition settings */
3121        if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
3122        {
3123            if(pSettings->pTransitionList[i] != M4OSA_NULL)
3124            {
3125                switch (pSettings->pTransitionList[i]->VideoTransitionType)
3126                {
3127                    case M4xVSS_kVideoTransitionType_AlphaMagic:
3128
3129                        /**
3130                         * In case of Alpha Magic transition,
3131                          some extra parameters need to be freed */
3132                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
3133                             != M4OSA_NULL)
3134                        {
3135                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
3136                                pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3137                                    pPlane->pac_data));
3138                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
3139                                ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
3140
3141                            M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
3142                                pSettings->pTransitionList[i]->\
3143                                    pExtVideoTransitionFctCtxt)->pPlane));
3144                            ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
3145                                ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
3146
3147                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->\
3148                                pExtVideoTransitionFctCtxt));
3149                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3150
3151                            for(j=i+1;j<pSettings->uiClipNumber-1;j++)
3152                            {
3153                                if(pSettings->pTransitionList[j] != M4OSA_NULL)
3154                                {
3155                                    if(pSettings->pTransitionList[j]->VideoTransitionType ==
3156                                     M4xVSS_kVideoTransitionType_AlphaMagic)
3157                                    {
3158                                        M4OSA_UInt32 pCmpResult=0;
3159                                        pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
3160                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3161                                                pAlphaFilePath,
3162                                                (const char *)pSettings->pTransitionList[j]->\
3163                                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3164                                                pAlphaFilePath);
3165                                        if(pCmpResult == 0)
3166                                        {
3167                                            /* Free the extra internal alpha magic structure and
3168                                            set it to NULL to avoid freeing it again */
3169                                            M4OSA_free((M4OSA_MemAddr32)(pSettings->\
3170                                                pTransitionList[j]->pExtVideoTransitionFctCtxt));
3171                                            pSettings->pTransitionList[j]->\
3172                                                pExtVideoTransitionFctCtxt = M4OSA_NULL;
3173                                        }
3174                                    }
3175                                }
3176                            }
3177                        }
3178
3179                        if(pSettings->pTransitionList[i]->\
3180                            xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
3181                        {
3182                            if(pSettings->pTransitionList[i]->\
3183                                xVSS.transitionSpecific.pAlphaMagicSettings->\
3184                                    pAlphaFilePath != M4OSA_NULL)
3185                            {
3186                                M4OSA_free((M4OSA_MemAddr32)pSettings->\
3187                                    pTransitionList[i]->\
3188                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3189                                            pAlphaFilePath);
3190                                pSettings->pTransitionList[i]->\
3191                                    xVSS.transitionSpecific.pAlphaMagicSettings->\
3192                                        pAlphaFilePath = M4OSA_NULL;
3193                            }
3194                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->\
3195                                xVSS.transitionSpecific.pAlphaMagicSettings);
3196                            pSettings->pTransitionList[i]->\
3197                                xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
3198
3199                        }
3200
3201                    break;
3202
3203
3204                    case M4xVSS_kVideoTransitionType_SlideTransition:
3205                        if (M4OSA_NULL != pSettings->pTransitionList[i]->\
3206                            xVSS.transitionSpecific.pSlideTransitionSettings)
3207                        {
3208                            M4OSA_free((M4OSA_MemAddr32)pSettings->pTransitionList[i]->\
3209                                xVSS.transitionSpecific.pSlideTransitionSettings);
3210                            pSettings->pTransitionList[i]->\
3211                                xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
3212                        }
3213                        if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
3214                        {
3215                            M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]->\
3216                                pExtVideoTransitionFctCtxt));
3217                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
3218                        }
3219                    break;
3220                    default:
3221                    break;
3222
3223                }
3224                /**
3225                 * Free transition settings structure */
3226                M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList[i]));
3227                pSettings->pTransitionList[i] = M4OSA_NULL;
3228            }
3229        }
3230    }
3231
3232    /**
3233     * Free clip list */
3234    if(pSettings->pClipList != M4OSA_NULL)
3235    {
3236        M4OSA_free((M4OSA_MemAddr32)(pSettings->pClipList));
3237        pSettings->pClipList = M4OSA_NULL;
3238    }
3239
3240    /**
3241     * Free transition list */
3242    if(pSettings->pTransitionList != M4OSA_NULL)
3243    {
3244        M4OSA_free((M4OSA_MemAddr32)(pSettings->pTransitionList));
3245        pSettings->pTransitionList = M4OSA_NULL;
3246    }
3247
3248    /**
3249     * RC: Free effects list */
3250    if(pSettings->Effects != M4OSA_NULL)
3251    {
3252        for(i=0; i<pSettings->nbEffects; i++)
3253        {
3254            /**
3255             * For each clip, free framing structure if needed */
3256            if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
3257                || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
3258            {
3259#ifdef DECODE_GIF_ON_SAVING
3260                M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3261#else
3262                M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3263                M4xVSS_FramingStruct* framingCtx_save;
3264                M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
3265#endif
3266
3267#ifdef DECODE_GIF_ON_SAVING
3268                if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3269                 non-existent pointer */
3270                {
3271                    if(framingCtx->aFramingCtx != M4OSA_NULL)
3272                    {
3273                        {
3274                            if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
3275                            {
3276                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
3277                                    FramingRgb->pac_data);
3278                                framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
3279                                M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingRgb);
3280                                framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
3281                            }
3282                        }
3283                        if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
3284                        {
3285                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
3286                                FramingYuv[0].pac_data);
3287                            framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3288                           M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
3289                                FramingYuv[1].pac_data);
3290                            framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
3291                           M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->\
3292                                FramingYuv[2].pac_data);
3293                            framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
3294                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx->FramingYuv);
3295                            framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
3296                        }
3297                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx);
3298                        framingCtx->aFramingCtx = M4OSA_NULL;
3299                    }
3300                    if(framingCtx->aFramingCtx_last != M4OSA_NULL)
3301                    {
3302                        if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
3303                        {
3304                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
3305                                FramingRgb->pac_data);
3306                            framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
3307                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
3308                                FramingRgb);
3309                            framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
3310                        }
3311                        if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
3312                        {
3313                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->\
3314                                FramingYuv[0].pac_data);
3315                            framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
3316                            M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last->FramingYuv);
3317                            framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
3318                        }
3319                        M4OSA_free((M4OSA_MemAddr32)framingCtx->aFramingCtx_last);
3320                        framingCtx->aFramingCtx_last = M4OSA_NULL;
3321                    }
3322                    if(framingCtx->pEffectFilePath != M4OSA_NULL)
3323                    {
3324                        M4OSA_free((M4OSA_MemAddr32)framingCtx->pEffectFilePath);
3325                        framingCtx->pEffectFilePath = M4OSA_NULL;
3326                    }
3327                    /*In case there are still allocated*/
3328                    if(framingCtx->pSPSContext != M4OSA_NULL)
3329                    {
3330                    //    M4SPS_destroy(framingCtx->pSPSContext);
3331                        framingCtx->pSPSContext = M4OSA_NULL;
3332#if 0
3333                        if(framingCtx->inputStream.data_buffer  != M4OSA_NULL)
3334                        {
3335                            M4OSA_free((M4OSA_MemAddr32)framingCtx->inputStream.data_buffer);
3336                            framingCtx->inputStream.data_buffer = M4OSA_NULL;
3337                        }
3338#endif
3339                    }
3340                    /*Alpha blending structure*/
3341                    if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
3342                    {
3343                        M4OSA_free((M4OSA_MemAddr32)framingCtx->alphaBlendingStruct);
3344                        framingCtx->alphaBlendingStruct = M4OSA_NULL;
3345                    }
3346
3347                    M4OSA_free((M4OSA_MemAddr32)framingCtx);
3348                    framingCtx = M4OSA_NULL;
3349                }
3350#else
3351                do
3352                {
3353                    if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash when trying to free a
3354                    non-existent pointer */
3355                    {
3356                        if(framingCtx->FramingRgb != M4OSA_NULL)
3357                        {
3358                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb->pac_data);
3359                            framingCtx->FramingRgb->pac_data = M4OSA_NULL;
3360                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingRgb);
3361                            framingCtx->FramingRgb = M4OSA_NULL;
3362                        }
3363                        if(framingCtx->FramingYuv != M4OSA_NULL)
3364                        {
3365                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv[0].pac_data);
3366                            framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
3367                            M4OSA_free((M4OSA_MemAddr32)framingCtx->FramingYuv);
3368                            framingCtx->FramingYuv = M4OSA_NULL;
3369                        }
3370                        framingCtx_save = framingCtx->pNext;
3371                        M4OSA_free((M4OSA_MemAddr32)framingCtx);
3372                        framingCtx = M4OSA_NULL;
3373                        framingCtx = framingCtx_save;
3374                    }
3375                    else
3376                    {
3377                        /*FB: bug fix P4ME00003002*/
3378                        break;
3379                    }
3380                } while(framingCtx_first != framingCtx);
3381#endif
3382            }
3383            else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
3384            {
3385                /* Free Fifties context */
3386                M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3387
3388                if(FiftiesCtx != M4OSA_NULL)
3389                {
3390                    M4OSA_free((M4OSA_MemAddr32)FiftiesCtx);
3391                    FiftiesCtx = M4OSA_NULL;
3392                }
3393
3394            }
3395            else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
3396                || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
3397                || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
3398                || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
3399                || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
3400                || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
3401                || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
3402            {
3403                /* Free Color context */
3404                M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
3405
3406                if(ColorCtx != M4OSA_NULL)
3407                {
3408                    M4OSA_free((M4OSA_MemAddr32)ColorCtx);
3409                    ColorCtx = M4OSA_NULL;
3410                }
3411            }
3412
3413            /* Free simple fields */
3414            if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
3415            {
3416                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingFilePath);
3417                pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
3418            }
3419            if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
3420            {
3421                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pFramingBuffer);
3422                pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
3423            }
3424            if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
3425            {
3426                M4OSA_free((M4OSA_MemAddr32)pSettings->Effects[i].xVSS.pTextBuffer);
3427                pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
3428            }
3429        }
3430        M4OSA_free((M4OSA_MemAddr32)pSettings->Effects);
3431        pSettings->Effects = M4OSA_NULL;
3432    }
3433
3434    return M4NO_ERROR;
3435}
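/* Editor's illustrative note (not part of the original sources): the Alpha Magic branch
 * of M4xVSS_freeSettings above has to cope with several transitions referencing the same
 * alpha file: the decoded alpha plane appears to be shared between them, so the plane is
 * freed only once (for the first transition using the file), and every later transition
 * whose pAlphaFilePath compares equal has its pExtVideoTransitionFctCtxt released and set
 * to M4OSA_NULL so that nothing is freed twice.  A hedged sketch of the comparison used
 * for that de-duplication (settingsA/settingsB are hypothetical names):
 *
 *     if (0 == strcmp((const char *)settingsA->pAlphaFilePath,
 *                     (const char *)settingsB->pAlphaFilePath))
 *     {
 *         // same alpha file: release only the per-transition context, keep the plane
 *     }
 */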
3436
3437M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
3438{
3439    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3440//    M4OSA_UInt8 i,j;
3441
3442    /* Free "local" BGM settings */
3443    if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
3444    {
3445        if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
3446        {
3447            M4OSA_free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
3448            xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
3449        }
3450        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->xVSS.pBGMtrack);
3451        xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
3452    }
3453#if 0
3454    /* Parse transitions to free internal "alpha magic" settings structure */
3455    /**
3456     * In case there is twice or more the same Alpha Magic effect, the effect context
3457     * may be freed twice or more.
3458     * So, we parse all remaining transition settings to know if the context can be
3459     * "re-freed", and if yes, we put its context to NULL to avoid freeing it again */
3460    for(i=0; i<xVSS_context->pSettings->uiClipNumber-1; i++)
3461    {
3462        if(xVSS_context->pSettings->pTransitionList[i] != M4OSA_NULL)
3463        {
3464            switch (xVSS_context->pSettings->pTransitionList[i]->VideoTransitionType)
3465            {
3466                case M4xVSS_kVideoTransitionType_AlphaMagic:
3467                    /**
3468                     * In case of Alpha Magic transition, some extra parameters need to be freed */
3469                    if(xVSS_context->pSettings->pTransitionList[i]->\
3470                        pExtVideoTransitionFctCtxt != M4OSA_NULL)
3471                    {
3472                        M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
3473                            xVSS_context->pSettings->pTransitionList[i]->\
3474                                pExtVideoTransitionFctCtxt)->pPlane->pac_data));
3475                        ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\
3476                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3477                                pPlane->pac_data = M4OSA_NULL;
3478
3479                        M4OSA_free((M4OSA_MemAddr32)(((M4xVSS_internal_AlphaMagicSettings*)\
3480                            xVSS_context->pSettings->pTransitionList[i]->\
3481                                pExtVideoTransitionFctCtxt)->pPlane));
3482                        ((M4xVSS_internal_AlphaMagicSettings*)xVSS_context->\
3483                            pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
3484                                pPlane = M4OSA_NULL;
3485
3486                        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\
3487                            pTransitionList[i]->pExtVideoTransitionFctCtxt));
3488                        xVSS_context->pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt
3489                             = M4OSA_NULL;
3490
3491                        for(j=i+1;j<xVSS_context->pSettings->uiClipNumber-1;j++)
3492                        {
3493                            if(xVSS_context->pSettings->pTransitionList[j] != M4OSA_NULL)
3494                            {
3495                                if(xVSS_context->pSettings->pTransitionList[j]->\
3496                                    VideoTransitionType == M4xVSS_kVideoTransitionType_AlphaMagic)
3497                                {
3498                                    M4OSA_UInt32 pCmpResult=0;
3499                                    pCmpResult = strcmp((const char *)xVSS_context->pSettings->pTransitionList[i]->\
3500                                        xVSS.transitionSpecific.pAlphaMagicSettings->\
3501                                            pAlphaFilePath,
3502                                        (const char *)xVSS_context->pSettings->pTransitionList[j]->\
3503                                            xVSS.transitionSpecific.pAlphaMagicSettings->\
3504                                                pAlphaFilePath);
3505                                    if(pCmpResult == 0)
3506                                        {
3507                                        /* Free the shared internal alpha magic structure and set
3508                                         the pointer to NULL to avoid freeing it again */
3509                                        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\
3510                                            pTransitionList[j]->pExtVideoTransitionFctCtxt));
3511                                        xVSS_context->pSettings->pTransitionList[j]->\
3512                                            pExtVideoTransitionFctCtxt = M4OSA_NULL;
3513                                    }
3514                                }
3515                            }
3516                        }
3517                    }
3518                break;
3519
3520                case M4xVSS_kVideoTransitionType_SlideTransition:
3521                    if(xVSS_context->pSettings->pTransitionList[i]->\
3522                        pExtVideoTransitionFctCtxt != M4OSA_NULL)
3523                    {
3524                        M4OSA_free((M4OSA_MemAddr32)(xVSS_context->pSettings->\
3525                            pTransitionList[i]->pExtVideoTransitionFctCtxt));
3526                        xVSS_context->pSettings->pTransitionList[i]->\
3527                            pExtVideoTransitionFctCtxt = M4OSA_NULL;
3528                    }
3529                break;
3530            }
3531        }
3532    }
3533#endif
3534
3535    M4xVSS_freeSettings(xVSS_context->pSettings);
3536
3537    if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
3538    {
3539        M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
3540        M4xVSS_Pto3GPP_params* pParams_sauv;
3541
3542        while(pParams != M4OSA_NULL)
3543        {
3544            if(pParams->pFileIn != M4OSA_NULL)
3545            {
3546                M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn);
3547                pParams->pFileIn = M4OSA_NULL;
3548            }
3549            if(pParams->pFileOut != M4OSA_NULL)
3550            {
3551                /* Delete temporary file */
3552                remove((const char *)pParams->pFileOut);
3553                M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut);
3554                pParams->pFileOut = M4OSA_NULL;
3555            }
3556            if(pParams->pFileTemp != M4OSA_NULL)
3557            {
3558                /* Delete temporary file */
3559#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3560                remove((const char *)pParams->pFileTemp);
3561                M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp);
3562#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3563                pParams->pFileTemp = M4OSA_NULL;
3564            }
3565            pParams_sauv = pParams;
3566            pParams = pParams->pNext;
3567            M4OSA_free((M4OSA_MemAddr32)pParams_sauv);
3568            pParams_sauv = M4OSA_NULL;
3569        }
3570    }
3571
3572    if(xVSS_context->pMCSparamsList != M4OSA_NULL)
3573    {
3574        M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
3575        M4xVSS_MCS_params* pParams_sauv;
3576
3577        while(pParams != M4OSA_NULL)
3578        {
3579            if(pParams->pFileIn != M4OSA_NULL)
3580            {
3581                M4OSA_free((M4OSA_MemAddr32)pParams->pFileIn);
3582                pParams->pFileIn = M4OSA_NULL;
3583            }
3584            if(pParams->pFileOut != M4OSA_NULL)
3585            {
3586                /* Delete temporary file */
3587                remove((const char *)pParams->pFileOut);
3588                M4OSA_free((M4OSA_MemAddr32)pParams->pFileOut);
3589                pParams->pFileOut = M4OSA_NULL;
3590            }
3591            if(pParams->pFileTemp != M4OSA_NULL)
3592            {
3593                /* Delete temporary file */
3594#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
3595                remove((const char *)pParams->pFileTemp);
3596                M4OSA_free((M4OSA_MemAddr32)pParams->pFileTemp);
3597#endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
3598                pParams->pFileTemp = M4OSA_NULL;
3599            }
3600            pParams_sauv = pParams;
3601            pParams = pParams->pNext;
3602            M4OSA_free((M4OSA_MemAddr32)pParams_sauv);
3603            pParams_sauv = M4OSA_NULL;
3604        }
3605    }
3606
3607    if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
3608    {
3609        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pcmPreviewFile);
3610        xVSS_context->pcmPreviewFile = M4OSA_NULL;
3611    }
3612    if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
3613        && xVSS_context->pOutputFile != M4OSA_NULL)
3614    {
3615        M4OSA_free((M4OSA_MemAddr32)xVSS_context->pSettings->pOutputFile);
3616        xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
3617        xVSS_context->pOutputFile = M4OSA_NULL;
3618    }
3619
3620    /* Reinit all context variables */
3621    xVSS_context->previousClipNumber = 0;
3622    xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
3623    xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
3624    xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
3625    xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
3626    xVSS_context->pMCSparamsList = M4OSA_NULL;
3627    xVSS_context->pMCScurrentParams = M4OSA_NULL;
3628    xVSS_context->tempFileIndex = 0;
3629    xVSS_context->targetedTimescale = 0;
3630
3631    return M4NO_ERROR;
3632}
3633
3634/**
3635 ******************************************************************************
3636 * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
3637 *                                    M4OSA_Char* pFile,
3638 *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
3639 *
3640 * @brief    This function retrieves the properties of an input 3GP file using the MCS
3641 * @note
3642 * @param    pContext        (IN) The integrator own context
3643 * @param    pFile            (IN) 3GP file to analyse
3644 * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
3645 *                            the 3GP file properties
3646 *
3647 * @return    M4NO_ERROR:    No error
3648 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3649 ******************************************************************************
3650 */
3651M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
3652                                       M4VIDEOEDITING_ClipProperties *pFileProperties)
3653{
3654    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3655    M4OSA_ERR err;
3656    M4MCS_Context mcs_context;
3657
3658    err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
3659    if(err != M4NO_ERROR)
3660    {
3661        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
3662        return err;
3663    }
3664
3665    /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
3666    err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
3667        M4OSA_NULL, M4OSA_NULL);
3668    if (err != M4NO_ERROR)
3669    {
3670        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open_normalMode: 0x%x", err);
3671        M4MCS_abort(mcs_context);
3672        return err;
3673    }
3674
3675    err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
3676    if(err != M4NO_ERROR)
3677    {
3678        M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
3679        M4MCS_abort(mcs_context);
3680        return err;
3681    }
3682
3683    err = M4MCS_abort(mcs_context);
3684    if (err != M4NO_ERROR)
3685    {
3686        M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
3687        return err;
3688    }
3689
3690    return M4NO_ERROR;
3691}
3692
3693
3694/**
3695 ******************************************************************************
3696 * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3697 *                                                M4OSA_UInt32* pTargetedTimeScale)
3698 *
3699 * @brief    This function retrieves the targeted time scale
3700 * @note
3701 * @param    pContext            (IN)    The integrator own context
3702 * @param    pTargetedTimeScale    (OUT)    Targeted time scale
3703 *
3704 * @return    M4NO_ERROR:    No error
3705 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
3706 ******************************************************************************
3707 */
3708M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
3709                                                 M4VSS3GPP_EditSettings* pSettings,
3710                                                  M4OSA_UInt32* pTargetedTimeScale)
3711{
3712    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
3713    M4OSA_ERR err;
3714    M4OSA_UInt32 totalDuration = 0;
3715    M4OSA_UInt8 i = 0;
3716    M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
3717
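    /* Summary of the code below: for each MPEG-4 video clip, keep the video time scale of
     * the clip with the longest (cut) duration; an ARGB8888 still image forces the time
     * scale to 30 (the Pto3GPP time scale), and a minimum of 30 is enforced at the end of
     * this function. */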
3718    for(i=0;i<pSettings->uiClipNumber;i++)
3719    {
3720        /*search timescale only in mpeg4 case*/
3721        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
3722            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
3723            || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
3724        {
3725            M4VIDEOEDITING_ClipProperties fileProperties;
3726
3727            /*UTF conversion support*/
3728            M4OSA_Char* pDecodedPath = M4OSA_NULL;
3729
3730            /**
3731            * UTF conversion: convert into the customer format, before being used*/
3732            pDecodedPath = pSettings->pClipList[i]->pFile;
3733
3734            if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
3735                && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
3736            {
3737                M4OSA_UInt32 length = 0;
3738                err = M4xVSS_internalConvertFromUTF8(xVSS_context,
3739                     (M4OSA_Void*) pSettings->pClipList[i]->pFile,
3740                        (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
3741                             &length);
3742                if(err != M4NO_ERROR)
3743                {
3744                    M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3745                         M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
3746                    return err;
3747                }
3748                pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
3749            }
3750
3751            /*End of the conversion: use the decoded path*/
3752            err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
3753
3754            /*get input file properties*/
3755            /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
3756                pClipList[i]->pFile, &fileProperties);*/
3757            if(M4NO_ERROR != err)
3758            {
3759                M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
3760                     M4xVSS_internalGetProperties returned: 0x%x", err);
3761                return err;
3762            }
3763            if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
3764            {
3765                if(pSettings->pClipList[i]->uiEndCutTime > 0)
3766                {
3767                    if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
3768                        - pSettings->pClipList[i]->uiBeginCutTime))
3769                    {
3770                        tempTimeScale = fileProperties.uiVideoTimeScale;
3771                        tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
3772                             - pSettings->pClipList[i]->uiBeginCutTime);
3773                    }
3774                }
3775                else
3776                {
3777                    if(tempDuration < (fileProperties.uiClipDuration\
3778                         - pSettings->pClipList[i]->uiBeginCutTime))
3779                    {
3780                        tempTimeScale = fileProperties.uiVideoTimeScale;
3781                        tempDuration = (fileProperties.uiClipDuration\
3782                             - pSettings->pClipList[i]->uiBeginCutTime);
3783                    }
3784                }
3785            }
3786        }
3787        if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
3788        {
3789            /*the timescale is 30 for PTO3GP*/
3790            *pTargetedTimeScale = 30;
3791            return M4NO_ERROR;
3792
3793        }
3794    }
3795
3796    if(tempTimeScale >= 30)/*Enforce a minimum time scale of 30: if the time scale is too
3797    low, the shell encoder can enter an infinite loop*/
3798    {
3799        *pTargetedTimeScale = tempTimeScale;
3800    }
3801    else
3802    {
3803        *pTargetedTimeScale = 30;
3804    }
3805
3806    return M4NO_ERROR;
3807}
3808
3809
3810/**
3811 ******************************************************************************
3812 * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3813 *                                                    M4VIFI_ImagePlane *PlaneIn,
3814 *                                                    M4VIFI_ImagePlane *PlaneOut,
3815 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3816 *                                                    M4OSA_UInt32 uiEffectKind)
3817 *
3818 * @brief    This function applies a color effect on an input YUV420 planar frame
3819 * @note
3820 * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
3821 * @param    PlaneIn            (IN) Input YUV420 planar
3822 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3823 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3824 * @param    uiEffectKind    (IN) Unused
3825 *
3826 * @return    M4VIFI_OK:    No error
3827 ******************************************************************************
3828 */
3829M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
3830                                             M4VIFI_ImagePlane *PlaneIn,
3831                                             M4VIFI_ImagePlane *PlaneOut,
3832                                             M4VSS3GPP_ExternalProgress *pProgress,
3833                                             M4OSA_UInt32 uiEffectKind)
3834{
3835    M4VIFI_Int32 plane_number;
3836    M4VIFI_UInt32 i,j;
3837    M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
3838    M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
3839
3840    for (plane_number = 0; plane_number < 3; plane_number++)
3841    {
3842        p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
3843        p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
3844        for (i = 0; i < PlaneOut[plane_number].u_height; i++)
3845        {
3846            /**
3847             * Chrominance */
3848            if(plane_number==1 || plane_number==2)
3849            {
3850                //switch ((M4OSA_UInt32)pFunctionContext)
3851                // commented because a structure for the effects context exists
3852                switch (ColorContext->colorEffectType)
3853                {
3854                    case M4xVSS_kVideoEffectType_BlackAndWhite:
3855                        memset((void *)p_buf_dest,128,
3856                         PlaneIn[plane_number].u_width);
3857                        break;
3858                    case M4xVSS_kVideoEffectType_Pink:
3859                        memset((void *)p_buf_dest,255,
3860                         PlaneIn[plane_number].u_width);
3861                        break;
3862                    case M4xVSS_kVideoEffectType_Green:
3863                        memset((void *)p_buf_dest,0,
3864                         PlaneIn[plane_number].u_width);
3865                        break;
3866                    case M4xVSS_kVideoEffectType_Sepia:
3867                        if(plane_number==1)
3868                        {
3869                            memset((void *)p_buf_dest,117,
3870                             PlaneIn[plane_number].u_width);
3871                        }
3872                        else
3873                        {
3874                            memset((void *)p_buf_dest,139,
3875                             PlaneIn[plane_number].u_width);
3876                        }
3877                        break;
3878                    case M4xVSS_kVideoEffectType_Negative:
3879                        memcpy((void *)p_buf_dest,
3880                         (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3881                        break;
3882
3883                    case M4xVSS_kVideoEffectType_ColorRGB16:
3884                        {
3885                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3886
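                            /* rgb16ColorData is packed as RGB565: red in the 5 most
                             * significant bits, green in the middle 6 bits, blue in the
                             * 5 least significant bits (matches the masks/shifts below) */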
3887                            /*first get the r, g, b*/
3888                            b = (ColorContext->rgb16ColorData &  0x001f);
3889                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3890                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3891
3892                            /*keep y, but replace u and v*/
3893                            if(plane_number==1)
3894                            {
3895                                /*then convert to u*/
3896                                u = U16(r, g, b);
3897                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3898                                 PlaneIn[plane_number].u_width);
3899                            }
3900                            if(plane_number==2)
3901                            {
3902                                /*then convert to v*/
3903                                v = V16(r, g, b);
3904                                memset((void *)p_buf_dest, (M4OSA_UInt8)v,
3905                                 PlaneIn[plane_number].u_width);
3906                            }
3907                        }
3908                        break;
3909                    case M4xVSS_kVideoEffectType_Gradient:
3910                        {
3911                            M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
3912
3913                            /*first get the r, g, b*/
3914                            b = (ColorContext->rgb16ColorData &  0x001f);
3915                            g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
3916                            r = (ColorContext->rgb16ColorData &  0xf800)>>11;
3917
3918                            /*for color gradation*/
3919                            b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
3920                            g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
3921                            r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
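                            /* Each chroma row i scales the color by (1 - i/height); since
                             * the luminance plane is copied unchanged, the tint effectively
                             * fades toward neutral gray from top to bottom of the frame */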
3922
3923                            /*keep y, but replace u and v*/
3924                            if(plane_number==1)
3925                            {
3926                                /*then convert to u*/
3927                                u = U16(r, g, b);
3928                                memset((void *)p_buf_dest,(M4OSA_UInt8)u,
3929                                 PlaneIn[plane_number].u_width);
3930                            }
3931                            if(plane_number==2)
3932                            {
3933                                /*then convert to v*/
3934                                v = V16(r, g, b);
3935                                memset((void *)p_buf_dest,(M4OSA_UInt8)v,
3936                                 PlaneIn[plane_number].u_width);
3937                            }
3938                        }
3939                        break;
3940                    default:
3941                        break;
3942                }
3943            }
3944            /**
3945             * Luminance */
3946            else
3947            {
3948                //switch ((M4OSA_UInt32)pFunctionContext)
3949                // commented because a structure for the effects context exists
3950                switch (ColorContext->colorEffectType)
3951                {
3952                case M4xVSS_kVideoEffectType_Negative:
3953                    for(j=0;j<PlaneOut[plane_number].u_width;j++)
3954                    {
3955                            p_buf_dest[j] = 255 - p_buf_src[j];
3956                    }
3957                    break;
3958                default:
3959                    memcpy((void *)p_buf_dest,
3960                     (void *)p_buf_src ,PlaneOut[plane_number].u_width);
3961                    break;
3962                }
3963            }
3964            p_buf_src += PlaneIn[plane_number].u_stride;
3965            p_buf_dest += PlaneOut[plane_number].u_stride;
3966        }
3967    }
3968
3969    return M4VIFI_OK;
3970}
3971
3972/**
3973 ******************************************************************************
3974 * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
3975 *                                                    M4VIFI_ImagePlane *PlaneIn,
3976 *                                                    M4VIFI_ImagePlane *PlaneOut,
3977 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
3978 *                                                    M4OSA_UInt32 uiEffectKind)
3979 *
3980 * @brief    This function adds a fixed or animated image to an input YUV420 planar frame
3981 * @note
3982 * @param    userData        (IN) Framing context (settings and current framing frame)
3983 * @param    PlaneIn            (IN) Input YUV420 planar
3984 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
3985 * @param    pProgress        (IN/OUT) Progress indication (0-100)
3986 * @param    uiEffectKind    (IN) Unused
3987 *
3988 * @return    M4VIFI_OK:    No error
3989 ******************************************************************************
3990 */
3991M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
3992                                                M4VIFI_ImagePlane PlaneIn[3],
3993                                                M4VIFI_ImagePlane *PlaneOut,
3994                                                M4VSS3GPP_ExternalProgress *pProgress,
3995                                                M4OSA_UInt32 uiEffectKind )
3996{
3997    M4VIFI_UInt32 x,y;
3998
3999    M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
4000    M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
4001    M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
4002
4003    M4xVSS_FramingStruct* Framing = M4OSA_NULL;
4004    M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
4005    M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
4006
4007    M4VIFI_UInt8 *p_out0;
4008    M4VIFI_UInt8 *p_out1;
4009    M4VIFI_UInt8 *p_out2;
4010
4011    M4VIFI_UInt32 topleft[2];
4012
4013    M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
4014    M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
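    /* TRANSPARENT_COLOR (0x7E0) is pure green in RGB565 and is used as the transparency
     * key of the framing bitmap: transparent1/transparent2 are its high and low bytes,
     * compared against consecutive bytes of the RGB565 framing buffer in the loop below */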
4015
4016#ifndef DECODE_GIF_ON_SAVING
4017    Framing = (M4xVSS_FramingStruct *)userData;
4018    currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
4019    FramingRGB = Framing->FramingRgb->pac_data;
4020#endif /*DECODE_GIF_ON_SAVING*/
4021
4022    /*FB*/
4023#ifdef DECODE_GIF_ON_SAVING
4024    M4OSA_ERR err;
4025    Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
4026#if 0
4027    if(Framing == M4OSA_NULL)
4028    {
4029        ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
4030        err = M4xVSS_internalDecodeGIF(userData);
4031        if(M4NO_ERROR != err)
4032        {
4033            M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
4034             Error in M4xVSS_internalDecodeGIF: 0x%x", err);
4035            return err;
4036        }
4037        Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
4038        /* Initializes first GIF time */
4039        ((M4xVSS_FramingContext*)userData)->current_gif_time = pProgress->uiOutputTime;
4040    }
4041#endif
4042    currentFraming = (M4xVSS_FramingStruct *)Framing;
4043    FramingRGB = Framing->FramingRgb->pac_data;
4044#endif /*DECODE_GIF_ON_SAVING*/
4045    /*end FB*/
4046
4047    /**
4048     * Initialize input / output plane pointers */
4049    p_in_Y += PlaneIn[0].u_topleft;
4050    p_in_U += PlaneIn[1].u_topleft;
4051    p_in_V += PlaneIn[2].u_topleft;
4052
4053    p_out0 = PlaneOut[0].pac_data;
4054    p_out1 = PlaneOut[1].pac_data;
4055    p_out2 = PlaneOut[2].pac_data;
4056
4057    /**
4058     * Depending on time, initialize Framing frame to use */
4059    if(Framing->previousClipTime == -1)
4060    {
4061        Framing->previousClipTime = pProgress->uiOutputTime;
4062    }
4063
4064    /**
4065     * If the current clip time has reached the duration of one frame of the framing picture,
4066     * we need to step to the next framing picture */
4067#if 0
4068    if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)
4069    {
4070        while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration)\
4071         < pProgress->uiOutputTime)
4072        {
4073#ifdef DECODE_GIF_ON_SAVING
4074            ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
4075            err = M4xVSS_internalDecodeGIF(userData);
4076            if(M4NO_ERROR != err)
4077            {
4078                M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming:\
4079                 Error in M4xVSS_internalDecodeGIF: 0x%x", err);
4080                return err;
4081            }
4082            if(currentFraming->duration != 0)
4083            {
4084                ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;
4085            }
4086            else
4087            {
4088                ((M4xVSS_FramingContext*)userData)->current_gif_time \
4089                 += pProgress->uiOutputTime - Framing->previousClipTime;
4090            }
4091            Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
4092            currentFraming = (M4xVSS_FramingStruct *)Framing;
4093            FramingRGB = Framing->FramingRgb->pac_data;
4094#else
4095            Framing->pCurrent = currentFraming->pNext;
4096            currentFraming = Framing->pCurrent;
4097#endif /*DECODE_GIF_ON_SAVING*/
4098        }
4099    }
4100#endif
4101
4102    Framing->previousClipTime = pProgress->uiOutputTime;
4103    FramingRGB = currentFraming->FramingRgb->pac_data;
4104    topleft[0] = currentFraming->topleft_x;
4105    topleft[1] = currentFraming->topleft_y;
4106
4107    for( x=0 ;x < PlaneIn[0].u_height ; x++)
4108    {
4109        for( y=0 ;y < PlaneIn[0].u_width ; y++)
4110        {
4111            /**
4112             * To handle framing with input size != output size
4113             * Framing is applied if the coordinates match between framing/topleft and the input plane */
4114            if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
4115                y >= topleft[0] &&
4116                x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
4117                x >= topleft[1])
4118            {
4119                /*Alpha blending support*/
4120                M4OSA_Float alphaBlending = 1;
4121                M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
4122                 (M4xVSS_internalEffectsAlphaBlending*)\
4123                    ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
4124
4125                if(alphaBlendingStruct != M4OSA_NULL)
4126                {
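                    /* Piecewise alpha curve driven by uiProgress (0..1000):
                     * - fade-in : progress in [0, m_fadeInTime*10) ramps m_start -> m_middle
                     * - steady  : holds m_middle until 1000 - m_fadeOutTime*10
                     * - fade-out: ramps m_middle -> m_end over the last m_fadeOutTime*10
                     * The m_* values are percentages, hence the final division by 100 */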
4127                    if(pProgress->uiProgress \
4128                    < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
4129                    {
4130                        if(alphaBlendingStruct->m_fadeInTime == 0) {
4131                            alphaBlending = alphaBlendingStruct->m_start / 100;
4132                        } else {
4133                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
4134                             - alphaBlendingStruct->m_start)\
4135                                *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
4136                            alphaBlending += alphaBlendingStruct->m_start;
4137                            alphaBlending /= 100;
4138                        }
4139                    }
4140                    else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
4141                    m_fadeInTime*10) && pProgress->uiProgress < 1000\
4142                     - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
4143                    {
4144                        alphaBlending = (M4OSA_Float)\
4145                        ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
4146                    }
4147                    else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
4148                    (alphaBlendingStruct->m_fadeOutTime*10))
4149                    {
4150                        if(alphaBlendingStruct->m_fadeOutTime == 0) {
4151                            alphaBlending = alphaBlendingStruct->m_end / 100;
4152                        } else {
4153                            alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
4154                            - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
4155                            /(alphaBlendingStruct->m_fadeOutTime*10);
4156                            alphaBlending += alphaBlendingStruct->m_end;
4157                            alphaBlending /= 100;
4158                        }
4159                    }
4160                }
4161                /**/
4162
4163                if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
4164                {
4165                    *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
4166                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
4167                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
4168                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
4169                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
4170                }
4171                else
4172                {
4173                    *( p_out0+y+x*PlaneOut[0].u_stride)=
4174                        (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
4175                            +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
4176                    *( p_out0+y+x*PlaneOut[0].u_stride)+=
4177                        (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
4178                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
4179                        (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
4180                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
4181                                *alphaBlending;
4182                    *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
4183                        (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
4184                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
4185                        (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
4186                            +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
4187                                *alphaBlending;
4188                    *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
4189                        (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
4190                }
4191                if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
4192                    y == PlaneIn[0].u_width-1)
4193                {
4194                    FramingRGB = FramingRGB + 2 \
4195                        * (topleft[0] + currentFraming->FramingYuv[0].u_width \
4196                            - PlaneIn[0].u_width + 1);
4197                }
4198                else
4199                {
4200                    FramingRGB = FramingRGB + 2;
4201                }
4202            }
4203            /**
4204             * Just copy input plane to output plane */
4205            else
4206            {
4207                *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
4208                *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
4209                    *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
4210                *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
4211                    *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
4212            }
4213        }
4214    }
4215
4216#ifdef DECODE_GIF_ON_SAVING
4217#if 0
4218    if(pProgress->bIsLast == M4OSA_TRUE
4219        && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)
4220    {
4221        M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);
4222    }
4223#endif
4224#endif /*DECODE_GIF_ON_SAVING*/
4225
4226    return M4VIFI_OK;
4227}
4228
4229
4230/**
4231 ******************************************************************************
4232 * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
4233 *                                                    M4VIFI_ImagePlane *PlaneIn,
4234 *                                                    M4VIFI_ImagePlane *PlaneOut,
4235 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4236 *                                                    M4OSA_UInt32 uiEffectKind)
4237 *
4238 * @brief    This function makes a video look as if it were taken in the fifties
4239 * @note
4240 * @param    pUserData       (IN) Context
4241 * @param    pPlaneIn        (IN) Input YUV420 planar
4242 * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
4243 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4244 * @param    uiEffectKind    (IN) Unused
4245 *
4246 * @return    M4VIFI_OK:            No error
4247 * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
4248 ******************************************************************************
4249 */
4250M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
4251                                                M4VIFI_ImagePlane *pPlaneIn,
4252                                                M4VIFI_ImagePlane *pPlaneOut,
4253                                                M4VSS3GPP_ExternalProgress *pProgress,
4254                                                M4OSA_UInt32 uiEffectKind )
4255{
4256    M4VIFI_UInt32 x, y, xShift;
4257    M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
4258    M4VIFI_UInt8 *pOutY, *pInYbegin;
4259    M4VIFI_UInt8 *pInCr,* pOutCr;
4260    M4VIFI_Int32 plane_number;
4261
4262    /* Internal context*/
4263    M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
4264
4265    /* Check the inputs (debug only) */
4266    M4OSA_DEBUG_IF2((p_FiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
4267         "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4268    M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
4269         "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4270    M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
4271        "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
4272
4273    /* Initialize input / output plane pointers */
4274    pInY += pPlaneIn[0].u_topleft;
4275    pOutY = pPlaneOut[0].pac_data;
4276    pInYbegin  = pInY;
4277
4278    /* Initialize the random */
4279    if(p_FiftiesData->previousClipTime < 0)
4280    {
4281        M4OSA_randInit();
4282        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4283        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4284        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4285    }
4286
4287    /* Choose random values if we have reached the duration of a partial effect */
4288    else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
4289         > p_FiftiesData->fiftiesEffectDuration)
4290    {
4291        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
4292        M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
4293        p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
4294    }
4295
4296    /* Put in Sepia the chrominance */
4297    for (plane_number = 1; plane_number < 3; plane_number++)
4298    {
4299        pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
4300        pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
4301
4302        for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
4303        {
4304            if (1 == plane_number)
4305                memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
4306            else
4307                memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
4308
4309            pInCr  += pPlaneIn[plane_number].u_stride;
4310            pOutCr += pPlaneOut[plane_number].u_stride;
4311        }
4312    }
4313
4314    /* Compute the new pixels values */
4315    for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
4316    {
4317        M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
4318
4319        /* Compute the xShift (random value) */
4320        if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
4321            xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
4322        else
4323            xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
4324                % (pPlaneIn[0].u_height - 1);
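        /* xShift selects a pseudo-randomly offset source row, wrapping modulo the frame
         * height, so each output row is read from a shifted input row: this gives the
         * vertical frame-slip / jitter look of the fifties effect */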
4325
4326        /* Initialize the pointers */
4327        p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
4328        p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
4329
4330        for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
4331        {
4332            /* Set Y value */
4333            if (xShift > (pPlaneIn[0].u_height - 4))
4334                *p_outYtmp = 40;        /* Add some horizontal black lines between the
4335                                        two parts of the image */
4336            else if ( y == p_FiftiesData->stripeRandomValue)
4337                *p_outYtmp = 90;        /* Add a random vertical line for the bulk */
4338            else
4339                *p_outYtmp = *p_inYtmp;
4340
4341
4342            /* Go to the next pixel */
4343            p_outYtmp++;
4344            p_inYtmp++;
4345
4346            /* Restart at the beginning of the line for the last pixel*/
4347            if (y == (pPlaneIn[0].u_width - 2))
4348                p_outYtmp = pOutY;
4349        }
4350
4351        /* Go to the next line */
4352        pOutY += pPlaneOut[0].u_stride;
4353    }
4354
4355    return M4VIFI_OK;
4356}
4357
4358/**
4359 ******************************************************************************
4360 * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
4361 * @brief    Zoom in/out video effect functions.
4362 * @note    The external video function is used only if VideoEffectType is set to
4363 * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut.
4364 *
4365 * @param   pFunctionContext    (IN) The function context, previously set by the integrator
4366 * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
4367 *                                    image planes (Y, U and V)
4368 * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
4369 *                                        three valid image planes (Y, U and V)
4370 * @param    pProgress            (IN) Set of information about the video transition progress.
4371 * @return    M4NO_ERROR:            No error
4372 * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
4373 ******************************************************************************
4374 */
4375
4376M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
4377    M4OSA_Void *pFunctionContext,
4378    M4VIFI_ImagePlane *pInputPlanes,
4379    M4VIFI_ImagePlane *pOutputPlanes,
4380    M4VSS3GPP_ExternalProgress *pProgress,
4381    M4OSA_UInt32 uiEffectKind
4382)
4383{
4384    M4OSA_UInt32 boxWidth;
4385    M4OSA_UInt32 boxHeight;
4386    M4OSA_UInt32 boxPosX;
4387    M4OSA_UInt32 boxPosY;
4388    M4OSA_UInt32 ratio = 0;
4389    /* successive ratios differ by a factor of ~1.189207 (2^(1/4)) */
4390    /* zoom between x1 and x16 */
4391    M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
4392                                6889,8192,9742,11585,13777,16384};
4393    M4OSA_UInt32 ik;
4394
4395    M4VIFI_ImagePlane boxPlane[3];
4396
4397    if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
4398    {
4399        //ratio = 16 - (15 * pProgress->uiProgress)/1000;
4400        ratio = 16 - pProgress->uiProgress / 66 ;
4401    }
4402    else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
4403    {
4404        //ratio = 1 + (15 * pProgress->uiProgress)/1000;
4405        ratio = 1 + pProgress->uiProgress / 66 ;
4406    }
4407
4408    for(ik=0;ik<3;ik++){
4409
4410        boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
4411        boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
4412
4413        boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
4414        boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
4415        boxPlane[ik].u_height = (boxHeight)&(~1);
4416        boxPlane[ik].u_width = (boxWidth)&(~1);
4417
4418        boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
4419        boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
4420        boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
4421    }
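    /* boxPlane now describes a centered sub-rectangle of the input whose dimensions are
     * input_size * 1024 / ratiotab[ratio]; resizing this box back to the full output size
     * below produces the zoom-in / zoom-out rendering */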
4422
4423    M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
4424
4425    /**
4426     * Return */
4427    return(M4NO_ERROR);
4428}
4429
4430/**
4431 ******************************************************************************
4432 * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
4433 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4434 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4435 *                                    M4VIFI_ImagePlane *PlaneOut,
4436 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4437 *                                    M4OSA_UInt32 uiTransitionKind)
4438 *
4439 * @brief    This function applies an Alpha Magic transition between two input YUV420 planar frames
4440 * @note
4441 * @param    userData        (IN) Contains a pointer on a settings structure
4442 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4443 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4444 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4445 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4446 * @param    uiTransitionKind(IN) Unused
4447 *
4448 * @return    M4VIFI_OK:    No error
4449 ******************************************************************************
4450 */
4451M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4452                             M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4453                             M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
4454{
4455
4456    M4OSA_ERR err;
4457
4458    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4459    M4VIFI_Int32 alphaProgressLevel;
4460
4461    M4VIFI_ImagePlane* planeswap;
4462    M4VIFI_UInt32 x,y;
4463
4464    M4VIFI_UInt8 *p_out0;
4465    M4VIFI_UInt8 *p_out1;
4466    M4VIFI_UInt8 *p_out2;
4467    M4VIFI_UInt8 *alphaMask;
4468    /* "Old image" */
4469    M4VIFI_UInt8 *p_in1_Y;
4470    M4VIFI_UInt8 *p_in1_U;
4471    M4VIFI_UInt8 *p_in1_V;
4472    /* "New image" */
4473    M4VIFI_UInt8 *p_in2_Y;
4474    M4VIFI_UInt8 *p_in2_U;
4475    M4VIFI_UInt8 *p_in2_V;
4476
4477    err = M4NO_ERROR;
4478
4479    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4480
4481    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4482
4483    if( alphaContext->isreverse != M4OSA_FALSE)
4484    {
4485        alphaProgressLevel = 255 - alphaProgressLevel;
4486        planeswap = PlaneIn1;
4487        PlaneIn1 = PlaneIn2;
4488        PlaneIn2 = planeswap;
4489    }
4490
4491    p_out0 = PlaneOut[0].pac_data;
4492    p_out1 = PlaneOut[1].pac_data;
4493    p_out2 = PlaneOut[2].pac_data;
4494
4495    alphaMask = alphaContext->pPlane->pac_data;
4496
4497    /* "Old image" */
4498    p_in1_Y = PlaneIn1[0].pac_data;
4499    p_in1_U = PlaneIn1[1].pac_data;
4500    p_in1_V = PlaneIn1[2].pac_data;
4501    /* "New image" */
4502    p_in2_Y = PlaneIn2[0].pac_data;
4503    p_in2_U = PlaneIn2[1].pac_data;
4504    p_in2_V = PlaneIn2[2].pac_data;
4505
4506     /**
4507     * For each row ... */
4508    for( y=0; y<PlaneOut->u_height; y++ )
4509    {
4510        /**
4511         * ... and each pixel (column) of the alpha mask row */
4512        for( x=0; x<PlaneOut->u_width; x++ )
4513        {
4514            /**
4515             * If the value of the current pixel of the alpha mask is greater than the current
4516             * progress level ( the progress is normalized to [0-255] ) */
4517            if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
4518            {
4519                /* We keep "old image" in output plane */
4520                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4521                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4522                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4523                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4524                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4525            }
4526            else
4527            {
4528                /* We take "new image" in output plane */
4529                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4530                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4531                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4532                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4533                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4534            }
4535        }
4536    }
4537
4538    return(err);
4539}
4540
4541/**
4542 ******************************************************************************
4543 * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
4544 *                                    M4VIFI_ImagePlane PlaneIn1[3],
4545 *                                    M4VIFI_ImagePlane PlaneIn2[3],
4546 *                                    M4VIFI_ImagePlane *PlaneOut,
4547 *                                    M4VSS3GPP_ExternalProgress *pProgress,
4548 *                                    M4OSA_UInt32 uiTransitionKind)
4549 *
4550 * @brief    This function applies an Alpha Magic transition with blending between two input YUV420 planar frames
4551 * @note
4552 * @param    userData        (IN) Contains a pointer on a settings structure
4553 * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
4554 * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
4555 * @param    PlaneOut        (IN/OUT) Output YUV420 planar
4556 * @param    pProgress        (IN/OUT) Progress indication (0-100)
4557 * @param    uiTransitionKind(IN) Unused
4558 *
4559 * @return    M4VIFI_OK:    No error
4560 ******************************************************************************
4561 */
4562M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4563                                     M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4564                                     M4VSS3GPP_ExternalProgress *pProgress,
4565                                     M4OSA_UInt32 uiTransitionKind)
4566{
4567    M4OSA_ERR err;
4568
4569    M4xVSS_internal_AlphaMagicSettings* alphaContext;
4570    M4VIFI_Int32 alphaProgressLevel;
4571    M4VIFI_Int32 alphaBlendLevelMin;
4572    M4VIFI_Int32 alphaBlendLevelMax;
4573    M4VIFI_Int32 alphaBlendRange;
4574
4575    M4VIFI_ImagePlane* planeswap;
4576    M4VIFI_UInt32 x,y;
4577    M4VIFI_Int32 alphaMaskValue;
4578
4579    M4VIFI_UInt8 *p_out0;
4580    M4VIFI_UInt8 *p_out1;
4581    M4VIFI_UInt8 *p_out2;
4582    M4VIFI_UInt8 *alphaMask;
4583    /* "Old image" */
4584    M4VIFI_UInt8 *p_in1_Y;
4585    M4VIFI_UInt8 *p_in1_U;
4586    M4VIFI_UInt8 *p_in1_V;
4587    /* "New image" */
4588    M4VIFI_UInt8 *p_in2_Y;
4589    M4VIFI_UInt8 *p_in2_U;
4590    M4VIFI_UInt8 *p_in2_V;
4591
4592
4593    err = M4NO_ERROR;
4594
4595    alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
4596
4597    alphaProgressLevel = (pProgress->uiProgress * 255)/1000;
4598
4599    if( alphaContext->isreverse != M4OSA_FALSE)
4600    {
4601        alphaProgressLevel = 255 - alphaProgressLevel;
4602        planeswap = PlaneIn1;
4603        PlaneIn1 = PlaneIn2;
4604        PlaneIn2 = planeswap;
4605    }
4606
4607    alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
4608
4609    alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
4610
4611    alphaBlendRange = (alphaContext->blendingthreshold)*2;
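    /* [alphaBlendLevelMin, alphaBlendLevelMax] is a band of width 2*blendingthreshold
     * centered on the current progress level (0..255): in the pixel loop below, mask values
     * above the band keep the "old" image, values below it take the "new" image, and values
     * inside the band are linearly blended between the two */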
4612
4613    p_out0 = PlaneOut[0].pac_data;
4614    p_out1 = PlaneOut[1].pac_data;
4615    p_out2 = PlaneOut[2].pac_data;
4616
4617    alphaMask = alphaContext->pPlane->pac_data;
4618
4619    /* "Old image" */
4620    p_in1_Y = PlaneIn1[0].pac_data;
4621    p_in1_U = PlaneIn1[1].pac_data;
4622    p_in1_V = PlaneIn1[2].pac_data;
4623    /* "New image" */
4624    p_in2_Y = PlaneIn2[0].pac_data;
4625    p_in2_U = PlaneIn2[1].pac_data;
4626    p_in2_V = PlaneIn2[2].pac_data;
4627
4628    /* apply Alpha Magic on each pixel */
4629       for( y=0; y<PlaneOut->u_height; y++ )
4630    {
4631        for( x=0; x<PlaneOut->u_width; x++ )
4632        {
4633            alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
4634            if( alphaBlendLevelMax < alphaMaskValue )
4635            {
4636                /* We keep "old image" in output plane */
4637                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
4638                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4639                    *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
4640                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4641                    *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
4642            }
4643            else if( (alphaBlendLevelMin < alphaMaskValue)&&
4644                    (alphaMaskValue <= alphaBlendLevelMax ) )
4645            {
4646                /* We blend "old and new image" in output plane */
4647                *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
4648                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
4649                        +(alphaBlendLevelMax-alphaMaskValue)\
4650                            *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
4651
4652                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
4653                    (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
4654                        *PlaneIn1[1].u_stride))
4655                            +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
4656                                *PlaneIn2[1].u_stride)) )/alphaBlendRange );
4657
4658                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4659                    (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
4660                        *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
4661                                +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
4662                                    *PlaneIn2[2].u_stride)) )/alphaBlendRange );
4663
4664            }
4665            else
4666            {
4667                /* We take "new image" in output plane */
4668                *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
4669                *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
4670                    *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
4671                *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
4672                    *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
4673            }
4674        }
4675    }
4676
4677    return(err);
4678}
4679
4680#define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
4681     * (plane).u_stride + (x) )
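/* M4XXX_SampleAddress returns the address of the sample at (x, y) inside an image plane,
 * taking the plane's top-left offset and stride into account */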
4682
4683static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
4684{
4685    M4OSA_UInt32    height, width, sourceStride, destStride, y;
4686    M4OSA_MemAddr8    sourceWalk, destWalk;
4687
4688    /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4689     recomputed from memory. */
4690    height = dest->u_height;
4691    width = dest->u_width;
4692
4693    sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
4694    sourceStride = source->u_stride;
4695
4696    destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
4697    destStride = dest->u_stride;
4698
4699    for (y=0; y<height; y++)
4700    {
4701        memcpy((void *)destWalk, (void *)sourceWalk, width);
4702        destWalk += destStride;
4703        sourceWalk += sourceStride;
4704    }
4705}
4706
4707static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
4708                                                M4VIFI_ImagePlane* bottomPlane,
4709                                                M4VIFI_ImagePlane *PlaneOut,
4710                                                M4OSA_UInt32    shiftUV)
4711{
4712    M4OSA_UInt32 i;
4713
4714    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4715    "hot" at the same time (better for cache). */
4716    for (i=0; i<3; i++)
4717    {
4718        M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
4719        M4OSA_MemAddr8    sourceWalk, destWalk;
4720
4721        /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
4722         recomputed from memory. */
4723        if (0 == i) /* Y plane */
4724        {
4725            bottomPartHeight = 2*shiftUV;
4726        }
4727        else /* U and V planes */
4728        {
4729            bottomPartHeight = shiftUV;
4730        }
4731        topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
4732        width = PlaneOut[i].u_width;
4733
4734        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
4735        sourceStride = topPlane[i].u_stride;
4736
4737        destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4738        destStride = PlaneOut[i].u_stride;
4739
4740        /* First the part from the top source clip frame. */
4741        for (y=0; y<topPartHeight; y++)
4742        {
4743            memcpy((void *)destWalk, (void *)sourceWalk, width);
4744            destWalk += destStride;
4745            sourceWalk += sourceStride;
4746        }
4747
4748        /* and now change the vars to copy the part from the bottom source clip frame. */
4749        sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
4750        sourceStride = bottomPlane[i].u_stride;
4751
4752        /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
4753
4754        for (y=0; y<bottomPartHeight; y++)
4755        {
4756            memcpy((void *)destWalk, (void *)sourceWalk, width);
4757            destWalk += destStride;
4758            sourceWalk += sourceStride;
4759        }
4760    }
4761    return M4NO_ERROR;
4762}
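
/* Worked example (added; hypothetical QCIF frame): with a 176x144 output and shiftUV == 18,
   bottomPartHeight is 36 on the Y plane and 18 on the chroma planes, so the top 108 luma rows
   are copied from topPlane (starting at its row 36) and the bottom 36 luma rows come from
   bottomPlane. */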
4763
4764static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
4765                                                  M4VIFI_ImagePlane* rightPlane,
4766                                                  M4VIFI_ImagePlane *PlaneOut,
4767                                                  M4OSA_UInt32    shiftUV)
4768{
4769    M4OSA_UInt32 i, y;
4770    /* If the shift is exactly 0, or exactly the width of the target image, the output is simply
4771    the left frame or the right frame, respectively. The general path below handles these cases
4772    poorly (it would issue zero-size memcpy calls), so treat them as special cases. */
4773
4774    if (0 == shiftUV)    /* output left frame */
4775    {
4776        for (i = 0; i<3; i++) /* for each YUV plane */
4777        {
4778            M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
4779        }
4780
4781        return M4NO_ERROR;
4782    }
4783
4784    if (PlaneOut[1].u_width == shiftUV) /* output right frame */
4785    {
4786        for (i = 0; i<3; i++) /* for each YUV plane */
4787        {
4788            M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
4789        }
4790
4791        return M4NO_ERROR;
4792    }
4793
4794
4795    /* Do three loops, one for each plane type, in order to avoid having too many buffers
4796    "hot" at the same time (better for cache). */
4797    for (i=0; i<3; i++)
4798    {
4799        M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
4800        M4OSA_UInt32    leftStride,    rightStride,    destStride;
4801        M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
4802
4803        /* cache the vars used in the loop so as to avoid them being repeatedly fetched
4804        and recomputed from memory. */
4805        height = PlaneOut[i].u_height;
4806
4807        if (0 == i) /* Y plane */
4808        {
4809            rightPartWidth = 2*shiftUV;
4810        }
4811        else /* U and V planes */
4812        {
4813            rightPartWidth = shiftUV;
4814        }
4815        leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
4816
4817        leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
4818        leftStride = leftPlane[i].u_stride;
4819
4820        rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
4821        rightStride = rightPlane[i].u_stride;
4822
4823        destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
4824        destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
4825        destStride = PlaneOut[i].u_stride;
4826
4827        for (y=0; y<height; y++)
4828        {
4829            memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
4830            leftWalk += leftStride;
4831
4832            memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
4833            rightWalk += rightStride;
4834
4835            destWalkLeft += destStride;
4836            destWalkRight += destStride;
4837        }
4838    }
4839
4840    return M4NO_ERROR;
4841}
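
/* Worked example (added; hypothetical QCIF frame): with PlaneOut widths of 176 (Y) and 88 (U/V)
   and shiftUV == 22, rightPartWidth is 44 on the Y plane and 22 on the chroma planes; the
   leftmost 132 luma columns are copied from leftPlane (starting at its column 44) and the
   rightmost 44 luma columns come from rightPlane. */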
4842
4843
4844M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4845                                  M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
4846                                  M4VSS3GPP_ExternalProgress *pProgress,
4847                                  M4OSA_UInt32 uiTransitionKind)
4848{
4849    M4xVSS_internal_SlideTransitionSettings* settings =
4850         (M4xVSS_internal_SlideTransitionSettings*)userData;
4851    M4OSA_UInt32    shiftUV;
4852
4853    M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
4854    if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4855        || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
4856    {
4857        /* horizontal slide */
4858        shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
4859        M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
4860            shiftUV,pProgress->uiProgress );
4861        if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
4862        {
4863            /* Put the previous clip frame on the right and the next clip frame on the left,
4864            and reverse shiftUV (it is measured from the left frame) so that the
4865            transition starts on the right frame, i.e. on the frame that comes
4866            from the previous clip. */
4867            return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4868                 (PlaneOut[1]).u_width - shiftUV);
4869        }
4870        else /* Left out, right in*/
4871        {
4872            return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4873        }
4874    }
4875    else
4876    {
4877        /* vertical slide */
4878        shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
4879        M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
4880            pProgress->uiProgress );
4881        if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
4882        {
4883            /* Put the previous clip frame top, the next clip frame bottom. */
4884            return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
4885        }
4886        else /* Bottom out, top in */
4887        {
4888            return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
4889                (PlaneOut[1]).u_height - shiftUV);
4890        }
4891    }
4892
4893    /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
4894    at least in debug mode. */
4895}
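
/* Worked example (added; hypothetical values): for a QCIF output (chroma width 88) and
   pProgress->uiProgress == 250, shiftUV = 88*250/1000 = 22. For LeftOutRightIn this value is
   passed as-is, so the incoming clip covers the rightmost quarter of the output; for
   RightOutLeftIn the call uses 88 - 22 = 66, so the transition starts on the previous clip's
   frame and the incoming frame grows in from the left. */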
4896
4897
4898/**
4899 ******************************************************************************
4900 * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *userData,
4901 *                                                    M4VIFI_ImagePlane PlaneIn1[3],
4902 *                                                    M4VIFI_ImagePlane PlaneIn2[3],
4903 *                                                    M4VIFI_ImagePlane *PlaneOut,
4904 *                                                    M4VSS3GPP_ExternalProgress *pProgress,
4905 *                                                    M4OSA_UInt32 uiTransitionKind)
4906 * @brief    This function applies a fade to black on the outgoing clip, then a fade from black on the incoming clip
4907 * @param    userData            (IN) Transition context (unused by this transition)
4908 * @param    PlaneIn1            (IN) Input YUV420 planar of the outgoing (first) clip
4909 * @param    PlaneIn2            (IN) Input YUV420 planar of the incoming (second) clip
4910 * @param    PlaneOut            (IN/OUT) Output YUV420 planar
4911 * @param    pProgress            (IN/OUT) Progress indication (0-1000)
4912 * @param    uiTransitionKind    (IN) Unused
4913 *
4914 * @return    M4VIFI_OK:    No error
4915 ******************************************************************************
4916 */
4917M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
4918                                     M4VIFI_ImagePlane PlaneIn2[3],
4919                                     M4VIFI_ImagePlane *PlaneOut,
4920                                     M4VSS3GPP_ExternalProgress *pProgress,
4921                                     M4OSA_UInt32 uiTransitionKind)
4922{
4923    M4OSA_Int32 tmp = 0;
4924    M4OSA_ERR err = M4NO_ERROR;
4925
4926
4927    if((pProgress->uiProgress) < 500)
4928    {
4929        /**
4930         * Compute where we are in the effect (scale is 0->1024) */
4931        tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
4932
4933        /**
4934         * Apply the darkening effect */
4935        err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
4936             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4937        if (M4NO_ERROR != err)
4938        {
4939            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
4940                 error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4941            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4942        }
4943    }
4944    else
4945    {
4946        /**
4947         * Compute where we are in the effect (scale is 0->1024). */
4948        tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
4949
4950        /**
4951         * Apply the luma scaling (fade in from black) */
4952        err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
4953             (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
4954        if (M4NO_ERROR != err)
4955        {
4956            M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
4957                 M4VFL_modifyLumaWithScale returns error 0x%x,\
4958                     returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
4959            return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
4960        }
4961    }
4962
4963
4964    return M4VIFI_OK;
4965}
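
/* Worked example (added): the scale passed to M4VFL_modifyLumaWithScale runs over 0..1024.
   First half (fade to black, applied to PlaneIn1): at uiProgress == 250,
   tmp = (1.0 - 500/1000) * 1024 = 512, i.e. the outgoing frame at half brightness, reaching
   (near) 0 just before uiProgress == 500. Second half (fade from black, applied to PlaneIn2):
   at uiProgress == 750, tmp = (500/1000) * 1024 = 512, rising to 1024 (full brightness) at
   uiProgress == 1000. */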
4966
4967
4968/**
4969 ******************************************************************************
4970 * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
4971 *                                                        M4OSA_Void* pBufferIn,
4972 *                                                        M4OSA_Void* pBufferOut,
4973 *                                                        M4OSA_UInt32* convertedSize)
4974 *
4975 * @brief    This function converts from the customer format to UTF8
4976 * @note
4977 * @param    pContext        (IN)    The integrator own context
4978 * @param    pBufferIn        (IN)    Buffer to convert
4979 * @param    pBufferOut        (OUT)    Converted buffer
4980 * @param    convertedSize    (OUT)    Size of the converted buffer
4981 *
4982 * @return    M4NO_ERROR:    No error
4983 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
4984 ******************************************************************************
4985 */
4986M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
4987                                       M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
4988{
4989    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
4990    M4OSA_ERR err;
4991
4992    pBufferOut = pBufferIn; /* default to the input; note: pBufferOut is passed by value, so callers read the result from the context's temporary buffer */
4993    if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
4994        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
4995    {
4996        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
4997
4998        memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
4999            ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
5000
5001        err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
5002            (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
5003                 (M4OSA_UInt32*)&ConvertedSize);
5004        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
5005        {
5006            M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
5007
5008            /*free too small buffer*/
5009            M4OSA_free((M4OSA_MemAddr32)xVSS_context->\
5010                UTFConversionContext.pTempOutConversionBuffer);
5011
5012            /*re-allocate the buffer*/
5013            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
5014                 (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
5015                     (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
5016            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
5017            {
5018                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
5019                return M4ERR_ALLOC;
5020            }
5021            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
5022
5023            memset((void *)xVSS_context->\
5024                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
5025                    UTFConversionContext.m_TempOutConversionSize);
5026
5027            err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
5028                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
5029                    (M4OSA_UInt32*)&ConvertedSize);
5030            if(err != M4NO_ERROR)
5031            {
5032                M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
5033                return err;
5034            }
5035        }
5036        else if(err != M4NO_ERROR)
5037        {
5038            M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
5039            return err;
5040        }
5041        /*decoded path*/
5042        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
5043        (*convertedSize) = ConvertedSize;
5044    }
5045    return M4NO_ERROR;
5046}
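
/* Illustrative sketch (added; not part of the original code): typical caller pattern for the
   UTF8 conversion helper above. The function and variable names are hypothetical; as noted in
   the code, the converted string is read back from the context's temporary conversion buffer
   rather than through the pBufferOut argument. */
#if 0
static M4OSA_ERR exampleConvertPath(M4xVSS_Context* xVSS_context, M4OSA_Void* pOriginalPath)
{
    M4OSA_UInt32 length = 0;
    M4OSA_Void*  pDecodedPath = pOriginalPath;
    M4OSA_ERR    err;

    err = M4xVSS_internalConvertToUTF8((M4OSA_Context)xVSS_context, pOriginalPath,
            (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
    if (M4NO_ERROR != err)
    {
        return err;
    }
    if (xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
    {
        /* A conversion took place: use the context buffer, not the original string */
        pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
    }
    /* ... use pDecodedPath, e.g. pass it to a file open call ... */
    return M4NO_ERROR;
}
#endif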
5047
5048
5049/**
5050 ******************************************************************************
5051 * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn, M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
5052 *
5053 * @brief    This function converts from UTF8 to the customer format
5054 * @note
5055 * @param    pContext    (IN) The integrator own context
5056 * @param    pBufferIn        (IN)    Buffer to convert
5057 * @param    pBufferOut        (OUT)    Converted buffer
5058 * @param    convertedSize    (OUT)    Size of the converted buffer
5059 *
5060 * @return    M4NO_ERROR:    No error
5061 * @return    M4ERR_PARAMETER: At least one of the function parameters is null
5062 ******************************************************************************
5063 */
5064M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
5065                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
5066{
5067    M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
5068    M4OSA_ERR err;
5069
5070    pBufferOut = pBufferIn; /* default to the input; note: pBufferOut is passed by value, so callers read the result from the context's temporary buffer */
5071    if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
5072        && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
5073    {
5074        M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
5075
5076        memset((void *)xVSS_context->\
5077            UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
5078                UTFConversionContext.m_TempOutConversionSize);
5079
5080        err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
5081            ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
5082                UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
5083        if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
5084        {
5085            M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
5086
5087            /*free too small buffer*/
5088            M4OSA_free((M4OSA_MemAddr32)xVSS_context->\
5089                UTFConversionContext.pTempOutConversionBuffer);
5090
5091            /*re-allocate the buffer*/
5092            xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
5093                (M4OSA_Void*)M4OSA_malloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
5094                     (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
5095            if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
5096            {
5097                M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
5098                return M4ERR_ALLOC;
5099            }
5100            xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
5101
5102            memset((void *)xVSS_context->\
5103                UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
5104                    UTFConversionContext.m_TempOutConversionSize);
5105
5106            err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
5107                (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
5108                     (M4OSA_UInt32*)&ConvertedSize);
5109            if(err != M4NO_ERROR)
5110            {
5111                M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
5112                return err;
5113            }
5114        }
5115        else if(err != M4NO_ERROR)
5116        {
5117            M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
5118            return err;
5119        }
5120        /*decoded path*/
5121        pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
5122        (*convertedSize) = ConvertedSize;
5123    }
5124
5125
5126    return M4NO_ERROR;
5127}
5128