M4VSS3GPP_EditVideo.c revision ff65330bfdd2841110d8ff2a1aba3543b2cb3156
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4VSS3GPP_EditVideo.c
19 * @brief    Video Studio Service 3GPP edit API implementation.
20 * @note
21 ******************************************************************************
22 */
23
24/****************/
25/*** Includes ***/
26/****************/
27
28#include "NXPSW_CompilerSwitches.h"
29/**
30 * Our header */
31#include "M4VSS3GPP_API.h"
32#include "M4VSS3GPP_InternalTypes.h"
33#include "M4VSS3GPP_InternalFunctions.h"
34#include "M4VSS3GPP_InternalConfig.h"
35#include "M4VSS3GPP_ErrorCodes.h"
36
37// StageFright encoders require resolutions that are multiples of 16
38#include "M4ENCODER_common.h"
39/**
40 * OSAL headers */
41#include "M4OSA_Memory.h" /**< OSAL memory management */
42#include "M4OSA_Debug.h"  /**< OSAL debug management */
43
44/**
45 * component includes */
46#include "M4VFL_transition.h" /**< video effects */
47
48/*for transition behaviour*/
49#include <math.h>
50#include "M4AIR_API.h"
51#include "M4VSS3GPP_Extended_API.h"
52/** Determine absolute value of a. */
53#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? (-(a)) : (a) )
54#define Y_PLANE_BORDER_VALUE    0x00
55#define U_PLANE_BORDER_VALUE    0x80
56#define V_PLANE_BORDER_VALUE    0x80
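/* Note: these values render a border as black: 0x00 is black luma and 0x80 is
 * the neutral chroma value in YUV 4:2:0. */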
57
58/************************************************************************/
59/* Static local functions                                               */
60/************************************************************************/
61
62static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
63    M4VSS3GPP_InternalEditContext *pC );
64static M4OSA_Void
65M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
66                               M4OSA_UInt8 uiClipNumber );
67static M4OSA_ERR M4VSS3GPP_intApplyVideoEffect(
68          M4VSS3GPP_InternalEditContext *pC, M4VIFI_ImagePlane *pPlaneIn,
69          M4VIFI_ImagePlane *pPlaneOut, M4OSA_Bool bSkipFramingEffect);
70
71static M4OSA_ERR
72M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
73                             M4VIFI_ImagePlane *pPlaneOut );
74
75static M4OSA_Void
76M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
77                            M4SYS_AccessUnit *pAU );
78static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
79                                                  M4OSA_UInt8 uiCts );
80static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
81                                           M4OSA_UInt32 uiCtsSec );
82static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
83                                           M4OSA_UInt32 *pCtsSec );
84static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
85                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight );
86static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(
87          M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr,
88          M4VIFI_ImagePlane* pImagePlanes,
89          M4OSA_UInt32 width,M4OSA_UInt32 height);
90static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(
91          M4VSS3GPP_InternalEditContext *pC,
92          M4xVSS_MediaRendering renderingMode,
93          M4VIFI_ImagePlane* pInplane,
94          M4VIFI_ImagePlane* pOutplane);
95
96static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 (
97                                        M4VSS3GPP_InternalEditContext *pC,
98                                        M4VSS3GPP_ClipContext* pClipCtxt);
99static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(
100                                             M4VSS3GPP_InternalEditContext *pC,
101                                             M4VSS3GPP_ClipContext* pClipCtxt,
102                                             M4_MediaTime ts,
103                                             M4OSA_Bool bIsClip1,
104                                             M4VIFI_ImagePlane *pResizePlane,
105                                             M4VIFI_ImagePlane *pPlaneNoResize,
106                                             M4VIFI_ImagePlane *pPlaneOut);
107
108static M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
109                                      M4OSA_UInt32 rotationDegree);
110
111static M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
112                                      M4OSA_UInt32 width, M4OSA_UInt32 height);
113
114static M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay (
115                                      M4VSS3GPP_InternalEditContext *pC,
116                                      M4VIFI_ImagePlane *pPlaneIn,
117                                      M4VIFI_ImagePlane *pPlaneOut);
118
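/**
 * Overview of the video edit states handled below:
 *  - READ_WRITE / AFTER_CUT: compressed access units are copied from the reader
 *    to the writer without re-encoding.
 *  - DECODE_ENCODE / BEGIN_CUT: frames are decoded up to the target time and
 *    re-encoded (rendering, effects and writing happen in the encoder callbacks).
 *  - TRANSITION: both clips are decoded, blended and re-encoded.
 */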
119/**
120 ******************************************************************************
121 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
122 * @brief    One step of video processing
123 * @param   pC    (IN/OUT) Internal edit context
124 ******************************************************************************
125 */
126M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
127{
128    M4OSA_ERR err;
129    M4OSA_Int32 iCts, iNextCts;
130    M4ENCODER_FrameMode FrameMode;
131    M4OSA_Bool bSkipFrame;
132    M4OSA_UInt16 offset;
133
134    /**
135     * Check if we reached end cut. Decorrelate input and output encoding
136     * timestamp to handle encoder prefetch
137     */
138    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
139        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
140    {
141        /* Re-adjust video to precise cut time */
142        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
143            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
144        if ( pC->iInOutTimeOffset < 0 ) {
145            pC->iInOutTimeOffset = 0;
146        }
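        /* iInOutTimeOffset now holds the overshoot past the requested end time,
         * clamped so that it never goes negative. */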
147
148        /**
149        * Video is done for this clip */
150        err = M4VSS3GPP_intReachedEndOfVideo(pC);
151
152        /* RC: to know when a file has been processed */
153        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
154        {
155            M4OSA_TRACE1_1(
156                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
157                err);
158        }
159
160        return err;
161    }
162
163    /* Don't change the states if we are in decodeUpTo() */
164    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
165        && (( pC->pC2 == M4OSA_NULL)
166        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
167    {
168        /**
169        * Check Video Mode, depending on the current output CTS */
170        err = M4VSS3GPP_intCheckVideoMode(
171            pC); /**< This function changes the pC->Vstate variable! */
172
173        if (M4NO_ERROR != err)
174        {
175            M4OSA_TRACE1_1(
176                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
177                err);
178            return err;
179        }
180    }
181
182
183    switch( pC->Vstate )
184    {
185        /* _________________ */
186        /*|                 |*/
187        /*| READ_WRITE MODE |*/
188        /*|_________________|*/
189
190        case M4VSS3GPP_kEditVideoState_READ_WRITE:
191        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
192            {
193                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");
194
195                bSkipFrame = M4OSA_FALSE;
196
197                /**
198                * If we were decoding the clip, we must jump to be sure
199                * to get to the correct position. */
200                if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
201                {
202                    /**
203                    * Jump to target video time (tc = to-T) */
204                // Decorrelate input and output encoding timestamp to handle encoder prefetch
205                iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
206                    err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
207                        pC->pC1->pReaderContext,
208                        (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);
209
210                    if( M4NO_ERROR != err )
211                    {
212                        M4OSA_TRACE1_1(
213                            "M4VSS3GPP_intEditStepVideo:\
214                            READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
215                            err);
216                        return err;
217                    }
218
219                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
220                        pC->pC1->pReaderContext,
221                        (M4_StreamHandler *)pC->pC1->pVideoStream,
222                        &pC->pC1->VideoAU);
223
224                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
225                    {
226                        M4OSA_TRACE1_1(
227                            "M4VSS3GPP_intEditStepVideo:\
228                            READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
229                            err);
230                        return err;
231                    }
232
233                    M4OSA_TRACE2_3("A .... read  : cts  = %.0f + %ld [ 0x%x ]",
234                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
235                        pC->pC1->VideoAU.m_size);
236
237                    /* This frame has already been written in the BEGIN CUT step -> skip it */
238                    if( pC->pC1->VideoAU.m_CTS == iCts
239                        && pC->pC1->iVideoRenderCts >= iCts )
240                    {
241                        bSkipFrame = M4OSA_TRUE;
242                    }
243                }
244
245                /* This frame has already been written in the BEGIN CUT step -> skip it */
246                if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
247                    && (pC->pC1->VideoAU.m_CTS
248                    + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
249                {
250                    bSkipFrame = M4OSA_TRUE;
251                }
252
253                /**
254                * Remember the clip reading state */
255                pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
256                // Decorrelate input and output encoding timestamp to handle encoder prefetch
257                // Rounding is to compensate reader imprecision (m_CTS is actually an integer)
258                iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
259                iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;
260                /* Avoid writing a last frame of duration 0 */
261                if( iNextCts > pC->pC1->iEndTime )
262                    iNextCts = pC->pC1->iEndTime;
263
264                /**
265                * If the AU should be written, write it; otherwise just skip it */
266                if( ( M4OSA_FALSE == bSkipFrame)
267                    && (( pC->pC1->VideoAU.m_CTS >= iCts)
268                    && (pC->pC1->VideoAU.m_CTS < iNextCts)
269                    && (pC->pC1->VideoAU.m_size > 0)) )
270                {
271                    /**
272                    * Get the output AU to write into */
273                    err = pC->ShellAPI.pWriterDataFcts->pStartAU(
274                        pC->ewc.p3gpWriterContext,
275                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
276                        &pC->ewc.WriterVideoAU);
277
278                    if( M4NO_ERROR != err )
279                    {
280                        M4OSA_TRACE1_1(
281                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
282                            pWriterDataFcts->pStartAU(Video) returns 0x%x!",
283                            err);
284                        return err;
285                    }
286
287                    /**
288                    * Copy the input AU to the output AU */
289                    pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
290                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
291                    pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
292                        (M4OSA_Time)pC->pC1->iVoffset;
293                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
294                    offset = 0;
295                    /* for an H.264 stream, do not copy the first 4 bytes as they are
296                     header indicators */
297                    if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
298                        == M4DA_StreamTypeVideoMpeg4Avc )
299                        offset = 4;
300
301                    pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
302                    if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
303                    {
304                        M4OSA_TRACE1_2(
305                            "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
306                             MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
307                            pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
308                        return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
309                    }
310
311                    memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
312                        (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
313                        (pC->ewc.WriterVideoAU.size));
314
315                    /**
316                    * Update time info for the Counter Time System to be equal to the
317                    * bit-stream time */
318                    M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
319                    M4OSA_TRACE2_2("B ---- write : cts  = %lu [ 0x%x ]",
320                        pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);
321
322                    /**
323                    * Write the AU */
324                    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
325                        pC->ewc.p3gpWriterContext,
326                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
327                        &pC->ewc.WriterVideoAU);
328
329                    if( M4NO_ERROR != err )
330                    {
331                        /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
332                         file size is reached
333                        The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
334                        is returned*/
335                        if( M4WAR_WRITER_STOP_REQ == err )
336                        {
337                            M4OSA_TRACE1_0(
338                                "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
339                            return M4VSS3GPP_WAR_EDITING_DONE;
340                        }
341                        else
342                        {
343                            M4OSA_TRACE1_1(
344                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
345                                pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
346                                err);
347                            return err;
348                        }
349                    }
350
351                    /**
352                    * Read next AU for next step */
353                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
354                        pC->pC1->pReaderContext,
355                        (M4_StreamHandler *)pC->pC1->pVideoStream,
356                        &pC->pC1->VideoAU);
357
358                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
359                    {
360                        M4OSA_TRACE1_1(
361                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
362                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
363                            err);
364                        return err;
365                    }
366
367                    M4OSA_TRACE2_3("C .... read  : cts  = %.0f + %ld [ 0x%x ]",
368                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
369                        pC->pC1->VideoAU.m_size);
370                }
371                else
372                {
373                    /**
374                    * Decide whether to read or to increment the time */
375                    if( ( pC->pC1->VideoAU.m_size == 0)
376                        || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
377                    {
378                        /* Increment time by the encoding period (NO_MORE_AU or reader in advance) */
379                       // Decorrelate input and output encoding timestamp to handle encoder prefetch
380                       pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
381
382                        /* Switch (from AFTER_CUT) to normal mode because time is
383                        no more frozen */
384                        pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
385                    }
386                    else
387                    {
388                        /* In other cases (reader late), just let the reader catch up
389                         pC->ewc.dVTo */
390                        err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
391                            pC->pC1->pReaderContext,
392                            (M4_StreamHandler *)pC->pC1->pVideoStream,
393                            &pC->pC1->VideoAU);
394
395                        if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
396                        {
397                            M4OSA_TRACE1_1(
398                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
399                                m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
400                                err);
401                            return err;
402                        }
403
404                        M4OSA_TRACE2_3("D .... read  : cts  = %.0f + %ld [ 0x%x ]",
405                            pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
406                            pC->pC1->VideoAU.m_size);
407                    }
408                }
409            }
410            break;
411
412            /* ____________________ */
413            /*|                    |*/
414            /*| DECODE_ENCODE MODE |*/
415            /*|   BEGIN_CUT MODE   |*/
416            /*|____________________|*/
417
418        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
419        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
420            {
421                M4OSA_TRACE3_0(
422                    "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");
423
424            if ((pC->pC1->pSettings->FileType ==
425                     M4VIDEOEDITING_kFileType_ARGB8888) &&
426                (M4OSA_FALSE ==
427                    pC->pC1->pSettings->ClipProperties.bSetImageData)) {
428
429                err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
430                if( M4NO_ERROR != err ) {
431                    M4OSA_TRACE1_1(
432                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
433                        M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
434                    return err;
435                }
436            }
437                /**
438                * Decode the video up to the target time
439                (will jump to the previous RAP if needed ) */
440                // Decorrelate input and output encoding timestamp to handle encoder prefetch
441                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
442                if( M4NO_ERROR != err )
443                {
444                    M4OSA_TRACE1_1(
445                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
446                        M4VSS3GPP_intClipDecodeVideoUpToCts returns err=0x%x",
447                        err);
448                    return err;
449                }
450
451                /* If the decoding is not completed, do one more step with time frozen */
452                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
453                {
454                    return M4NO_ERROR;
455                }
456
457                /**
458                * Reset the video pre-processing error before calling the encoder */
459                pC->ewc.VppError = M4NO_ERROR;
460
461                M4OSA_TRACE2_0("E ++++ encode AU");
462
463                /**
464                * Encode the frame (rendering, filtering and writing will be done
465                * in encoder callbacks) */
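                /* Forcing an I-frame at the begin cut makes the output start on a
                 * sync sample, so the stream stays decodable from the cut point. */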
466                if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
467                    FrameMode = M4ENCODER_kIFrame;
468                else
469                    FrameMode = M4ENCODER_kNormalFrame;
470
471                // Decorrelate input and output encoding timestamp to handle encoder prefetch
472                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
473                pC->ewc.dInputVidCts, FrameMode);
474                /**
475                * Check if we had a VPP error... */
476                if( M4NO_ERROR != pC->ewc.VppError )
477                {
478                    M4OSA_TRACE1_1(
479                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
480                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
481                        pC->ewc.VppError);
482#ifdef M4VSS_SUPPORT_OMX_CODECS
483
484                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
485                    {
486#endif //M4VSS_SUPPORT_OMX_CODECS
487
488                        return pC->ewc.VppError;
489#ifdef M4VSS_SUPPORT_OMX_CODECS
490
491                    }
492
493#endif                                   //M4VSS_SUPPORT_OMX_CODECS
494
495                }
496                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
497                {
498                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
499                    {
500                        M4OSA_TRACE1_0(
501                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
502                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
503                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
504                    }
505                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
506                    file size is reached
507                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
508                    is returned*/
509                    else if( M4WAR_WRITER_STOP_REQ == err )
510                    {
511                        M4OSA_TRACE1_0(
512                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
513                        return M4VSS3GPP_WAR_EDITING_DONE;
514                    }
515                    else
516                    {
517                        M4OSA_TRACE1_1(
518                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
519                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
520                            err);
521                        return err;
522                    }
523                }
524
525                /**
526                * Increment time by the encoding period (for begin cut, do not increment to not
527                * lose P-frames) */
528                if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
529                {
530                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
531                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
532                }
533            }
534            break;
535
536            /* _________________ */
537            /*|                 |*/
538            /*| TRANSITION MODE |*/
539            /*|_________________|*/
540
541        case M4VSS3GPP_kEditVideoState_TRANSITION:
542            {
543                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");
544
545                /* Don't decode more than needed */
546                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
547                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
548                {
549                    /**
550                    * Decode the clip1 video up to the target time
551                    * (will jump to the previous RAP if needed) */
552                    if ((pC->pC1->pSettings->FileType ==
553                          M4VIDEOEDITING_kFileType_ARGB8888) &&
554                        (M4OSA_FALSE ==
555                         pC->pC1->pSettings->ClipProperties.bSetImageData)) {
556
557                        err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1);
558                        if( M4NO_ERROR != err ) {
559                            M4OSA_TRACE1_1(
560                                "M4VSS3GPP_intEditStepVideo: TRANSITION:\
561                                M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
562                            return err;
563                        }
564                    }
565                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
566                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
567                         (M4OSA_Int32)pC->ewc.dInputVidCts);
568                    if( M4NO_ERROR != err )
569                    {
570                        M4OSA_TRACE1_1(
571                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
572                            M4VSS3GPP_intClipDecodeVideoUpToCts(C1) returns err=0x%x",
573                            err);
574                        return err;
575                    }
576
577                    /* If the decoding is not completed, do one more step with time frozen */
578                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
579                    {
580                        return M4NO_ERROR;
581                    }
582                }
583
584                /* Don't decode more than needed */
585                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
586                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
587                {
588                    /**
589                    * Decode the clip2 video up to the target time
590                        (will jump to the previous RAP if needed) */
591                    if ((pC->pC2->pSettings->FileType ==
592                          M4VIDEOEDITING_kFileType_ARGB8888) &&
593                        (M4OSA_FALSE ==
594                          pC->pC2->pSettings->ClipProperties.bSetImageData)) {
595
596                        err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2);
597                        if( M4NO_ERROR != err ) {
598                            M4OSA_TRACE1_1(
599                                "M4VSS3GPP_intEditStepVideo: TRANSITION:\
600                                M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err);
601                            return err;
602                        }
603                    }
604
605                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
606                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
607                         (M4OSA_Int32)pC->ewc.dInputVidCts);
608                    if( M4NO_ERROR != err )
609                    {
610                        M4OSA_TRACE1_1(
611                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
612                            M4VSS3GPP_intClipDecodeVideoUpToCts(C2) returns err=0x%x",
613                            err);
614                        return err;
615                    }
616
617                    /* If the decoding is not completed, do one more step with time frozen */
618                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
619                    {
620                        return M4NO_ERROR;
621                    }
622                }
623
624                /**
625                * Reset the video pre-processing error before calling the encoder */
626                pC->ewc.VppError = M4NO_ERROR;
627
628                M4OSA_TRACE2_0("F **** blend AUs");
629
630                /**
631                * Encode the frame (rendering, filtering and writing will be done
632                * in encoder callbacks) */
633                // Decorrelate input and output encoding timestamp to handle encoder prefetch
634                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
635                    pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);
636
637                /**
638                * If encode returns a process frame error, it is likely to be a VPP error */
639                if( M4NO_ERROR != pC->ewc.VppError )
640                {
641                    M4OSA_TRACE1_1(
642                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
643                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
644                        pC->ewc.VppError);
645#ifdef M4VSS_SUPPORT_OMX_CODECS
646
647                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
648                    {
649
650#endif //M4VSS_SUPPORT_OMX_CODECS
651
652                        return pC->ewc.VppError;
653#ifdef M4VSS_SUPPORT_OMX_CODECS
654
655                    }
656
657#endif //M4VSS_SUPPORT_OMX_CODECS
658
659                }
660                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
661                {
662                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
663                    {
664                        M4OSA_TRACE1_0(
665                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
666                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
667                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
668                    }
669
670                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
671                     file size is reached
672                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
673                     returned*/
674                    else if( M4WAR_WRITER_STOP_REQ == err )
675                    {
676                        M4OSA_TRACE1_0(
677                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
678                        return M4VSS3GPP_WAR_EDITING_DONE;
679                    }
680                    else
681                    {
682                        M4OSA_TRACE1_1(
683                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
684                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
685                            err);
686                        return err;
687                    }
688                }
689
690                /**
691                * Increment time by the encoding period */
692                // Decorrelate input and output encoding timestamp to handle encoder prefetch
693                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
694            }
695            break;
696
697            /* ____________ */
698            /*|            |*/
699            /*| ERROR CASE |*/
700            /*|____________|*/
701
702        default:
703            M4OSA_TRACE1_1(
704                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
705                returning M4VSS3GPP_ERR_INTERNAL_STATE",
706                pC->Vstate);
707            return M4VSS3GPP_ERR_INTERNAL_STATE;
708    }
709
710    /**
711    * Return with no error */
712    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
713    return M4NO_ERROR;
714}
715
716/**
717 ******************************************************************************
718 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
719 * @brief    Check which video process mode we must use, depending on the output CTS.
720 * @param   pC    (IN/OUT) Internal edit context
721 ******************************************************************************
722 */
723static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
724    M4VSS3GPP_InternalEditContext *pC )
725{
726    M4OSA_ERR err;
727    // Decorrelate input and output encoding timestamp to handle encoder prefetch
728    const M4OSA_Int32  t = (M4OSA_Int32)pC->ewc.dInputVidCts;
729    /**< Transition duration */
730    const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
731
732    M4OSA_Int32 iTmp;
733
734    const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate;
735
736    /**
737    * Check if Clip1 is on its begin cut, or in an effect zone */
738    M4VSS3GPP_intCheckVideoEffects(pC, 1);
739
740    /**
741    * Check if we are in the transition with next clip */
742    if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) )
743    {
744        /**
745        * We are in a transition */
746        pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION;
747        pC->bTransitionEffect = M4OSA_TRUE;
748
749        /**
750        * Open second clip for transition, if not yet opened */
751        if( M4OSA_NULL == pC->pC2 )
752        {
753            pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE;
754
755            err = M4VSS3GPP_intOpenClip(pC, &pC->pC2,
756                &pC->pClipList[pC->uiCurrentClip + 1]);
757
758            if( M4NO_ERROR != err )
759            {
760                M4OSA_TRACE1_1(
761                    "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!",
762                    err);
763                return err;
764            }
765
766            /**
767            * Add current video output CTS to the clip offset
768            * (audio output CTS is not yet at the transition, so audio
769            *  offset can't be updated yet). */
770            // Decorrelate input and output encoding timestamp to handle encoder prefetch
771            pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts;
772
773            /**
774            * 2005-03-24: BugFix for audio-video synchro:
775            * Update transition duration due to the actual video transition beginning time.
776            * It will avoid desynchronization when doing the audio transition. */
777           // Decorrelate input and output encoding timestamp to handle encoder prefetch
778            iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\
779             - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset);
780            if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration)
781            /**< Test in case of a very short transition */
782            {
783                pC->pTransitionList[pC->
784                    uiCurrentClip].uiTransitionDuration -= iTmp;
785
786                /**
787                * Don't forget to also correct the total duration used for the progress bar
788                * (it was computed with the original transition duration). */
789                pC->ewc.iOutputDuration += iTmp;
790            }
791            /**< No "else" here because it's hard to predict the effect of a 0-duration transition... */
792        }
793
794        /**
795        * Check effects for clip2 */
796        M4VSS3GPP_intCheckVideoEffects(pC, 2);
797    }
798    else
799    {
800        /**
801        * We are not in a transition */
802        pC->bTransitionEffect = M4OSA_FALSE;
803
804        /* If there is an effect we go to decode/encode mode */
805        if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) ||
806            (pC->pC1->pSettings->FileType ==
807             M4VIDEOEDITING_kFileType_ARGB8888) ||
808            (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) {
809            pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
810        }
811        /* We do a begin cut, except if already done (time is not progressing because we want
812        to catch all P-frames after the cut) */
813        else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
814        {
815            if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
816                pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
817                pC->bEncodeTillEoF = M4OSA_TRUE;
818            } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
819                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
820                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
821            } else {
822                pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
823            }
824        }
825        /* Else we are in default copy/paste mode */
826        else
827        {
828            if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
829                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
830            {
831                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
832            }
833            else if( pC->bIsMMS == M4OSA_TRUE )
834            {
835                M4OSA_UInt32 currentBitrate;
836                M4OSA_ERR err = M4NO_ERROR;
837
838                /* Do we need to reencode the video to downgrade the bitrate or not ? */
839                /* Let's compute the current bitrate of the edited clip */
840                err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption(
841                    pC->pC1->pReaderContext,
842                    M4READER_kOptionID_Bitrate, &currentBitrate);
843
844                if( err != M4NO_ERROR )
845                {
846                    M4OSA_TRACE1_1(
847                        "M4VSS3GPP_intCheckVideoMode:\
848                        Error when getting next bitrate of edited clip: 0x%x",
849                        err);
850                    return err;
851                }
852
853                /* Remove audio bitrate */
854                currentBitrate -= 12200;
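                /* 12200 bps matches the AMR-NB 12.2 kbps mode, presumably the audio
                 * bitrate assumed for MMS clips. */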
855
856                /* Test if we go into copy/paste mode or into decode/encode mode */
857                if( currentBitrate > pC->uiMMSVideoBitrate )
858                {
859                    pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
860                }
861                else
862                {
863                    pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
864                }
865            }
866            else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
867                    (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
868                    pC->bEncodeTillEoF == M4OSA_FALSE)
869            {
870                /**
871                 * Test if we go into copy/paste mode or into decode/encode mode
872                 * If an external effect has been applied on the current clip
873                 * then continue to be in decode/encode mode till end of
874                 * clip to avoid H.264 distortion.
875                 */
876                pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
877            }
878        }
879    }
880
881    /**
882    * Check if we need to create an encoder */
883    if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate)
884        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
885        == previousVstate)) /**< read mode */
886        && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate)
887        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate)
888        || (M4VSS3GPP_kEditVideoState_TRANSITION
889        == pC->Vstate)) /**< encode mode */
890        && pC->bIsMMS == M4OSA_FALSE )
891    {
892        /**
893        * Create the encoder, if not created already*/
894        if (pC->ewc.encoderState == M4VSS3GPP_kNoEncoder) {
895            err = M4VSS3GPP_intCreateVideoEncoder(pC);
896
897            if( M4NO_ERROR != err )
898            {
899                M4OSA_TRACE1_1(
900                    "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder \
901                     returns 0x%x!", err);
902                return err;
903            }
904        }
905    }
906    else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL )
907    {
908        /**
909        * Create the encoder */
910        err = M4VSS3GPP_intCreateVideoEncoder(pC);
911
912        if( M4NO_ERROR != err )
913        {
914            M4OSA_TRACE1_1(
915                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
916                err);
917            return err;
918        }
919    }
920
921    /**
922    * When we go from filtering to read/write, we must act like a begin cut,
923    * because the last filtered image may be different than the original image. */
924    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
925        || (M4VSS3GPP_kEditVideoState_TRANSITION
926        == previousVstate)) /**< encode mode */
927        && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */
928        && (pC->bEncodeTillEoF == M4OSA_FALSE) )
929    {
930        pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
931    }
932
933    /**
934    * Check if we need to destroy the encoder */
935    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
936        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
937        || (M4VSS3GPP_kEditVideoState_TRANSITION
938        == previousVstate)) /**< encode mode */
939        && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate)
940        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
941        == pC->Vstate)) /**< read mode */
942        && pC->bIsMMS == M4OSA_FALSE )
943    {
944        /**
945        * Destroy the previously created encoder */
946        err = M4VSS3GPP_intDestroyVideoEncoder(pC);
947
948        if( M4NO_ERROR != err )
949        {
950            M4OSA_TRACE1_1(
951                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!",
952                err);
953            return err;
954        }
955    }
956
957    /**
958    * Return with no error */
959    M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR");
960    return M4NO_ERROR;
961}
962
963/******************************************************************************
964 * M4OSA_ERR M4VSS3GPP_intStartAU()
965 * @brief    StartAU writer-like interface used for the VSS 3GPP only
966 * @note
967 * @param    pContext: (IN) It is the VSS 3GPP context in our case
968 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
969 * @param    pAU:      (IN/OUT) Access Unit to be prepared.
970 * @return    M4NO_ERROR: there is no error
971 ******************************************************************************
972 */
973M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext,
974                               M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
975{
976    M4OSA_ERR err;
977    M4OSA_UInt32 uiMaxAuSize;
978
979    /**
980    * Given context is actually the VSS3GPP context */
981    M4VSS3GPP_InternalEditContext *pC =
982        (M4VSS3GPP_InternalEditContext *)pContext;
983
984    /**
985    * Get the output AU to write into */
986    err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext,
987        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
988
989    if( M4NO_ERROR != err )
990    {
991        M4OSA_TRACE1_1(
992            "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!",
993            err);
994        return err;
995    }
996
997    /**
998    *    Return */
999    M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR");
1000    return M4NO_ERROR;
1001}
1002
1003/******************************************************************************
1004 * M4OSA_ERR M4VSS3GPP_intProcessAU()
1005 * @brief    ProcessAU writer-like interface used for the VSS 3GPP only
1006 * @note
1007 * @param    pContext: (IN) It is the VSS 3GPP context in our case
1008 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
1009 * @param    pAU:      (IN/OUT) Access Unit to be written
1010 * @return    M4NO_ERROR: there is no error
1011 ******************************************************************************
1012 */
1013M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
1014                                 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
1015{
1016    M4OSA_ERR err;
1017
1018    /**
1019    * Given context is actually the VSS3GPP context */
1020    M4VSS3GPP_InternalEditContext *pC =
1021        (M4VSS3GPP_InternalEditContext *)pContext;
1022
1023    /**
1024    * Fix the encoded AU time */
1025    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1026    pC->ewc.dOutputVidCts = pAU->CTS;
1027    /**
1028    * Update time info for the Counter Time System to be equal to the bit-stream time */
1029    M4VSS3GPP_intUpdateTimeInfo(pC, pAU);
1030
1031    /**
1032    * Write the AU */
1033    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
1034        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
1035
1036    if( M4NO_ERROR != err )
1037    {
1038        M4OSA_TRACE1_1(
1039            "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
1040            err);
1041        return err;
1042    }
1043
1044    /**
1045    *    Return */
1046    M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
1047    return M4NO_ERROR;
1048}
1049
1050/**
1051 ******************************************************************************
1052 * M4OSA_ERR M4VSS3GPP_intVPP()
1053 * @brief    We implement our own VideoPreProcessing function
1054 * @note    It is called by the video encoder
1055 * @param    pContext    (IN) VPP context, which actually is the VSS 3GPP context in our case
1056 * @param    pPlaneIn    (IN)
1057 * @param    pPlaneOut    (IN/OUT) Pointer to an array of 3 planes that will contain the output
1058 *                                  YUV420 image
1059 * @return    M4NO_ERROR:    No error
1060 ******************************************************************************
1061 */
1062M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
1063                           M4VIFI_ImagePlane *pPlaneOut )
1064{
1065    M4OSA_ERR err = M4NO_ERROR;
1066    M4_MediaTime ts;
1067    M4VIFI_ImagePlane *pTmp = M4OSA_NULL;
1068    M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ;
1069    M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
1070    M4VIFI_ImagePlane pTemp1[3],pTemp2[3];
1071    M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3];
1072    M4OSA_UInt32  i = 0, yuvFrameWidth = 0, yuvFrameHeight = 0;
1073    M4OSA_Bool bSkipFrameEffect = M4OSA_FALSE;
1074    /**
1075    * VPP context is actually the VSS3GPP context */
1076    M4VSS3GPP_InternalEditContext *pC =
1077        (M4VSS3GPP_InternalEditContext *)pContext;
1078
1079    memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane));
1080    memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane));
1081    memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane));
1082    memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane));
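    /* The temporary plane descriptors are zero-initialized so that the cleanup
     * loop at the end of the transition case frees only buffers that were
     * actually allocated. */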
1083
1084    /**
1085    * Reset VPP error remembered in context */
1086    pC->ewc.VppError = M4NO_ERROR;
1087
1088    /**
1089    * At the end of the editing, we may be called when no more clip is loaded.
1090    * (because to close the encoder properly it must be stepped once or twice...) */
1091    if( M4OSA_NULL == pC->pC1 )
1092    {
1093        /**
1094        * We must fill the input of the encoder with a dummy image, because
1095        * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */
1096        memset((void *)pPlaneOut[0].pac_data,0,
1097            pPlaneOut[0].u_stride * pPlaneOut[0].u_height);
1098        memset((void *)pPlaneOut[1].pac_data,0,
1099            pPlaneOut[1].u_stride * pPlaneOut[1].u_height);
1100        memset((void *)pPlaneOut[2].pac_data,0,
1101            pPlaneOut[2].u_stride * pPlaneOut[2].u_height);
1102
1103        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)");
1104        return M4NO_ERROR;
1105    }
1106
1107    /**
1108    **************** Transition case ****************/
1109    if( M4OSA_TRUE == pC->bTransitionEffect )
1110    {
1111
1112        err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth,
1113                                          pC->ewc.uiVideoHeight);
1114        if (M4NO_ERROR != err)
1115        {
1116            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \
1117                           returning M4NO_ERROR", err);
1118            pC->ewc.VppError = err;
1119            return M4NO_ERROR; /**< Return no error to the encoder core
1120                               (else it may leak in some situations...) */
1121        }
1122
1123        err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth,
1124                                          pC->ewc.uiVideoHeight);
1125        if (M4NO_ERROR != err)
1126        {
1127            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \
1128                           returning M4NO_ERROR", err);
1129            pC->ewc.VppError = err;
1130            return M4NO_ERROR; /**< Return no error to the encoder core
1131                              (else it may leak in some situations...) */
1132        }
1133
1134        err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1135            pC->ewc.uiVideoHeight);
1136        if( M4NO_ERROR != err )
1137        {
1138            M4OSA_TRACE1_1(
1139                "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1140                returning M4NO_ERROR",
1141                err);
1142            pC->ewc.VppError = err;
1143            return
1144                M4NO_ERROR; /**< Return no error to the encoder core
1145                            (else it may leak in some situations...) */
1146        }
1147
1148        err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth,
1149            pC->ewc.uiVideoHeight);
1150        if( M4NO_ERROR != err )
1151        {
1152            M4OSA_TRACE1_1(
1153                "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1154                returning M4NO_ERROR",
1155                err);
1156            pC->ewc.VppError = err;
1157            return
1158                M4NO_ERROR; /**< Return no error to the encoder core
1159                            (else it may leak in some situations...) */
1160        }
1161
1162        err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth,
1163            pC->ewc.uiVideoHeight);
1164        if( M4NO_ERROR != err )
1165        {
1166            M4OSA_TRACE1_1(
1167                "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(5) returns 0x%x,\
1168                returning M4NO_ERROR",
1169                err);
1170            pC->ewc.VppError = err;
1171            return
1172                M4NO_ERROR; /**< Return no error to the encoder core
1173                            (else it may leak in some situations...) */
1174        }
1175
1176        /**
1177        * Compute the time in the clip1 base: ts = to - Offset */
1178        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1179        ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1180
1181        /**
1182        * Render Clip1 */
1183        if( pC->pC1->isRenderDup == M4OSA_FALSE )
1184        {
1185            err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE,
1186                                                pTempPlaneClip1, pTemp1,
1187                                                pPlaneOut);
1188            if ((M4NO_ERROR != err) &&
1189                 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
1190                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1191                    M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
1192                pC->ewc.VppError = err;
1193                /** Return no error to the encoder core
1194                  * else it may leak in some situations.*/
1195                return M4NO_ERROR;
1196            }
1197        }
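        /* If the renderer reported no new frame, or the clip is in duplicate mode,
         * re-use the last decoded planes of clip 1 for this step. */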
1198        if ((pC->pC1->isRenderDup == M4OSA_TRUE) ||
1199             (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
1200            pTmp = pC->yuv1;
1201            if (pC->pC1->lastDecodedPlane != M4OSA_NULL) {
1202                /* Copy last decoded plane to output plane */
1203                memcpy((void *)pTmp[0].pac_data,
1204                    (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1205                    (pTmp[0].u_height * pTmp[0].u_width));
1206                memcpy((void *)pTmp[1].pac_data,
1207                    (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1208                    (pTmp[1].u_height * pTmp[1].u_width));
1209                memcpy((void *)pTmp[2].pac_data,
1210                    (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1211                    (pTmp[2].u_height * pTmp[2].u_width));
1212            }
1213            pC->pC1->lastDecodedPlane = pTmp;
1214        }
1215
1216        /**
1217        * Compute the time in the clip2 base: ts = to - Offset */
1218        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1219        ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset;
1220        /**
1221        * Render Clip2 */
1222        if( pC->pC2->isRenderDup == M4OSA_FALSE )
1223        {
1224
1225            err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, ts, M4OSA_FALSE,
1226                                                pTempPlaneClip2, pTemp2,
1227                                                pPlaneOut);
1228            if ((M4NO_ERROR != err) &&
1229                 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) {
1230                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1231                    M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err);
1232                pC->ewc.VppError = err;
1233                /** Return no error to the encoder core
1234                  * else it may leak in some situations.*/
1235                return M4NO_ERROR;
1236            }
1237        }
1238        if ((pC->pC2->isRenderDup == M4OSA_TRUE) ||
1239             (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) {
1240            pTmp = pC->yuv2;
1241            if (pC->pC2->lastDecodedPlane != M4OSA_NULL) {
1242                /* Copy last decoded plane to output plane */
1243                memcpy((void *)pTmp[0].pac_data,
1244                    (void *)pC->pC2->lastDecodedPlane[0].pac_data,
1245                    (pTmp[0].u_height * pTmp[0].u_width));
1246                memcpy((void *)pTmp[1].pac_data,
1247                    (void *)pC->pC2->lastDecodedPlane[1].pac_data,
1248                    (pTmp[1].u_height * pTmp[1].u_width));
1249                memcpy((void *)pTmp[2].pac_data,
1250                    (void *)pC->pC2->lastDecodedPlane[2].pac_data,
1251                    (pTmp[2].u_height * pTmp[2].u_width));
1252            }
1253            pC->pC2->lastDecodedPlane = pTmp;
1254        }
1255
1256
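        /* Blend the frames rendered for clip 1 and clip 2 into the output plane
         * according to the current transition. */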
1257        pTmp = pPlaneOut;
1258        err = M4VSS3GPP_intVideoTransition(pC, pTmp);
1259
1260        if( M4NO_ERROR != err )
1261        {
1262            M4OSA_TRACE1_1(
1263                "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\
1264                returning M4NO_ERROR",
1265                err);
1266            pC->ewc.VppError = err;
1267            return  M4NO_ERROR; /**< Return no error to the encoder core
1268                                (else it may leak in some situations...) */
1269        }
1270        for (i=0; i < 3; i++)
1271        {
1272            if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) {
1273                free(pTempPlaneClip2[i].pac_data);
1274                pTempPlaneClip2[i].pac_data = M4OSA_NULL;
1275            }
1276
1277            if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) {
1278                free(pTempPlaneClip1[i].pac_data);
1279                pTempPlaneClip1[i].pac_data = M4OSA_NULL;
1280            }
1281
1282            if (pTemp2[i].pac_data != M4OSA_NULL) {
1283                free(pTemp2[i].pac_data);
1284                pTemp2[i].pac_data = M4OSA_NULL;
1285            }
1286
1287            if (pTemp1[i].pac_data != M4OSA_NULL) {
1288                free(pTemp1[i].pac_data);
1289                pTemp1[i].pac_data = M4OSA_NULL;
1290            }
1291        }
1292    }
1293    /**
1294    **************** No Transition case ****************/
1295    else
1296    {
1297        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case");
1298        /**
1299        * Compute the time in the clip base: ts = to - Offset */
1300        ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1301        pC->bIssecondClip = M4OSA_FALSE;
1302        /**
1303        * Render */
1304        if (pC->pC1->isRenderDup == M4OSA_FALSE) {
1305            M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false");
1306            /**
1307            *   Check if resizing is needed */
1308            if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
1309                if ((pC->pC1->pSettings->FileType ==
1310                            M4VIDEOEDITING_kFileType_ARGB8888) &&
1311                        (pC->nbActiveEffects == 0) &&
1312                        (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) {
1313                    err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1314                              pC->pC1->pViDecCtxt,
1315                              M4DECODER_kOptionID_EnableYuvWithEffect,
1316                              (M4OSA_DataOption)M4OSA_TRUE);
1317                    if (M4NO_ERROR == err ) {
1318                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1319                                  pC->pC1->pViDecCtxt, &ts,
1320                                  pPlaneOut, M4OSA_TRUE);
1321                    }
1322                } else {
1323                    if (pC->pC1->pSettings->FileType ==
1324                            M4VIDEOEDITING_kFileType_ARGB8888) {
1325                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1326                                  pC->pC1->pViDecCtxt,
1327                                  M4DECODER_kOptionID_EnableYuvWithEffect,
1328                                  (M4OSA_DataOption)M4OSA_FALSE);
1329                    }
1330                    if (M4NO_ERROR == err) {
1331                        err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1332                                  pC->pC1->pViDecCtxt, &ts,
1333                                  pC->pC1->m_pPreResizeFrame, M4OSA_TRUE);
1334                    }
1335                }
1336                if (M4NO_ERROR != err) {
1337                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1338                        m_pFctRender() returns error 0x%x", err);
1339                    pC->ewc.VppError = err;
1340                    return M4NO_ERROR;
1341                }
1342                if (pC->pC1->pSettings->FileType !=
1343                        M4VIDEOEDITING_kFileType_ARGB8888) {
1344                    if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
1345                        // Save width and height of un-rotated frame
1346                        yuvFrameWidth = pC->pC1->m_pPreResizeFrame[0].u_width;
1347                        yuvFrameHeight = pC->pC1->m_pPreResizeFrame[0].u_height;
1348                        err = M4VSS3GPP_intRotateVideo(pC->pC1->m_pPreResizeFrame,
1349                                pC->pC1->pSettings->ClipProperties.videoRotationDegrees);
1350                        if (M4NO_ERROR != err) {
1351                            M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1352                                rotateVideo() returns error 0x%x", err);
1353                            pC->ewc.VppError = err;
1354                            return M4NO_ERROR;
1355                        }
1356                    }
1357                }
1358
1359                if (pC->nbActiveEffects > 0) {
1360                    pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE;
1361                    /**
1362                    * If we do modify the image, we need an intermediate
1363                    * image plane */
1364                    err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1365                            pC->pC1->m_pPreResizeFrame[0].u_width,
1366                            pC->pC1->m_pPreResizeFrame[0].u_height);
1367                    if (M4NO_ERROR != err) {
1368                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1369                            M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
1370                        pC->ewc.VppError = err;
1371                        return M4NO_ERROR;
1372                    }
1373                    /* If the video frame needs to be resized, the overlay is applied after
1374                     * the frame has been rendered with the rendering mode.
1375                     * Hence skip the framing (overlay) effect when applying the video effect here. */
1376                    bSkipFrameEffect = M4OSA_TRUE;
1377                    err = M4VSS3GPP_intApplyVideoEffect(pC,
1378                            pC->pC1->m_pPreResizeFrame, pTemp1, bSkipFrameEffect);
1379                    if (M4NO_ERROR != err) {
1380                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1381                            M4VSS3GPP_intApplyVideoEffect() error 0x%x", err);
1382                        pC->ewc.VppError = err;
1383                        return M4NO_ERROR;
1384                    }
1385                    pDecoderRenderFrame= pTemp1;
1386
1387                } else {
1388                    pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
1389                }
1390                /* Prepare a temporary overlay buffer if an overlay exists */
1391                if (pC->bClip1ActiveFramingEffect) {
1392                    err = M4VSS3GPP_intAllocateYUV420(pTemp2,
1393                        pPlaneOut[0].u_width, pPlaneOut[0].u_height);
1394                    if (M4NO_ERROR != err) {
1395                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \
1396                            returns 0x%x, returning M4NO_ERROR", err);
1397                        pC->ewc.VppError = err;
1398                        return M4NO_ERROR;
1399                    }
1400                    pTmp = pTemp2;
1401                } else {
1402                    pTmp = pPlaneOut;
1403                }
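                /* Note: when a framing (overlay) effect is active, the rendering
                 * mode output goes into the temporary pTemp2 planes so that the
                 * overlay can then be composited into pPlaneOut; otherwise the
                 * frame is rendered directly into pPlaneOut. */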
1404
1405                /* Do rendering mode. */
1406                if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
1407                    (pC->pC1->pSettings->FileType !=
1408                        M4VIDEOEDITING_kFileType_ARGB8888)) {
1409
1410                    err = M4VSS3GPP_intApplyRenderingMode(pC,
1411                              pC->pC1->pSettings->xVSS.MediaRendering,
1412                              pDecoderRenderFrame, pTmp);
1413                    if (M4NO_ERROR != err) {
1414                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1415                            M4VSS3GPP_intApplyRenderingMode() error 0x%x ", err);
1416                        pC->ewc.VppError = err;
1417                        return M4NO_ERROR;
1418                    }
1419                }
1420
1421                /* Apply the overlay if one exists */
1422                if (pC->bClip1ActiveFramingEffect) {
1423                    pDecoderRenderFrame = pTmp;
1424                    pTmp = pPlaneOut;
1425                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
1426                        pDecoderRenderFrame, pTmp);
1427                    if (M4NO_ERROR != err) {
1428                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1429                            M4VSS3GPP_intApplyVideoOverlay() error 0x%x ", err);
1430                        pC->ewc.VppError = err;
1431                        return M4NO_ERROR;
1432                    }
1433                }
1434
1435                if ((pC->pC1->pSettings->FileType ==
1436                        M4VIDEOEDITING_kFileType_ARGB8888) &&
1437                    (pC->nbActiveEffects == 0) &&
1438                    (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
1439
1440                    err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
1441                              pC->pC1->pViDecCtxt,
1442                              M4DECODER_kOptionID_YuvWithEffectNonContiguous,
1443                              (M4OSA_DataOption)pTmp);
1444                    if (M4NO_ERROR != err) {
1445                        pC->ewc.VppError = err;
1446                        return M4NO_ERROR;
1447                    }
1448                    pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE;
1449                }
1450
1451                // Restore the original width/height of the pre-resize frame plane (a 90/270 degree rotation swapped them)
1452                if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees &&
1453                    180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) {
1454
1455                    M4VSS3GPP_intSetYUV420Plane(pC->pC1->m_pPreResizeFrame,
1456                                                yuvFrameWidth, yuvFrameHeight);
1457                }
1458            }
1459            else
1460            {
1461                M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required");
1462                if (pC->nbActiveEffects > 0) {
1463                    /** If we do modify the image, we need an
1464                     * intermediate image plane */
1465                    err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1466                              pC->ewc.uiVideoWidth,
1467                              pC->ewc.uiVideoHeight);
1468                    if (M4NO_ERROR != err) {
1469                        pC->ewc.VppError = err;
1470                        return M4NO_ERROR;
1471                    }
1472                    pDecoderRenderFrame = pTemp1;
1473                }
1474                else {
1475                    pDecoderRenderFrame = pPlaneOut;
1476                }
1477
1478                pTmp = pPlaneOut;
1479                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1480                          pC->pC1->pViDecCtxt, &ts,
1481                          pDecoderRenderFrame, M4OSA_TRUE);
1482                if (M4NO_ERROR != err) {
1483                    pC->ewc.VppError = err;
1484                    return M4NO_ERROR;
1485                }
1486
1487                if (pC->nbActiveEffects > 0) {
1488                    /* Here we do not skip the overlay effect since
1489                     * overlay and video frame are both of same resolution */
1490                    bSkipFrameEffect = M4OSA_FALSE;
1491                    err = M4VSS3GPP_intApplyVideoEffect(pC,
1492                              pDecoderRenderFrame,pPlaneOut,bSkipFrameEffect);
1493                    if (M4NO_ERROR != err) {
1494                        pC->ewc.VppError = err;
1495                        return M4NO_ERROR;
1496                    }
1497                }
1498            }
1499            pC->pC1->lastDecodedPlane = pTmp;
1500            pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts;
1501
1502        } else {
1503            M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true");
1504
1505            if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) {
1506                /**
1507                * Copy last decoded plane to output plane */
1508                memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data,
1509                 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1510                 (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width));
1511
1512                memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data,
1513                 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1514                 (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width));
1515
1516                memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data,
1517                 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1518                 (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width));
1519
1520                if(pC->nbActiveEffects > 0) {
1521                    /**
1522                    * If we do modify the image, we need an
1523                    * intermediate image plane */
1524                    err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1525                              pC->pC1->m_pPreResizeFrame[0].u_width,
1526                              pC->pC1->m_pPreResizeFrame[0].u_height);
1527                    if (M4NO_ERROR != err) {
1528                        pC->ewc.VppError = err;
1529                        return M4NO_ERROR;
1530                    }
1531                    /* If the video frame needs to be resized, the overlay is applied after
1532                     * the frame has been rendered with the rendering mode.
1533                     * Hence skip the framing (overlay) effect when applying the video effect here. */
1534                    bSkipFrameEffect = M4OSA_TRUE;
1535                    err = M4VSS3GPP_intApplyVideoEffect(pC,
1536                              pC->pC1->m_pPreResizeFrame,pTemp1, bSkipFrameEffect);
1537                    if (M4NO_ERROR != err) {
1538                        pC->ewc.VppError = err;
1539                        return M4NO_ERROR;
1540                    }
1541                    pDecoderRenderFrame= pTemp1;
1542                } else {
1543                    pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
1544                }
1545                /* Prepare a temporary overlay buffer if an overlay exists */
1546                if (pC->bClip1ActiveFramingEffect) {
1547                    err = M4VSS3GPP_intAllocateYUV420(
1548                        pTemp2, pC->ewc.uiVideoWidth, pC->ewc.uiVideoHeight);
1549                    if (M4NO_ERROR != err) {
1550                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \
1551                            returns 0x%x, returning M4NO_ERROR", err);
1552                        pC->ewc.VppError = err;
1553                        return M4NO_ERROR;
1554                    }
1555                    pTmp = pTemp2;
1556                } else {
1557                    pTmp = pPlaneOut;
1558                }
1559                /* Do rendering mode */
1560                err = M4VSS3GPP_intApplyRenderingMode(pC,
1561                          pC->pC1->pSettings->xVSS.MediaRendering,
1562                          pDecoderRenderFrame, pTmp);
1563                if (M4NO_ERROR != err) {
1564                    pC->ewc.VppError = err;
1565                    return M4NO_ERROR;
1566                }
1567                /* Apply the overlay if one exists */
1568                pTmp = pPlaneOut;
1569                if (pC->bClip1ActiveFramingEffect) {
1570                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
1571                        pTemp2, pTmp);
1572                    if (M4NO_ERROR != err) {
1573                        M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1574                            M4VSS3GPP_intApplyVideoOverlay() error 0x%x ", err);
1575                        pC->ewc.VppError = err;
1576                        return M4NO_ERROR;
1577                    }
1578                }
1579            } else {
1580
1581                err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1582                          pC->ewc.uiVideoWidth,
1583                          pC->ewc.uiVideoHeight);
1584                if (M4NO_ERROR != err) {
1585                    pC->ewc.VppError = err;
1586                    return M4NO_ERROR;
1587                }
1588                /**
1589                 * Copy last decoded plane to output plane */
1590                memcpy((void *)pLastDecodedFrame[0].pac_data,
1591                 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1592                 (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width));
1593
1594                memcpy((void *)pLastDecodedFrame[1].pac_data,
1595                 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1596                 (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width));
1597
1598                memcpy((void *)pLastDecodedFrame[2].pac_data,
1599                 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1600                 (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width));
1601
1602                pTmp = pPlaneOut;
1603                /**
1604                * Check if there is a effect */
1605                if(pC->nbActiveEffects > 0) {
1606                    /* Here we do not skip the overlay effect since
1607                     * overlay and video are both of same resolution */
1608                    bSkipFrameEffect = M4OSA_FALSE;
1609                    err = M4VSS3GPP_intApplyVideoEffect(pC,
1610                              pLastDecodedFrame, pTmp,bSkipFrameEffect);
1611                    if (M4NO_ERROR != err) {
1612                        pC->ewc.VppError = err;
1613                        return M4NO_ERROR;
1614                    }
1615                }
1616            }
1617            pC->pC1->lastDecodedPlane = pTmp;
1618        }
1619
1620        M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts);
1621
1622        for (i=0; i<3; i++) {
1623            if (pTemp1[i].pac_data != M4OSA_NULL) {
1624                free(pTemp1[i].pac_data);
1625                pTemp1[i].pac_data = M4OSA_NULL;
1626            }
1627        }
1628        for (i=0; i<3; i++) {
1629            if (pTemp2[i].pac_data != M4OSA_NULL) {
1630                free(pTemp2[i].pac_data);
1631                pTemp2[i].pac_data = M4OSA_NULL;
1632            }
1633        }
1634    }
1635
1636    /**
1637    *    Return */
1638    M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
1639    return M4NO_ERROR;
1640}
1641/**
1642 ******************************************************************************
1643 * M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay()
1644 * @brief    Apply video overlay from pPlaneIn to pPlaneOut
1645 * @param    pC               (IN/OUT) Internal edit context
1646 * @param    pPlaneIn        (IN) Input raw YUV420 image
1647 * @param    pPlaneOut       (IN/OUT) Output raw YUV420 image
1648 * @return   M4NO_ERROR:    No error
1649 ******************************************************************************
1650 */
1651static M4OSA_ERR
1652M4VSS3GPP_intApplyVideoOverlay (M4VSS3GPP_InternalEditContext *pC,
1653    M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
1654
1655    M4VSS3GPP_ClipContext *pClip;
1656    M4VSS3GPP_EffectSettings *pFx;
1657    M4VSS3GPP_ExternalProgress extProgress;
1658    M4OSA_Double VideoEffectTime;
1659    M4OSA_Double PercentageDone;
1660    M4OSA_UInt8 NumActiveEffects =0;
1661    M4OSA_UInt32 Cts = 0;
1662    M4OSA_Int32 nextEffectTime;
1663    M4OSA_Int32 tmp;
1664    M4OSA_UInt8 i;
1665    M4OSA_ERR err;
1666
1667    pClip = pC->pC1;
1668    if (pC->bIssecondClip == M4OSA_TRUE) {
1669        NumActiveEffects = pC->nbActiveEffects1;
1670    } else {
1671        NumActiveEffects = pC->nbActiveEffects;
1672    }
1673    for (i=0; i<NumActiveEffects; i++) {
1674        if (pC->bIssecondClip == M4OSA_TRUE) {
1675            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1676            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1677            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1678            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1679                pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration - pFx->uiStartTime;
1680        } else {
1681            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1682            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1683            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1684            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1685        }
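        /* Note: for the second clip the transition duration is added so that the
         * effect progress is evaluated in the clip-2 time base (clip 2 already
         * plays during the transition overlap); the same computation is used in
         * M4VSS3GPP_intApplyVideoEffect(). */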
1686        /* Do the framing(overlay) effect only,
1687         * skip other color effect which had been applied */
1688        if (pFx->xVSS.pFramingBuffer == M4OSA_NULL) {
1689            continue;
1690        }
1691
1692        /* To calculate %, subtract timeIncrement because the effect should finish
1693         * on the last frame, which is presented from CTS = eof-timeIncrement till CTS = eof */
1694        PercentageDone = VideoEffectTime / ((M4OSA_Float)pFx->uiDuration);
1695
1696        if (PercentageDone < 0.0) {
1697            PercentageDone = 0.0;
1698        }
1699        if (PercentageDone > 1.0) {
1700            PercentageDone = 1.0;
1701        }
1702        /**
1703        * Compute where we are in the effect (scale is 0->1000) */
1704        tmp = (M4OSA_Int32)(PercentageDone * 1000);
1705
1706        /**
1707        * Set the progress info provided to the external function */
1708        extProgress.uiProgress = (M4OSA_UInt32)tmp;
1709        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1710        extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1711        extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1712        extProgress.bIsLast = M4OSA_FALSE;
1713        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1714        nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1715            + pC->dOutputFrameDuration);
1716        if (nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) {
1717            extProgress.bIsLast = M4OSA_TRUE;
1718        }
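        /* Note: bIsLast signals the external effect that this is expected to be
         * its final call, i.e. the next frame timestamp would fall past the end
         * of the effect. */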
1719        err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1720            pPlaneIn, pPlaneOut, &extProgress,
1721            pFx->VideoEffectType - M4VSS3GPP_kVideoEffectType_External);
1722
1723        if (M4NO_ERROR != err) {
1724            M4OSA_TRACE1_1(
1725                "M4VSS3GPP_intApplyVideoOverlay: \
1726                External video effect function returns 0x%x!",
1727                err);
1728            return err;
1729        }
1730    }
1731
1732    /**
1733    *    Return */
1734    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoOverlay: returning M4NO_ERROR");
1735    return M4NO_ERROR;
1736}
1737/**
1738 ******************************************************************************
1739 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
1740 * @brief    Apply video effect from pPlaneIn to pPlaneOut
1741 * @param   pC                (IN/OUT) Internal edit context
1742 * @param    pPlaneIn        (IN) Input raw YUV420 image
1743 * @param    pPlaneOut       (IN/OUT) Output raw YUV420 image
1745 * @param    bSkipFramingEffect (IN) skip framing effect flag
1746 * @return    M4NO_ERROR:                        No error
1747 ******************************************************************************
1748 */
1749static M4OSA_ERR
1750M4VSS3GPP_intApplyVideoEffect (M4VSS3GPP_InternalEditContext *pC,
1751    M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut,
1752    M4OSA_Bool bSkipFramingEffect) {
1753
1754    M4OSA_ERR err;
1755
1756    M4VSS3GPP_ClipContext *pClip;
1757    M4VSS3GPP_EffectSettings *pFx;
1758    M4VSS3GPP_ExternalProgress extProgress;
1759
1760    M4OSA_Double VideoEffectTime;
1761    M4OSA_Double PercentageDone;
1762    M4OSA_Int32 tmp;
1763
1764    M4VIFI_ImagePlane *pPlaneTempIn;
1765    M4VIFI_ImagePlane *pPlaneTempOut;
1766    M4VIFI_ImagePlane  pTempYuvPlane[3];
1767    M4OSA_UInt8 i;
1768    M4OSA_UInt8 NumActiveEffects =0;
1769
1770
1771    pClip = pC->pC1;
1772    if (pC->bIssecondClip == M4OSA_TRUE)
1773    {
1774        NumActiveEffects = pC->nbActiveEffects1;
1775    }
1776    else
1777    {
1778        NumActiveEffects = pC->nbActiveEffects;
1779    }
1780
1781    memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane));
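    /* Note: the temporary planes are zero-initialized so that the cleanup loop
     * at the end of this function only frees buffers that were actually
     * allocated. */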
1782
1783    /**
1784    * Allocate temporary plane if needed RC */
1785    if (NumActiveEffects > 1) {
1786        err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width,
1787                  pPlaneOut->u_height);
1788
1789        if( M4NO_ERROR != err )
1790        {
1791            M4OSA_TRACE1_1(
1792                "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1793                returning M4NO_ERROR",
1794                err);
1795            pC->ewc.VppError = err;
1796            return
1797                M4NO_ERROR; /**< Return no error to the encoder core
1798                            (else it may leak in some situations...) */
1799        }
1800    }
1801
1802    if (NumActiveEffects  % 2 == 0)
1803    {
1804        pPlaneTempIn = pPlaneIn;
1805        pPlaneTempOut = pTempYuvPlane;
1806    }
1807    else
1808    {
1809        pPlaneTempIn = pPlaneIn;
1810        pPlaneTempOut = pPlaneOut;
1811    }
1812
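    /* Note: effects are chained by ping-ponging between pTempYuvPlane and
     * pPlaneOut. The initial output plane is chosen from the parity of
     * NumActiveEffects so that the last effect always writes into pPlaneOut:
     * with two effects the chain is pPlaneIn -> pTempYuvPlane -> pPlaneOut,
     * with a single effect it is simply pPlaneIn -> pPlaneOut and no temporary
     * plane is needed. */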
1813    for (i=0; i<NumActiveEffects; i++)
1814    {
1815        if (pC->bIssecondClip == M4OSA_TRUE)
1816        {
1817
1818
1819            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1820            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1821            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1822            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1823                              pC->pTransitionList[pC->uiCurrentClip].
1824                              uiTransitionDuration- pFx->uiStartTime;
1825        }
1826        else
1827        {
1828            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1829            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1830            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1831            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1832        }
1833
1834
1835
1836        /* To calculate %, subtract timeIncrement because the effect should finish on the last frame, */
1837        /* which is presented from CTS = eof-timeIncrement till CTS = eof */
1838        PercentageDone = VideoEffectTime
1839            / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);
1840
1841        if( PercentageDone < 0.0 )
1842            PercentageDone = 0.0;
1843
1844        if( PercentageDone > 1.0 )
1845            PercentageDone = 1.0;
1846
1847        switch( pFx->VideoEffectType )
1848        {
1849            case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1850                /**
1851                * Compute where we are in the effect (scale is 0->1024). */
1852                tmp = (M4OSA_Int32)(PercentageDone * 1024);
1853
1854                /**
1855                * Apply the darkening effect */
1856                err =
1857                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1858                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1859
1860                if( M4NO_ERROR != err )
1861                {
1862                    M4OSA_TRACE1_1(
1863                        "M4VSS3GPP_intApplyVideoEffect:\
1864                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1865                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1866                        err);
1867                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1868                }
1869                break;
1870
1871            case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1872                /**
1873                * Compute where we are in the effect (scale is 0->1024) */
1874                tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);
1875
1876                /**
1877                * Apply the darkening effect */
1878                err =
1879                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1880                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1881
1882                if( M4NO_ERROR != err )
1883                {
1884                    M4OSA_TRACE1_1(
1885                        "M4VSS3GPP_intApplyVideoEffect:\
1886                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1887                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1888                        err);
1889                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1890                }
1891                break;
1892
1893            default:
1894                if( pFx->VideoEffectType
1895                    >= M4VSS3GPP_kVideoEffectType_External )
1896                {
1897                    M4OSA_UInt32 Cts = 0;
1898                    M4OSA_Int32 nextEffectTime;
1899
1900                    /**
1901                    * Compute where we are in the effect (scale is 0->1000) */
1902                    tmp = (M4OSA_Int32)(PercentageDone * 1000);
1903
1904                    /**
1905                    * Set the progress info provided to the external function */
1906                    extProgress.uiProgress = (M4OSA_UInt32)tmp;
1907                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1908                    extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1909                    extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1910                    extProgress.bIsLast = M4OSA_FALSE;
1911                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1912                    nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1913                        + pC->dOutputFrameDuration);
1914                    if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
1915                    {
1916                        extProgress.bIsLast = M4OSA_TRUE;
1917                    }
1918                    /* Here skip the framing effect,
1919                     * do the framing effect after apply rendering mode */
1920                    if ((pFx->xVSS.pFramingBuffer != M4OSA_NULL) &&
1921                        bSkipFramingEffect == M4OSA_TRUE) {
1922                        memcpy(pPlaneTempOut[0].pac_data, pPlaneTempIn[0].pac_data,
1923                            pPlaneTempIn[0].u_height * pPlaneTempIn[0].u_width);
1924                        memcpy(pPlaneTempOut[1].pac_data, pPlaneTempIn[1].pac_data,
1925                            pPlaneTempIn[1].u_height * pPlaneTempIn[1].u_width);
1926                        memcpy(pPlaneTempOut[2].pac_data, pPlaneTempIn[2].pac_data,
1927                            pPlaneTempIn[2].u_height * pPlaneTempIn[2].u_width);
1928
1929                    } else {
1930                        err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1931                            pPlaneTempIn, pPlaneTempOut, &extProgress,
1932                            pFx->VideoEffectType
1933                            - M4VSS3GPP_kVideoEffectType_External);
1934                    }
1935                    if( M4NO_ERROR != err )
1936                    {
1937                        M4OSA_TRACE1_1(
1938                            "M4VSS3GPP_intApplyVideoEffect: \
1939                            External video effect function returns 0x%x!",
1940                            err);
1941                        return err;
1942                    }
1943                    break;
1944                }
1945                else
1946                {
1947                    M4OSA_TRACE1_1(
1948                        "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
1949                        returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
1950                        pFx->VideoEffectType);
1951                    return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
1952                }
1953        }
1954        /**
1955        * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */
1956        if (((i % 2 == 0) && (NumActiveEffects  % 2 == 0))
1957            || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
1958        {
1959            pPlaneTempIn = pTempYuvPlane;
1960            pPlaneTempOut = pPlaneOut;
1961        }
1962        else
1963        {
1964            pPlaneTempIn = pPlaneOut;
1965            pPlaneTempOut = pTempYuvPlane;
1966        }
1967    }
1968
1969    for(i=0; i<3; i++) {
1970        if(pTempYuvPlane[i].pac_data != M4OSA_NULL) {
1971            free(pTempYuvPlane[i].pac_data);
1972            pTempYuvPlane[i].pac_data = M4OSA_NULL;
1973        }
1974    }
1975
1976    /**
1977    *    Return */
1978    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
1979    return M4NO_ERROR;
1980}
1981
1982/**
1983 ******************************************************************************
1984 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
1985 * @brief    Apply video transition effect pC1+pC2->pPlaneOut
1986 * @param   pC                (IN/OUT) Internal edit context
1987 * @param    pOutputPlanes    (IN/OUT) Output raw YUV420 image
1988 * @return    M4NO_ERROR:                        No error
1989 ******************************************************************************
1990 */
1991static M4OSA_ERR
1992M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
1993                             M4VIFI_ImagePlane *pPlaneOut )
1994{
1995    M4OSA_ERR err;
1996    M4OSA_Int32 iProgress;
1997    M4VSS3GPP_ExternalProgress extProgress;
1998    M4VIFI_ImagePlane *pPlane;
1999    M4OSA_Int32 i;
2000    const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
2001        pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
2002
2003    /**
2004    * Compute how far from the end cut we are, in clip-base time.
2005    * It is done with integers because the offset and begin cut have been rounded already. */
2006    // Decorrelate input and output encoding timestamp to handle encoder prefetch
2007    iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
2008        ((M4OSA_Double)pC->pC1->iVoffset);
2009    /**
2010    * We must remove the duration of one frame, else we would almost never reach the end
2011    * (it is essentially a fence-post issue). */
2012    iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;
2013
2014    if( iProgress < 0 ) /**< Sanity checks */
2015    {
2016        iProgress = 0;
2017    }
2018
2019    /**
2020    * Compute where we are in the transition, on a base 1000 */
2021    iProgress = ( ( iDur - iProgress) * 1000) / iDur;
2022
2023    /**
2024    * Sanity checks */
2025    if( iProgress < 0 )
2026    {
2027        iProgress = 0;
2028    }
2029    else if( iProgress > 1000 )
2030    {
2031        iProgress = 1000;
2032    }
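    /* Note: iProgress now expresses the position inside the transition on a
     * 0..1000 scale (0 = transition start, 1000 = transition end). For example,
     * with a 1000 ms transition and 250 ms left before the end cut, iProgress
     * is (1000 - 250) * 1000 / 1000 = 750. */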
2033
2034    switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
2035    {
2036        case M4VSS3GPP_TransitionBehaviour_SpeedUp:
2037            iProgress = ( iProgress * iProgress) / 1000;
2038            break;
2039
2040        case M4VSS3GPP_TransitionBehaviour_Linear:
2041            /*do nothing*/
2042            break;
2043
2044        case M4VSS3GPP_TransitionBehaviour_SpeedDown:
2045            iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
2046            break;
2047
2048        case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
2049            if( iProgress < 500 )
2050            {
2051                iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
2052            }
2053            else
2054            {
2055                iProgress =
2056                    (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
2057                    / 500) + 500);
2058            }
2059            break;
2060
2061        case M4VSS3GPP_TransitionBehaviour_FastMiddle:
2062            if( iProgress < 500 )
2063            {
2064                iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
2065            }
2066            else
2067            {
2068                iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
2069            }
2070            break;
2071
2072        default:
2073            /*do nothing*/
2074            break;
2075    }
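    /* Note: the TransitionBehaviour switch above reshapes the linear 0..1000
     * progress into an easing curve: SpeedUp is quadratic (p*p/1000), SpeedDown
     * is a square root (sqrt(p*1000)), and SlowMiddle/FastMiddle apply the two
     * curves piecewise around the midpoint. For example, at p = 250 SpeedUp
     * yields 62 while SpeedDown yields 500. */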
2076
2077    switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
2078    {
2079        case M4VSS3GPP_kVideoTransitionType_CrossFade:
2080            /**
2081            * Apply the transition effect */
2082            err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
2083                (M4ViComImagePlane *)pC->yuv1,
2084                (M4ViComImagePlane *)pC->yuv2,
2085                (M4ViComImagePlane *)pPlaneOut, iProgress);
2086
2087            if( M4NO_ERROR != err )
2088            {
2089                M4OSA_TRACE1_1(
2090                    "M4VSS3GPP_intVideoTransition:\
2091                     M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
2092                    returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
2093                    err);
2094                return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
2095            }
2096            break;
2097
2098        case M4VSS3GPP_kVideoTransitionType_None:
2099            /**
2100            * This is a simple, non-optimized version of the None transition:
2101            * We copy the YUV frame */
2102            if( iProgress < 500 ) /**< first half of transition */
2103            {
2104                pPlane = pC->yuv1;
2105            }
2106            else /**< second half of transition */
2107            {
2108                pPlane = pC->yuv2;
2109            }
2110            /**
2111            * Copy the input YUV frames */
2112            i = 3;
2113
2114            while( i-- > 0 )
2115            {
2116                memcpy((void *)pPlaneOut[i].pac_data,
2117                 (void *)pPlane[i].pac_data,
2118                    pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
2119            }
2120            break;
2121
2122        default:
2123            if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
2124                >= M4VSS3GPP_kVideoTransitionType_External )
2125            {
2126                /**
2127                * Set the progress info provided to the external function */
2128                extProgress.uiProgress = (M4OSA_UInt32)iProgress;
2129                // Decorrelate input and output encoding timestamp to handle encoder prefetch
2130                extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
2131                extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;
2132
2133                err = pC->pTransitionList[pC->
2134                    uiCurrentClip].ExtVideoTransitionFct(
2135                    pC->pTransitionList[pC->
2136                    uiCurrentClip].pExtVideoTransitionFctCtxt,
2137                    pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
2138                    pC->pTransitionList[pC->
2139                    uiCurrentClip].VideoTransitionType
2140                    - M4VSS3GPP_kVideoTransitionType_External);
2141
2142                if( M4NO_ERROR != err )
2143                {
2144                    M4OSA_TRACE1_1(
2145                        "M4VSS3GPP_intVideoTransition:\
2146                        External video transition function returns 0x%x!",
2147                        err);
2148                    return err;
2149                }
2150                break;
2151            }
2152            else
2153            {
2154                M4OSA_TRACE1_1(
2155                    "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
2156                    returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
2157                    pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
2158                return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
2159            }
2160    }
2161
2162    /**
2163    *    Return */
2164    M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
2165    return M4NO_ERROR;
2166}
2167
2168/**
2169 ******************************************************************************
2170 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo()
2171 * @brief    Update bit stream time info by Counter Time System to be compliant with
2172 *          players using bit stream time info
2173 * @note    H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops
2174 *          (GOV, see the standard)
2175 * @param   pC                    (IN/OUT) returns time updated video AU,
2176 *                                the offset between system and video time (MPEG4 only)
2177 *                                and the state of the current clip (MPEG4 only)
2178 * @return    nothing
2179 ******************************************************************************
2180 */
2181static M4OSA_Void
2182M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
2183                            M4SYS_AccessUnit *pAU )
2184{
2185    M4OSA_UInt8 uiTmp;
2186    M4OSA_UInt32 uiCts = 0;
2187    M4OSA_MemAddr8 pTmp;
2188    M4OSA_UInt32 uiAdd;
2189    M4OSA_UInt32 uiCurrGov;
2190    M4OSA_Int8 iDiff;
2191
2192    M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1;
2193    M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset);
2194
2195    /**
2196    * Set H263 time counter from system time */
2197    if( M4SYS_kH263 == pAU->stream->streamType )
2198    {
2199        uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5)
2200            % M4VSS3GPP_EDIT_H263_MODULO_TIME);
2201        M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress),
2202            uiTmp);
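        /* Note: the temporal reference above is computed on a 30000/1001 Hz
         * (~29.97 fps) tick clock: ticks = round(CTS_ms * 30 / 1001), wrapped
         * modulo M4VSS3GPP_EDIT_H263_MODULO_TIME. */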
2203    }
2204    /*
2205    * Set MPEG4 GOV time counter regarding video and system time */
2206    else if( M4SYS_kMPEG_4 == pAU->stream->streamType )
2207    {
2208        /*
2209        * If GOV.
2210        * beware of little/big endian! */
2211        /* correction: read 8 bits block instead of one 32 bits block */
2212        M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress);
2213        M4OSA_UInt32 temp32 = 0;
2214
2215        temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8))
2216            + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8)
2217            + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16)
2218            + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24);
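        /* Note: the 32-bit start code is rebuilt byte by byte so the comparison
         * with M4VSS3GPP_EDIT_GOV_HEADER is independent of host endianness and
         * of the alignment of the AU buffer. */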
2219
2220        M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32,
2221            *(pAU->dataAddress));
2222
2223        if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 )
2224        {
2225            pTmp =
2226                (M4OSA_MemAddr8)(pAU->dataAddress
2227                + 1); /**< Jump to the time code (just after the 32 bits header) */
2228            uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset);
2229
2230            switch( pClipCtxt->bMpeg4GovState )
2231            {
2232                case M4OSA_FALSE: /*< INIT */
2233                    {
2234                        /* video time = ceil (system time + offset) */
2235                        uiCts = ( uiAdd + 999) / 1000;
2236
2237                        /* offset update */
2238                        ( *pOffset) += (( uiCts * 1000) - uiAdd);
2239
2240                        /* Save values */
2241                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
2242
2243                        /* State to 'first' */
2244                        pClipCtxt->bMpeg4GovState = M4OSA_TRUE;
2245                    }
2246                    break;
2247
2248                case M4OSA_TRUE: /*< UPDATE */
2249                    {
2250                        /* Get current Gov value */
2251                        M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov);
2252
2253                        /* video time = floor or ceil (system time + offset) */
2254                        uiCts = (uiAdd / 1000);
2255                        iDiff = (M4OSA_Int8)(uiCurrGov
2256                            - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts
2257                            + pClipCtxt->uiMpeg4PrevGovValueSet);
2258
2259                        /* ceiling */
2260                        if( iDiff > 0 )
2261                        {
2262                            uiCts += (M4OSA_UInt32)(iDiff);
2263
2264                            /* offset update */
2265                            ( *pOffset) += (( uiCts * 1000) - uiAdd);
2266                        }
2267
2268                        /* Save values */
2269                        pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov;
2270                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
2271                    }
2272                    break;
2273            }
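            /* Note: GOV time codes are expressed in seconds. In the INIT state
             * the video time is the ceiling of (AU time + offset)/1000 and the
             * offset is updated accordingly; in the UPDATE state the value is
             * floored and then bumped by iDiff when the GOV values read from the
             * clip advanced faster, so the rewritten GOV stays consistent with
             * the original bitstream. */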
2274
2275            M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts);
2276        }
2277    }
2278    return;
2279}
2280
2281/**
2282 ******************************************************************************
2283 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects()
2284 * @brief    Check which video effect must be applied at the current time
2285 ******************************************************************************
2286 */
2287static M4OSA_Void
2288M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
2289                               M4OSA_UInt8 uiClipNumber )
2290{
2291    M4OSA_UInt8 uiClipIndex;
2292    M4OSA_UInt8 uiFxIndex, i;
2293    M4VSS3GPP_ClipContext *pClip;
2294    M4VSS3GPP_EffectSettings *pFx;
2295    M4OSA_Int32 Off, BC, EC;
2296    // Decorrelate input and output encoding timestamp to handle encoder prefetch
2297    M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;
2298
2299    uiClipIndex = pC->uiCurrentClip;
2300    if (uiClipNumber == 1) {
2301        pClip = pC->pC1;
2302        pC->bClip1ActiveFramingEffect = M4OSA_FALSE;
2303    } else {
2304        pClip = pC->pC2;
2305        pC->bClip2ActiveFramingEffect = M4OSA_FALSE;
2306    }
2307    /**
2308    * Shortcuts for code readability */
2309    Off = pClip->iVoffset;
2310    BC = pClip->iActualVideoBeginCut;
2311    EC = pClip->iEndTime;
2312
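    /* Note: in the loop below an effect is considered active when the current
     * output timestamp t lies inside [uiStartTime, uiStartTime + uiDuration].
     * For clip 2 the test is shifted by the transition duration, since clip 2
     * frames are processed while the transition with clip 1 is still playing. */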
2313    i = 0;
2314
2315    for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ )
2316    {
2317        /** Shortcut: reverse order because of priority between effects (EndEffect always clean) */
2318        pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]);
2319
2320        if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType )
2321        {
2322            /**
2323            * Check if there is actually a video effect */
2324
2325             if(uiClipNumber ==1)
2326             {
2327                /**< Are we after the start time of the effect?
2328                 * or Are we into the effect duration?
2329                 */
2330                if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
2331                    (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
2332                    /**
2333                     * Set the active effect(s) */
2334                    pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;
2335
2336                    /**
2337                     * Update counter of active effects */
2338                    i++;
2339                    if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) {
2340                        pC->bClip1ActiveFramingEffect = M4OSA_TRUE;
2341                    }
2342
2343                    /**
2344                     * For all external effects set this flag to true. */
2345                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2346                    {
2347                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2348                    }
2349                }
2350
2351            }
2352            else
2353            {
2354                /**< Are we into the effect duration? */
2355                if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2356                    >= (M4OSA_Int32)(pFx->uiStartTime))
2357                    && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2358                    <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
2359                    /**
2360                     * Set the active effect(s) */
2361                    pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;
2362
2363                    /**
2364                     * Update counter of active effects */
2365                    i++;
2366                    if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) {
2367                        pC->bClip2ActiveFramingEffect = M4OSA_TRUE;
2368                    }
2369                    /**
2370                     * For all external effects set this flag to true. */
2371                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2372                    {
2373                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2374                    }
2375
2376                    /**
2377                     * The third effect has the highest priority, then the second one, then the first one.
2378                     * Hence, as soon as we find an active effect, we can get out of this loop */
2379                }
2380            }
2381            if (M4VIDEOEDITING_kH264 !=
2382                    pC->pC1->pSettings->ClipProperties.VideoStreamType) {
2383
2384                    // For MPEG-4 and H.263 clips, a full decode/encode is not required
2385                    pC->m_bClipExternalHasStarted = M4OSA_FALSE;
2386            }
2387        }
2388    }
2389    if(1==uiClipNumber)
2390    {
2391    /**
2392     * Save number of active effects */
2393        pC->nbActiveEffects = i;
2394    }
2395    else
2396    {
2397        pC->nbActiveEffects1 = i;
2398    }
2399
2400    /**
2401    * Convert the absolute time to clip-relative time */
2402    t -= Off;
2403
2404    /**
2405    * Check if we are on the begin cut (for clip1 only) */
2406    if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) )
2407    {
2408        pC->bClip1AtBeginCut = M4OSA_TRUE;
2409    }
2410    else
2411    {
2412        pC->bClip1AtBeginCut = M4OSA_FALSE;
2413    }
2414
2415    return;
2416}
2417
2418/**
2419 ******************************************************************************
2420 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder()
2421 * @brief    Creates the video encoder
2422 * @note
2423 ******************************************************************************
2424 */
2425M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2426{
2427    M4OSA_ERR err;
2428    M4ENCODER_AdvancedParams EncParams;
2429
2430    /**
2431    * Simulate a writer interface with our specific function */
2432    pC->ewc.OurWriterDataInterface.pProcessAU =
2433        M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific,
2434                                but it follows the writer interface */
2435    pC->ewc.OurWriterDataInterface.pStartAU =
2436        M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific,
2437                              but it follows the writer interface */
2438    pC->ewc.OurWriterDataInterface.pWriterContext =
2439        (M4WRITER_Context)
2440        pC; /**< We give the internal context as writer context */
2441
2442    /**
2443    * Get the encoder interface, if not already done */
2444    if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts )
2445    {
2446        err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI,
2447            pC->ewc.VideoStreamType);
2448        M4OSA_TRACE1_1(
2449            "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x",
2450            err);
2451        M4ERR_CHECK_RETURN(err);
2452    }
2453
2454    /**
2455    * Set encoder shell parameters according to VSS settings */
2456
2457    /* Common parameters */
2458    EncParams.InputFormat = M4ENCODER_kIYUV420;
2459    EncParams.FrameWidth = pC->ewc.uiVideoWidth;
2460    EncParams.FrameHeight = pC->ewc.uiVideoHeight;
2461    EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale;
2462
2463    if( pC->bIsMMS == M4OSA_FALSE )
2464    {
2465        /* No strict regulation in video editor */
2466        /* Because of the effects and transitions we should allow more flexibility */
2467        /* Also it prevents dropping important frames (which would hurt scheduling and
2468        cause block effects) */
2469        EncParams.bInternalRegulation = M4OSA_FALSE;
2470        // Variable framerate is not supported by StageFright encoders
2471        EncParams.FrameRate = M4ENCODER_k30_FPS;
2472    }
2473    else
2474    {
2475        /* In case of MMS mode, we need to enable bitrate regulation to be sure */
2476        /* to reach the targeted output file size */
2477        EncParams.bInternalRegulation = M4OSA_TRUE;
2478        EncParams.FrameRate = pC->MMSvideoFramerate;
2479    }
2480
2481    /**
2482    * Other encoder settings (defaults) */
2483    EncParams.uiHorizontalSearchRange = 0;     /* use default */
2484    EncParams.uiVerticalSearchRange = 0;       /* use default */
2485    EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
2486    EncParams.uiIVopPeriod = 0;                /* use default */
2487    EncParams.uiMotionEstimationTools = 0;     /* M4V_MOTION_EST_TOOLS_ALL */
2488    EncParams.bAcPrediction = M4OSA_TRUE;      /* use AC prediction */
2489    EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
2490    EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2491
2492    /**
2493    * Set the video profile and level */
2494    EncParams.videoProfile = pC->ewc.outputVideoProfile;
2495    EncParams.videoLevel= pC->ewc.outputVideoLevel;
2496
2497    switch ( pC->ewc.VideoStreamType )
2498    {
2499        case M4SYS_kH263:
2500
2501            EncParams.Format = M4ENCODER_kH263;
2502
2503            EncParams.uiStartingQuantizerValue = 10;
2504            EncParams.uiRateFactor = 1; /* default */
2505
2506            EncParams.bErrorResilience = M4OSA_FALSE;
2507            EncParams.bDataPartitioning = M4OSA_FALSE;
2508            break;
2509
2510        case M4SYS_kMPEG_4:
2511
2512            EncParams.Format = M4ENCODER_kMPEG4;
2513
2514            EncParams.uiStartingQuantizerValue = 8;
2515            EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration
2516                * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5);
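            /* Note: uiRateFactor is the frame duration expressed in timescale
             * ticks, rounded to the nearest integer. For example, a 33.37 ms
             * frame duration with a 30000 Hz timescale gives about 1001 ticks
             * per frame. */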
2517
2518            if( EncParams.uiRateFactor == 0 )
2519                EncParams.uiRateFactor = 1; /* default */
2520
2521            if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning )
2522            {
2523                EncParams.bErrorResilience = M4OSA_FALSE;
2524                EncParams.bDataPartitioning = M4OSA_FALSE;
2525            }
2526            else
2527            {
2528                EncParams.bErrorResilience = M4OSA_TRUE;
2529                EncParams.bDataPartitioning = M4OSA_TRUE;
2530            }
2531            break;
2532
2533        case M4SYS_kH264:
2534            M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264");
2535
2536            EncParams.Format = M4ENCODER_kH264;
2537
2538            EncParams.uiStartingQuantizerValue = 10;
2539            EncParams.uiRateFactor = 1; /* default */
2540
2541            EncParams.bErrorResilience = M4OSA_FALSE;
2542            EncParams.bDataPartitioning = M4OSA_FALSE;
2543            //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS;
2544            break;
2545
2546        default:
2547            M4OSA_TRACE1_1(
2548                "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x",
2549                pC->ewc.VideoStreamType);
2550            return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT;
2551    }
2552
2553    if( pC->bIsMMS == M4OSA_FALSE )
2554    {
2555        EncParams.Bitrate = pC->xVSS.outputVideoBitrate;
2556
2557    }
2558    else
2559    {
2560        EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */
2561        EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */
2562    }
2563
2564    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit");
2565    /**
2566    * Init the video encoder (advanced settings version of the encoder Open function) */
2567    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext,
2568        &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC,
2569        pC->ShellAPI.pCurrentVideoEncoderExternalAPI,
2570        pC->ShellAPI.pCurrentVideoEncoderUserData);
2571
2572    if( M4NO_ERROR != err )
2573    {
2574        M4OSA_TRACE1_1(
2575            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x",
2576            err);
2577        return err;
2578    }
2579
2580    pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2581    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen");
2582
2583    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext,
2584        &pC->ewc.WriterVideoAU, &EncParams);
2585
2586    if( M4NO_ERROR != err )
2587    {
2588        M4OSA_TRACE1_1(
2589            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x",
2590            err);
2591        return err;
2592    }
2593
2594    pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2595    M4OSA_TRACE1_0(
2596        "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart");
2597
2598    if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart )
2599    {
2600        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart(
2601            pC->ewc.pEncContext);
2602
2603        if( M4NO_ERROR != err )
2604        {
2605            M4OSA_TRACE1_1(
2606                "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x",
2607                err);
2608            return err;
2609        }
2610    }
2611
2612    pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning;
2613
2614    /**
2615    *    Return */
2616    M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR");
2617    return M4NO_ERROR;
2618}
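
/*
 * Editor's note, a minimal worked example of the uiRateFactor computation in the
 * MPEG-4 branch of the switch above (assuming dOutputFrameDuration is a frame
 * duration in milliseconds and uiVideoTimeScale is in ticks per second, as the
 * division by 1000.0 suggests): for a 30 fps output with a 30000 Hz timescale,
 *
 *     double frameDurationMs = 1000.0 / 30.0;                              // ~33.33 ms
 *     unsigned int timeScale = 30000;                                      // ticks per second
 *     unsigned int rateFactor =
 *         (unsigned int)(frameDurationMs * timeScale / 1000.0 + 0.5);      // == 1000
 *
 * i.e. uiRateFactor ends up as the number of timescale ticks per encoded frame.
 */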
2619
2620/**
2621 ******************************************************************************
2622 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder()
2623 * @brief    Destroy the video encoder
2624 * @note
2625 ******************************************************************************
2626 */
2627M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2628{
2629    M4OSA_ERR err = M4NO_ERROR;
2630
2631    if( M4OSA_NULL != pC->ewc.pEncContext )
2632    {
2633        if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState )
2634        {
2635            if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL )
2636            {
2637                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop(
2638                    pC->ewc.pEncContext);
2639
2640                if( M4NO_ERROR != err )
2641                {
2642                    M4OSA_TRACE1_1(
2643                        "M4VSS3GPP_intDestroyVideoEncoder:\
2644                        pVideoEncoderGlobalFcts->pFctStop returns 0x%x",
2645                        err);
2646                    /* A failure to stop during teardown cannot be recovered; continue the cleanup. */
2647                }
2648            }
2649
2650            pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2651        }
2652
2653        /* Has the encoder actually been opened? Don't close it if that's not the case. */
2654        if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState )
2655        {
2656            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose(
2657                pC->ewc.pEncContext);
2658
2659            if( M4NO_ERROR != err )
2660            {
2661                M4OSA_TRACE1_1(
2662                    "M4VSS3GPP_intDestroyVideoEncoder:\
2663                    pVideoEncoderGlobalFcts->pFctClose returns 0x%x",
2664                    err);
2665                /* A failure to close during teardown cannot be recovered; continue the cleanup. */
2666            }
2667
2668            pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2669        }
2670
2671        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup(
2672            pC->ewc.pEncContext);
2673
2674        if( M4NO_ERROR != err )
2675        {
2676            M4OSA_TRACE1_1(
2677                "M4VSS3GPP_intDestroyVideoEncoder:\
2678                pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!",
2679                err);
2680            /**< We do not return the error here because we still have stuff to free */
2681        }
2682
2683        pC->ewc.encoderState = M4VSS3GPP_kNoEncoder;
2684        /**
2685        * Reset variable */
2686        pC->ewc.pEncContext = M4OSA_NULL;
2687    }
2688
2689    M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err);
2690    return err;
2691}
2692
2693/**
2694 ******************************************************************************
2695 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter()
2696 * @brief    Modify the time counter of the given H263 video AU
2697 * @note
2698 * @param    pAuDataBuffer    (IN/OUT) H263 Video AU to modify
2699 * @param    uiCts            (IN)     New time counter value
2700 * @return    nothing
2701 ******************************************************************************
2702 */
2703static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
2704                                                  M4OSA_UInt8 uiCts )
2705{
2706    /*
2707    *  The H263 time counter is 8 bits located on the "x" below:
2708    *
2709    *   |--------|--------|--------|--------|
2710    *    ???????? ???????? ??????xx xxxxxx??
2711    */
2712
2713    /**
2714    * Write the 2 bits on the third byte */
2715    pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3);
2716
2717    /**
2718    * Write the 6 bits on the fourth byte */
2719    pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3);
2720
2721    return;
2722}
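
/*
 * Editor's sketch of the bit packing above (illustrative only; the zeroed buffer and
 * the value 0xB7 are assumptions for the example):
 *
 *     M4OSA_UInt8 au[4] = {0x00, 0x00, 0x00, 0x00};
 *     M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)au, 0xB7);
 *     // au[2] == 0x02 : the two most significant bits of 0xB7 (binary 10)
 *     // au[3] == 0xDC : the six least significant bits of 0xB7 (110111) shifted left by 2
 */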
2723
2724/**
2725 ******************************************************************************
2726 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov()
2727 * @brief    Modify the time info from Group Of VOP video AU
2728 * @note
2729 * @param    pAuDataBuffer    (IN/OUT) MPEG4 Video AU to modify
2730 * @param    uiCtsSec         (IN)     New GOV time info, in seconds
2731 * @return    nothing
2732 ******************************************************************************
2733 */
2734static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2735                                           M4OSA_UInt32 uiCtsSec )
2736{
2737    /*
2738    *  The MPEG-4 GOV time code is 18 bits, packed into the first three bytes:
2739    *
2740    *      byte 0     byte 1     byte 2
2741    *     hhhhhmmm   mmm1ssss   ss??????
2742    *    (hh: 5 bits, mm: 6 bits, marker: 1 bit, ss: 6 bits)
2743    */
2744    M4OSA_UInt8 uiHh;
2745    M4OSA_UInt8 uiMm;
2746    M4OSA_UInt8 uiSs;
2747    M4OSA_UInt8 uiTmp;
2748
2749    /**
2750    * Write the 2 last bits ss */
2751    uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */
2752    pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F));
2753
2754    if( uiCtsSec < 60 )
2755    {
2756        /**
2757        * Write the marker bit (0x10) and the 4 upper bits of ss; mm is zero here */
2758        pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2));
2759
2760        /**
2761        * hh and mm are zero, so clear the 5 bits of hh and the 3 upper bits of mm */
2762        pAuDataBuffer[0] = 0;
2763    }
2764    else
2765    {
2766        /**
2767        * Write the 3 lower bits of mm, the marker bit (0x10) and the 4 upper bits of ss */
2768        uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */
2769        uiMm = (M4OSA_UInt8)(uiTmp % 60);
2770        pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2));
2771
2772        if( uiTmp < 60 )
2773        {
2774            /**
2775            * hh is zero, so write only the 3 upper bits of mm */
2776            pAuDataBuffer[0] = ((uiMm >> 3));
2777        }
2778        else
2779        {
2780            /**
2781            * Write the 5 bits of hh and the 3 upper bits of mm */
2782            uiHh = (M4OSA_UInt8)(uiTmp / 60);
2783            pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3));
2784        }
2785    }
2786    return;
2787}
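
/*
 * Editor's worked example of the GOV packing above (illustrative only): for
 * uiCtsSec = 3725 seconds, i.e. hh = 1, mm = 2, ss = 5,
 *
 *     M4OSA_UInt8 au[3] = {0x00, 0x00, 0x00};
 *     M4VSS3GPP_intSetMPEG4Gov((M4OSA_MemAddr8)au, 3725);
 *     // au[0] == 0x08 : hh (00001) and the 3 upper bits of mm (000)
 *     // au[1] == 0x51 : the 3 lower bits of mm (010), the marker bit (1)
 *     //                 and the 4 upper bits of ss (0001)
 *     // au[2] == 0x40 : the 2 lower bits of ss (01), remaining 6 bits preserved
 */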
2788
2789/**
2790 ******************************************************************************
2791 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov()
2792 * @brief    Get the time info from Group Of VOP video AU
2793 * @note
2794 * @param    pAuDataBuffer    (IN)    MPEG4 Video AU to read
2795 * @param    pCtsSec          (OUT)   Current GOV time info, in seconds
2796 * @return    nothing
2797 ******************************************************************************
2798 */
2799static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2800                                           M4OSA_UInt32 *pCtsSec )
2801{
2802    /*
2803    *  The MPEG-4 GOV time code is 18 bits, packed into the first three bytes:
2804    *
2805    *      byte 0     byte 1     byte 2
2806    *     hhhhhmmm   mmm1ssss   ss??????
2807    *    (hh: 5 bits, mm: 6 bits, marker: 1 bit, ss: 6 bits)
2808    */
2809    M4OSA_UInt8 uiHh;
2810    M4OSA_UInt8 uiMm;
2811    M4OSA_UInt8 uiSs;
2812    M4OSA_UInt8 uiTmp;
2813    M4OSA_UInt32 uiCtsSec;
2814
2815    /**
2816    * Read ss */
2817    uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6);
2818    uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2);
2819    uiCtsSec = uiSs + uiTmp;
2820
2821    /**
2822    * Read mm */
2823    uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5);
2824    uiTmp = (( pAuDataBuffer[0] & 0x07) << 3);
2825    uiMm = uiMm + uiTmp;
2826    uiCtsSec = ( uiMm * 60) + uiCtsSec;
2827
2828    /**
2829    * Read hh */
2830    uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3);
2831
2832    if( uiHh )
2833    {
2834        uiCtsSec = ( uiHh * 3600) + uiCtsSec;
2835    }
2836
2837    /*
2838    * in sec */
2839    *pCtsSec = uiCtsSec;
2840
2841    return;
2842}
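
/*
 * Editor's note: this is the inverse of M4VSS3GPP_intSetMPEG4Gov() above. Reading back
 * the example bytes used there recovers the original value (illustrative only):
 *
 *     M4OSA_UInt32 cts = 0;
 *     M4OSA_UInt8 au[3] = {0x08, 0x51, 0x40};
 *     M4VSS3GPP_intGetMPEG4Gov((M4OSA_MemAddr8)au, &cts);
 *     // cts == 3725  (1 h * 3600 + 2 min * 60 + 5 s)
 */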
2843
2844/**
2845 ******************************************************************************
2846 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420()
2847 * @brief    Allocate the three YUV 4:2:0 planes
2848 * @note
2849 * @param    pPlanes    (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures
2850 * @param    uiWidth    (IN)     Image width
2851 * @param    uiHeight (IN)     Image height
2852 ******************************************************************************
2853 */
2854static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
2855                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight )
2856{
2857    if (pPlanes == M4OSA_NULL) {
2858        M4OSA_TRACE1_0("M4VSS3GPP_intAllocateYUV420: Invalid pPlanes pointer");
2859        return M4ERR_PARAMETER;
2860    }
2861    /* If the buffer is already allocated with the target size,
2862     * do not allocate it again */
2863    if (pPlanes[0].pac_data != M4OSA_NULL &&
2864        pPlanes[0].u_width == uiWidth &&
2865        pPlanes[0].u_height == uiHeight) {
2866        return M4NO_ERROR;
2867    }
2868
2869    pPlanes[0].u_width = uiWidth;
2870    pPlanes[0].u_height = uiHeight;
2871    pPlanes[0].u_stride = uiWidth;
2872    pPlanes[0].u_topleft = 0;
2873
2874    if (pPlanes[0].pac_data != M4OSA_NULL) {
2875        free(pPlanes[0].pac_data);
2876        pPlanes[0].pac_data = M4OSA_NULL;
2877    }
2878    pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride
2879        * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data");
2880
2881    if( M4OSA_NULL == pPlanes[0].pac_data )
2882    {
2883        M4OSA_TRACE1_0(
2884            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\
2885            returning M4ERR_ALLOC");
2886        return M4ERR_ALLOC;
2887    }
2888
2889    pPlanes[1].u_width = pPlanes[0].u_width >> 1;
2890    pPlanes[1].u_height = pPlanes[0].u_height >> 1;
2891    pPlanes[1].u_stride = pPlanes[1].u_width;
2892    pPlanes[1].u_topleft = 0;
2893    if (pPlanes[1].pac_data != M4OSA_NULL) {
2894        free(pPlanes[1].pac_data);
2895        pPlanes[1].pac_data = M4OSA_NULL;
2896    }
2897    pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride
2898        * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data");
2899
2900    if( M4OSA_NULL == pPlanes[1].pac_data )
2901    {
2902        M4OSA_TRACE1_0(
2903            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\
2904            returning M4ERR_ALLOC");
2905        free((void *)pPlanes[0].pac_data);
2906        pPlanes[0].pac_data = M4OSA_NULL;
2907        return M4ERR_ALLOC;
2908    }
2909
2910    pPlanes[2].u_width = pPlanes[1].u_width;
2911    pPlanes[2].u_height = pPlanes[1].u_height;
2912    pPlanes[2].u_stride = pPlanes[2].u_width;
2913    pPlanes[2].u_topleft = 0;
2914    if (pPlanes[2].pac_data != M4OSA_NULL) {
2915        free(pPlanes[2].pac_data);
2916        pPlanes[2].pac_data = M4OSA_NULL;
2917    }
2918    pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride
2919        * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data");
2920
2921    if( M4OSA_NULL == pPlanes[2].pac_data )
2922    {
2923        M4OSA_TRACE1_0(
2924            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\
2925            returning M4ERR_ALLOC");
2926        free((void *)pPlanes[0].pac_data);
2927        free((void *)pPlanes[1].pac_data);
2928        pPlanes[0].pac_data = M4OSA_NULL;
2929        pPlanes[1].pac_data = M4OSA_NULL;
2930        return M4ERR_ALLOC;
2931    }
2932
2933    memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height);
2934    memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height);
2935    memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height);
2936    /**
2937    *    Return */
2938    M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR");
2939    return M4NO_ERROR;
2940}
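
/*
 * Editor's sketch of the resulting plane geometry (illustrative only; the 640x480
 * size is an arbitrary example): after a successful call such as
 *
 *     M4VIFI_ImagePlane planes[3];
 *     memset(planes, 0, sizeof(planes));            // pac_data must start out as NULL
 *     err = M4VSS3GPP_intAllocateYUV420(planes, 640, 480);
 *
 * the Y plane is 640x480 with a 640-byte stride, and the U and V planes are each
 * 320x240 with a 320-byte stride, all three zero-filled and independently allocated.
 */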
2941
2942/**
2943******************************************************************************
2944* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
2945*                                            M4OSA_FileReadPointer* pFileReadPtr,
2946*                                               M4VIFI_ImagePlane* pImagePlanes,
2947*                                               M4OSA_UInt32 width,
2948*                                               M4OSA_UInt32 height);
2949* @brief    Converts and resizes an ARGB8888 image to YUV420
2950* @note
2951* @param    pFileIn         (IN) The ARGB8888 input file
2952* @param    pFileReadPtr    (IN) Pointer on filesystem functions
2953* @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user.
2954*                           ARGB8888 image  will be converted and resized to output
2955*                           YUV420 plane size
2956* @param width       (IN) width of the ARGB8888 image
2957* @param height      (IN) height of the ARGB8888 image
2958* @return   M4NO_ERROR: No error
2959* @return   M4ERR_ALLOC: memory error
2960* @return   M4ERR_PARAMETER: At least one of the function parameters is null
2961******************************************************************************
2962*/
2963
2964M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
2965                           M4OSA_FileReadPointer* pFileReadPtr,
2966                           M4VIFI_ImagePlane* pImagePlanes,
2967                           M4OSA_UInt32 width,M4OSA_UInt32 height) {
2968    M4OSA_Context pARGBIn;
2969    M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
2970    M4OSA_UInt32 frameSize_argb = width * height * 4;
2971    M4OSA_UInt32 frameSize_rgb888 = width * height * 3;
2972    M4OSA_UInt32 i = 0,j= 0;
2973    M4OSA_ERR err = M4NO_ERROR;
2974
2975    M4OSA_UInt8 *pArgbPlane =
2976        (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
2977                                                M4VS, (M4OSA_Char*)"argb data");
2978    if (pArgbPlane == M4OSA_NULL) {
2979        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \
2980            Failed to allocate memory for ARGB plane");
2981        return M4ERR_ALLOC;
2982    }
2983
2984    /* Open the ARGB8888 input file */
2985    err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
2986    if (err != M4NO_ERROR) {
2987        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \
2988            Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
2989        free(pArgbPlane);
2990        pArgbPlane = M4OSA_NULL;
2991        goto cleanup;
2992    }
2993
2994    err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane,
2995                                 &frameSize_argb);
2996    if (err != M4NO_ERROR) {
2997        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
2998            Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
2999        pFileReadPtr->closeRead(pARGBIn);
3000        free(pArgbPlane);
3001        pArgbPlane = M4OSA_NULL;
3002        goto cleanup;
3003    }
3004
3005    err = pFileReadPtr->closeRead(pARGBIn);
3006    if(err != M4NO_ERROR) {
3007        M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
3008            Can not close ARGB8888  file %s, error: 0x%x\n",pFileIn, err);
3009        free(pArgbPlane);
3010        pArgbPlane = M4OSA_NULL;
3011        goto cleanup;
3012    }
3013
3014    rgbPlane1.pac_data =
3015        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888,
3016                                            M4VS, (M4OSA_Char*)"RGB888 plane1");
3017    if(rgbPlane1.pac_data == M4OSA_NULL) {
3018        M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \
3019            Failed to allocate memory for rgb plane1");
3020        free(pArgbPlane);
3021        return M4ERR_ALLOC;
3022    }
3023
3024    rgbPlane1.u_height = height;
3025    rgbPlane1.u_width = width;
3026    rgbPlane1.u_stride = width*3;
3027    rgbPlane1.u_topleft = 0;
3028
3029
3030    /** Remove the alpha channel */
3031    for (i=0, j = 0; i < frameSize_argb; i++) {
3032        if ((i % 4) == 0) continue;
3033        rgbPlane1.pac_data[j] = pArgbPlane[i];
3034        j++;
3035    }
3036    free(pArgbPlane);
3037
3038    /**
3039     * Check whether resizing is required in addition to the color conversion */
3040    if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) {
3041
3042        frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3;
3043        rgbPlane2.pac_data =
3044            (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS,
3045                                                   (M4OSA_Char*)"rgb Plane2");
3046        if(rgbPlane2.pac_data == M4OSA_NULL) {
3047            M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2");
3048            free(rgbPlane1.pac_data);
3049            return M4ERR_ALLOC;
3050        }
3051        rgbPlane2.u_height =  pImagePlanes->u_height;
3052        rgbPlane2.u_width = pImagePlanes->u_width;
3053        rgbPlane2.u_stride = pImagePlanes->u_width*3;
3054        rgbPlane2.u_topleft = 0;
3055
3056        /* Resizing */
3057        err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL,
3058                                                  &rgbPlane1, &rgbPlane2);
3059        free(rgbPlane1.pac_data);
3060        if(err != M4NO_ERROR) {
3061            M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err);
3062            free(rgbPlane2.pac_data);
3063            return err;
3064        }
3065
3066        /*Converting Resized RGB888 to YUV420 */
3067        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
3068        free(rgbPlane2.pac_data);
3069        if(err != M4NO_ERROR) {
3070            M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err);
3071            return err;
3072        }
3073    } else {
3074        err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
3075        if(err != M4NO_ERROR) {
3076            M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
3077        }
3078        free(rgbPlane1.pac_data);
3079    }
3080cleanup:
3081    M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit");
3082    return err;
3083}
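
/*
 * Editor's usage sketch (illustrative only; "still.argb" and the 320x240 output size
 * are assumptions for the example): the caller allocates the destination YUV420 planes
 * first, then the 640x480 ARGB8888 file is read, its alpha channel stripped, and the
 * picture resized and converted into those planes.
 *
 *     M4VIFI_ImagePlane yuv[3];
 *     memset(yuv, 0, sizeof(yuv));
 *     err = M4VSS3GPP_intAllocateYUV420(yuv, 320, 240);
 *     if (M4NO_ERROR == err) {
 *         err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(
 *                   (M4OSA_Void *)"still.argb", pC->pOsaFileReadPtr, yuv, 640, 480);
 *     }
 */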
3084
3085M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC,
3086                                          M4xVSS_MediaRendering renderingMode,
3087                                          M4VIFI_ImagePlane* pInplane,
3088                                          M4VIFI_ImagePlane* pOutplane) {
3089
3090    M4OSA_ERR err = M4NO_ERROR;
3091    M4AIR_Params airParams;
3092    M4VIFI_ImagePlane pImagePlanesTemp[3];
3093    M4OSA_UInt32 i = 0;
3094
3095    if (renderingMode == M4xVSS_kBlackBorders) {
3096        memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE,
3097               (pOutplane[0].u_height*pOutplane[0].u_stride));
3098        memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE,
3099               (pOutplane[1].u_height*pOutplane[1].u_stride));
3100        memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE,
3101               (pOutplane[2].u_height*pOutplane[2].u_stride));
3102    }
3103
3104    if (renderingMode == M4xVSS_kResizing) {
3105        /**
3106        * Call the resize filter.
3107        * From the intermediate frame to the encoder image plane */
3108        err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL,
3109                                                  pInplane, pOutplane);
3110        if (M4NO_ERROR != err) {
3111            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3112                M4VIFI_ResizeBilinearYUV420toYUV420 returns 0x%x!", err);
3113            return err;
3114        }
3115    } else {
3116        M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL;
3117        M4OSA_UInt8* pOutPlaneY =
3118            pOutplane[0].pac_data + pOutplane[0].u_topleft;
3119        M4OSA_UInt8* pOutPlaneU =
3120            pOutplane[1].pac_data + pOutplane[1].u_topleft;
3121        M4OSA_UInt8* pOutPlaneV =
3122            pOutplane[2].pac_data + pOutplane[2].u_topleft;
3123        M4OSA_UInt8* pInPlaneY = M4OSA_NULL;
3124        M4OSA_UInt8* pInPlaneU = M4OSA_NULL;
3125        M4OSA_UInt8* pInPlaneV = M4OSA_NULL;
3126
3127        /* To keep media aspect ratio*/
3128        /* Initialize AIR Params*/
3129        airParams.m_inputCoord.m_x = 0;
3130        airParams.m_inputCoord.m_y = 0;
3131        airParams.m_inputSize.m_height = pInplane->u_height;
3132        airParams.m_inputSize.m_width = pInplane->u_width;
3133        airParams.m_outputSize.m_width = pOutplane->u_width;
3134        airParams.m_outputSize.m_height = pOutplane->u_height;
3135        airParams.m_bOutputStripe = M4OSA_FALSE;
3136        airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft;
3137
3138        /**
3139        Media rendering: Black borders*/
3140        if (renderingMode == M4xVSS_kBlackBorders) {
3141            pImagePlanesTemp[0].u_width = pOutplane[0].u_width;
3142            pImagePlanesTemp[0].u_height = pOutplane[0].u_height;
3143            pImagePlanesTemp[0].u_stride = pOutplane[0].u_width;
3144            pImagePlanesTemp[0].u_topleft = 0;
3145
3146            pImagePlanesTemp[1].u_width = pOutplane[1].u_width;
3147            pImagePlanesTemp[1].u_height = pOutplane[1].u_height;
3148            pImagePlanesTemp[1].u_stride = pOutplane[1].u_width;
3149            pImagePlanesTemp[1].u_topleft = 0;
3150
3151            pImagePlanesTemp[2].u_width = pOutplane[2].u_width;
3152            pImagePlanesTemp[2].u_height = pOutplane[2].u_height;
3153            pImagePlanesTemp[2].u_stride = pOutplane[2].u_width;
3154            pImagePlanesTemp[2].u_topleft = 0;
3155
3156            /**
3157             * Allocate the planes of the local image plane structure */
3158            pImagePlanesTemp[0].pac_data =
3159                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3160                    pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height,
3161                    M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ;
3162            if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) {
3163                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3164                return M4ERR_ALLOC;
3165            }
3166            pImagePlanesTemp[1].pac_data =
3167                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3168                    pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height,
3169                    M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ;
3170            if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) {
3171                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3172                free(pImagePlanesTemp[0].pac_data);
3173                return M4ERR_ALLOC;
3174            }
3175            pImagePlanesTemp[2].pac_data =
3176                (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(
3177                    pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height,
3178                    M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ;
3179            if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) {
3180                M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error");
3181                free(pImagePlanesTemp[0].pac_data);
3182                free(pImagePlanesTemp[1].pac_data);
3183                return M4ERR_ALLOC;
3184            }
3185
3186            pInPlaneY = pImagePlanesTemp[0].pac_data ;
3187            pInPlaneU = pImagePlanesTemp[1].pac_data ;
3188            pInPlaneV = pImagePlanesTemp[2].pac_data ;
3189
3190            memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE,
3191                (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));
3192            memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE,
3193                (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));
3194            memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE,
3195                (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));
3196
3197            M4OSA_UInt32 height =
3198                (pInplane->u_height * pOutplane->u_width) /pInplane->u_width;
3199
3200            if (height <= pOutplane->u_height) {
3201                /**
3202                 * Black borders will be on the top and the bottom side */
3203                airParams.m_outputSize.m_width = pOutplane->u_width;
3204                airParams.m_outputSize.m_height = height;
3205                /**
3206                 * Offset of the first active line (top border lines times the stride) */
3207                pImagePlanesTemp[0].u_topleft =
3208                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height -
3209                      airParams.m_outputSize.m_height)>>1)) *
3210                      pImagePlanesTemp[0].u_stride;
3211                pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height;
3212                pImagePlanesTemp[1].u_topleft =
3213                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height -
3214                     (airParams.m_outputSize.m_height>>1)))>>1) *
3215                     pImagePlanesTemp[1].u_stride;
3216                pImagePlanesTemp[1].u_height =
3217                    airParams.m_outputSize.m_height>>1;
3218                pImagePlanesTemp[2].u_topleft =
3219                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height -
3220                     (airParams.m_outputSize.m_height>>1)))>>1) *
3221                     pImagePlanesTemp[2].u_stride;
3222                pImagePlanesTemp[2].u_height =
3223                    airParams.m_outputSize.m_height>>1;
3224            } else {
3225                /**
3226                 * Black borders will be on the left and right side */
3227                airParams.m_outputSize.m_height = pOutplane->u_height;
3228                airParams.m_outputSize.m_width =
3229                    (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height);
3230
3231                pImagePlanesTemp[0].u_topleft =
3232                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width -
3233                     airParams.m_outputSize.m_width)>>1));
3234                pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width;
3235                pImagePlanesTemp[1].u_topleft =
3236                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width -
3237                     (airParams.m_outputSize.m_width>>1)))>>1);
3238                pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1;
3239                pImagePlanesTemp[2].u_topleft =
3240                    (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width -
3241                     (airParams.m_outputSize.m_width>>1)))>>1);
3242                pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1;
3243            }
3244
3245            /**
3246             * Width and height have to be even */
3247            airParams.m_outputSize.m_width =
3248                (airParams.m_outputSize.m_width>>1)<<1;
3249            airParams.m_outputSize.m_height =
3250                (airParams.m_outputSize.m_height>>1)<<1;
3251            airParams.m_inputSize.m_width =
3252                (airParams.m_inputSize.m_width>>1)<<1;
3253            airParams.m_inputSize.m_height =
3254                (airParams.m_inputSize.m_height>>1)<<1;
3255            pImagePlanesTemp[0].u_width =
3256                (pImagePlanesTemp[0].u_width>>1)<<1;
3257            pImagePlanesTemp[1].u_width =
3258                (pImagePlanesTemp[1].u_width>>1)<<1;
3259            pImagePlanesTemp[2].u_width =
3260                (pImagePlanesTemp[2].u_width>>1)<<1;
3261            pImagePlanesTemp[0].u_height =
3262                (pImagePlanesTemp[0].u_height>>1)<<1;
3263            pImagePlanesTemp[1].u_height =
3264                (pImagePlanesTemp[1].u_height>>1)<<1;
3265            pImagePlanesTemp[2].u_height =
3266                (pImagePlanesTemp[2].u_height>>1)<<1;
3267
3268            /**
3269             * Check that values are coherent */
3270            if (airParams.m_inputSize.m_height ==
3271                   airParams.m_outputSize.m_height) {
3272                airParams.m_inputSize.m_width =
3273                    airParams.m_outputSize.m_width;
3274            } else if (airParams.m_inputSize.m_width ==
3275                          airParams.m_outputSize.m_width) {
3276                airParams.m_inputSize.m_height =
3277                    airParams.m_outputSize.m_height;
3278            }
3279            pPlaneTemp = pImagePlanesTemp;
3280        }
3281
3282        /**
3283         * Media rendering: Cropping*/
3284        if (renderingMode == M4xVSS_kCropping) {
3285            airParams.m_outputSize.m_height = pOutplane->u_height;
3286            airParams.m_outputSize.m_width = pOutplane->u_width;
3287            if ((airParams.m_outputSize.m_height *
3288                 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width <
3289                  airParams.m_inputSize.m_height) {
3290                /* Height will be cropped */
3291                airParams.m_inputSize.m_height =
3292                    (M4OSA_UInt32)((airParams.m_outputSize.m_height *
3293                     airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width);
3294                airParams.m_inputSize.m_height =
3295                    (airParams.m_inputSize.m_height>>1)<<1;
3296                airParams.m_inputCoord.m_y =
3297                    (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height -
3298                     airParams.m_inputSize.m_height))>>1);
3299            } else {
3300                /* Width will be cropped */
3301                airParams.m_inputSize.m_width =
3302                    (M4OSA_UInt32)((airParams.m_outputSize.m_width *
3303                     airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height);
3304                airParams.m_inputSize.m_width =
3305                    (airParams.m_inputSize.m_width>>1)<<1;
3306                airParams.m_inputCoord.m_x =
3307                    (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width -
3308                     airParams.m_inputSize.m_width))>>1);
3309            }
3310            pPlaneTemp = pOutplane;
3311        }
3312        /**
3313        * Call AIR functions */
3314        if (M4OSA_NULL == pC->m_air_context) {
3315            err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
3316            if(err != M4NO_ERROR) {
3317                M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3318                    M4AIR_create returned error 0x%x", err);
3319                goto cleanUp;
3320            }
3321        }
3322
3323        err = M4AIR_configure(pC->m_air_context, &airParams);
3324        if (err != M4NO_ERROR) {
3325            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3326                Error when configuring AIR: 0x%x", err);
3327            M4AIR_cleanUp(pC->m_air_context);
3328            goto cleanUp;
3329        }
3330
3331        err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp);
3332        if (err != M4NO_ERROR) {
3333            M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \
3334                Error when getting AIR plane: 0x%x", err);
3335            M4AIR_cleanUp(pC->m_air_context);
3336            goto cleanUp;
3337        }
3338
3339        if (renderingMode == M4xVSS_kBlackBorders) {
3340            for (i=0; i<pOutplane[0].u_height; i++) {
3341                memcpy((void *)pOutPlaneY, (void *)pInPlaneY,
3342                        pOutplane[0].u_width);
3343                pInPlaneY += pOutplane[0].u_width;
3344                pOutPlaneY += pOutplane[0].u_stride;
3345            }
3346            for (i=0; i<pOutplane[1].u_height; i++) {
3347                memcpy((void *)pOutPlaneU, (void *)pInPlaneU,
3348                        pOutplane[1].u_width);
3349                pInPlaneU += pOutplane[1].u_width;
3350                pOutPlaneU += pOutplane[1].u_stride;
3351            }
3352            for (i=0; i<pOutplane[2].u_height; i++) {
3353                memcpy((void *)pOutPlaneV, (void *)pInPlaneV,
3354                        pOutplane[2].u_width);
3355                pInPlaneV += pOutplane[2].u_width;
3356                pOutPlaneV += pOutplane[2].u_stride;
3357            }
3358        }
3359    }
3360cleanUp:
3361    if (renderingMode == M4xVSS_kBlackBorders) {
3362        for (i=0; i<3; i++) {
3363            if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) {
3364                free(pImagePlanesTemp[i].pac_data);
3365                pImagePlanesTemp[i].pac_data = M4OSA_NULL;
3366            }
3367        }
3368    }
3369    return err;
3370}
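
/*
 * Editor's worked example of the black-border arithmetic above (illustrative sizes):
 * for a 640x360 input rendered into a 640x480 output with M4xVSS_kBlackBorders,
 *
 *     height = (360 * 640) / 640;                                  // 360 <= 480: letterbox
 *     airParams.m_outputSize.m_width  = 640;
 *     airParams.m_outputSize.m_height = 360;
 *     pImagePlanesTemp[0].u_topleft   = ((480 - 360) >> 1) * 640;  // 60 border lines = 38400
 *
 * i.e. the resized 640x360 picture lands 60 lines down in the border-filled temporary
 * plane, and the memcpy loops at the end copy the bordered temporary planes into the
 * output planes line by line, honoring the output stride.
 */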
3371
3372M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 (
3373                                        M4VSS3GPP_InternalEditContext *pC,
3374                                        M4VSS3GPP_ClipContext* pClipCtxt) {
3375
3376    M4OSA_ERR err= M4NO_ERROR;
3377
3378    // Allocate memory for YUV plane
3379    pClipCtxt->pPlaneYuv =
3380     (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
3381        3*sizeof(M4VIFI_ImagePlane), M4VS,
3382        (M4OSA_Char*)"pPlaneYuv");
3383
3384    if (pClipCtxt->pPlaneYuv == M4OSA_NULL) {
3385        return M4ERR_ALLOC;
3386    }
3387
3388    pClipCtxt->pPlaneYuv[0].u_height =
3389        pClipCtxt->pSettings->ClipProperties.uiStillPicHeight;
3390    pClipCtxt->pPlaneYuv[0].u_width =
3391        pClipCtxt->pSettings->ClipProperties.uiStillPicWidth;
3392    pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width;
3393    pClipCtxt->pPlaneYuv[0].u_topleft = 0;
3394
3395    pClipCtxt->pPlaneYuv[0].pac_data =
3396     (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
3397         pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 1.5,
3398         M4VS, (M4OSA_Char*)"imageClip YUV data");
3399    if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) {
3400        free(pClipCtxt->pPlaneYuv);
3401        return M4ERR_ALLOC;
3402    }
3403
3404    pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
3405    pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
3406    pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width;
3407    pClipCtxt->pPlaneYuv[1].u_topleft = 0;
3408    pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)(
3409     pClipCtxt->pPlaneYuv[0].pac_data +
3410      pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width);
3411
3412    pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1;
3413    pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1;
3414    pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width;
3415    pClipCtxt->pPlaneYuv[2].u_topleft = 0;
3416    pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)(
3417     pClipCtxt->pPlaneYuv[1].pac_data +
3418      pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width);
3419
3420    err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 (
3421        pClipCtxt->pSettings->pFile,
3422        pC->pOsaFileReadPtr,
3423        pClipCtxt->pPlaneYuv,
3424        pClipCtxt->pSettings->ClipProperties.uiStillPicWidth,
3425        pClipCtxt->pSettings->ClipProperties.uiStillPicHeight);
3426    if (M4NO_ERROR != err) {
3427        free(pClipCtxt->pPlaneYuv[0].pac_data);
3428        free(pClipCtxt->pPlaneYuv);
3429        return err;
3430    }
3431
3432    // Set the YUV data to the decoder using setoption
3433    err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption (
3434        pClipCtxt->pViDecCtxt,
3435        M4DECODER_kOptionID_DecYuvData,
3436        (M4OSA_DataOption)pClipCtxt->pPlaneYuv);
3437    if (M4NO_ERROR != err) {
3438        free(pClipCtxt->pPlaneYuv[0].pac_data);
3439        free(pClipCtxt->pPlaneYuv);
3440        return err;
3441    }
3442
3443    pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE;
3444
3445    // Allocate Yuv plane with effect
3446    pClipCtxt->pPlaneYuvWithEffect =
3447     (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
3448         3*sizeof(M4VIFI_ImagePlane), M4VS,
3449         (M4OSA_Char*)"pPlaneYuvWithEffect");
3450    if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) {
3451        free(pClipCtxt->pPlaneYuv[0].pac_data);
3452        free(pClipCtxt->pPlaneYuv);
3453        return M4ERR_ALLOC;
3454    }
3455
3456    pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight;
3457    pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth;
3458    pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth;
3459    pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0;
3460
3461    pClipCtxt->pPlaneYuvWithEffect[0].pac_data =
3462     (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
3463         pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5,
3464         M4VS, (M4OSA_Char*)"imageClip YUV data");
3465    if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) {
3466        free(pClipCtxt->pPlaneYuv[0].pac_data);
3467        free(pClipCtxt->pPlaneYuv);
3468        free(pClipCtxt->pPlaneYuvWithEffect);
3469        return M4ERR_ALLOC;
3470    }
3471
3472    pClipCtxt->pPlaneYuvWithEffect[1].u_height =
3473        pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
3474    pClipCtxt->pPlaneYuvWithEffect[1].u_width =
3475        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
3476    pClipCtxt->pPlaneYuvWithEffect[1].u_stride =
3477        pClipCtxt->pPlaneYuvWithEffect[1].u_width;
3478    pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0;
3479    pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)(
3480        pClipCtxt->pPlaneYuvWithEffect[0].pac_data +
3481         pClipCtxt->pPlaneYuvWithEffect[0].u_height * pClipCtxt->pPlaneYuvWithEffect[0].u_width);
3482
3483    pClipCtxt->pPlaneYuvWithEffect[2].u_height =
3484        pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1;
3485    pClipCtxt->pPlaneYuvWithEffect[2].u_width =
3486        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
3487    pClipCtxt->pPlaneYuvWithEffect[2].u_stride =
3488        pClipCtxt->pPlaneYuvWithEffect[2].u_width;
3489    pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0;
3490    pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)(
3491        pClipCtxt->pPlaneYuvWithEffect[1].pac_data +
3492         pClipCtxt->pPlaneYuvWithEffect[1].u_height * pClipCtxt->pPlaneYuvWithEffect[1].u_width);
3493
3494    err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3495        pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous,
3496        (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect);
3497    if (M4NO_ERROR != err) {
3498        free(pClipCtxt->pPlaneYuv[0].pac_data);
3499        free(pClipCtxt->pPlaneYuv);
3500        free(pClipCtxt->pPlaneYuvWithEffect);
3501        return err;
3502    }
3503
3504    return M4NO_ERROR;
3505}
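
/*
 * Editor's note on the plane layout built above (illustrative only): both pPlaneYuv and
 * pPlaneYuvWithEffect use a single contiguous buffer of width * height * 1.5 bytes, with
 * the chroma planes aliasing into it rather than owning their own allocations:
 *
 *     // For a W x H still picture:
 *     //   pPlaneYuv[0].pac_data -> buffer               (Y,  W   x H)
 *     //   pPlaneYuv[1].pac_data -> buffer + W*H         (U,  W/2 x H/2)
 *     //   pPlaneYuv[2].pac_data -> buffer + W*H*5/4     (V,  W/2 x H/2)
 *
 * which is why only pPlaneYuv[0].pac_data is freed on the error paths.
 */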
3506
3507M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC,
3508                                             M4VSS3GPP_ClipContext* pClipCtxt,
3509                                             M4_MediaTime ts,
3510                                             M4OSA_Bool bIsClip1,
3511                                             M4VIFI_ImagePlane *pResizePlane,
3512                                             M4VIFI_ImagePlane *pPlaneNoResize,
3513                                             M4VIFI_ImagePlane *pPlaneOut) {
3514
3515    M4OSA_ERR err = M4NO_ERROR;
3516    M4OSA_UInt8 numEffects = 0;
3517    M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
3518    M4OSA_UInt32 yuvFrameWidth = 0, yuvFrameHeight = 0;
3519    M4VIFI_ImagePlane* pTmp = M4OSA_NULL;
3520    M4VIFI_ImagePlane pTemp[3];
3521    M4OSA_UInt8 i = 0;
3522    M4OSA_Bool bSkipFramingEffect = M4OSA_FALSE;
3523
3524    memset((void *)pTemp, 0, 3*sizeof(M4VIFI_ImagePlane));
3525    /* Resize or rotate case */
3526    if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) {
3527        /**
3528        * If we do modify the image, we need an intermediate image plane */
3529        err = M4VSS3GPP_intAllocateYUV420(pResizePlane,
3530            pClipCtxt->m_pPreResizeFrame[0].u_width,
3531            pClipCtxt->m_pPreResizeFrame[0].u_height);
3532        if (M4NO_ERROR != err) {
3533            M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3534             M4VSS3GPP_intAllocateYUV420 returns 0x%x", err);
3535            return err;
3536        }
3537
3538        if ((pClipCtxt->pSettings->FileType ==
3539              M4VIDEOEDITING_kFileType_ARGB8888) &&
3540            (pC->nbActiveEffects == 0) &&
3541            (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) {
3542
3543            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3544                      pClipCtxt->pViDecCtxt,
3545                      M4DECODER_kOptionID_EnableYuvWithEffect,
3546                      (M4OSA_DataOption)M4OSA_TRUE);
3547            if (M4NO_ERROR == err) {
3548                pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3549                    pClipCtxt->pViDecCtxt, &ts,
3550                    pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE);
3551            }
3552
3553        } else {
3554            if (pClipCtxt->pSettings->FileType ==
3555              M4VIDEOEDITING_kFileType_ARGB8888) {
3556                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3557                          pClipCtxt->pViDecCtxt,
3558                          M4DECODER_kOptionID_EnableYuvWithEffect,
3559                          (M4OSA_DataOption)M4OSA_FALSE);
3560            }
3561            if (M4NO_ERROR == err) {
3562                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3563                    pClipCtxt->pViDecCtxt, &ts,
3564                    pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE);
3565            }
3566
3567        }
3568        if (M4NO_ERROR != err) {
3569            M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3570                returns error 0x%x", err);
3571            return err;
3572        }
3573
3574        if (pClipCtxt->pSettings->FileType !=
3575                M4VIDEOEDITING_kFileType_ARGB8888) {
3576            if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
3577                // Save width and height of un-rotated frame
3578                yuvFrameWidth = pClipCtxt->m_pPreResizeFrame[0].u_width;
3579                yuvFrameHeight = pClipCtxt->m_pPreResizeFrame[0].u_height;
3580                err = M4VSS3GPP_intRotateVideo(pClipCtxt->m_pPreResizeFrame,
3581                    pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
3582                if (M4NO_ERROR != err) {
3583                    M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3584                        rotateVideo() returns error 0x%x", err);
3585                    return err;
3586                }
3587                /* Set the new video size for temporary buffer */
3588                M4VSS3GPP_intSetYUV420Plane(pResizePlane,
3589                    pClipCtxt->m_pPreResizeFrame[0].u_width,
3590                    pClipCtxt->m_pPreResizeFrame[0].u_height);
3591            }
3592        }
3593
3594        if (bIsClip1 == M4OSA_TRUE) {
3595            pC->bIssecondClip = M4OSA_FALSE;
3596            numEffects = pC->nbActiveEffects;
3597        } else {
3598            numEffects = pC->nbActiveEffects1;
3599            pC->bIssecondClip = M4OSA_TRUE;
3600        }
3601
3602        if ( numEffects > 0) {
3603            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE;
3604            /* If the video frame needs to be resized or rotated,
3605             * the overlay is applied after the frame has been rendered with the rendering mode,
3606             * so skip the framing (overlay) effect while applying the video effects here. */
3607            bSkipFramingEffect = M4OSA_TRUE;
3608            err = M4VSS3GPP_intApplyVideoEffect(pC,
3609                      pClipCtxt->m_pPreResizeFrame, pResizePlane, bSkipFramingEffect);
3610            if (M4NO_ERROR != err) {
3611                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3612                    M4VSS3GPP_intApplyVideoEffect() err 0x%x", err);
3613                return err;
3614            }
3615            pDecoderRenderFrame= pResizePlane;
3616        } else {
3617            pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame;
3618        }
3619        /* Do rendering mode */
3620        if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
3621            (pClipCtxt->pSettings->FileType !=
3622             M4VIDEOEDITING_kFileType_ARGB8888)) {
3623            if (bIsClip1 == M4OSA_TRUE) {
3624                if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
3625                    err = M4VSS3GPP_intAllocateYUV420(pTemp,
3626                            pPlaneOut[0].u_width, pPlaneOut[0].u_height);
3627                    if (M4NO_ERROR != err) {
3628                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3629                            M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
3630                        pC->ewc.VppError = err;
3631                        return M4NO_ERROR;
3632                    }
3633                    pTmp = pTemp;
3634                } else {
3635                    pTmp = pC->yuv1;
3636                }
3637                err = M4VSS3GPP_intApplyRenderingMode (pC,
3638                        pClipCtxt->pSettings->xVSS.MediaRendering,
3639                        pDecoderRenderFrame,pTmp);
3640            } else {
3641                if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
3642                    err = M4VSS3GPP_intAllocateYUV420(pTemp,
3643                            pPlaneOut[0].u_width, pPlaneOut[0].u_height);
3644                    if (M4NO_ERROR != err) {
3645                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3646                            M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
3647                        pC->ewc.VppError = err;
3648                        return M4NO_ERROR;
3649                    }
3650                    pTmp = pTemp;
3651                } else {
3652                    pTmp = pC->yuv2;
3653                }
3654                err = M4VSS3GPP_intApplyRenderingMode (pC,
3655                        pClipCtxt->pSettings->xVSS.MediaRendering,
3656                        pDecoderRenderFrame,pTmp);
3657            }
3658            if (M4NO_ERROR != err) {
3659                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3660                    M4VSS3GPP_intApplyRenderingMode error 0x%x ", err);
3661                for (i=0; i<3; i++) {
3662                    if (pTemp[i].pac_data != M4OSA_NULL) {
3663                        free(pTemp[i].pac_data);
3664                        pTemp[i].pac_data = M4OSA_NULL;
3665                    }
3666                }
3667                return err;
3668            }
3669            /* Apply the overlay if one exists */
3670            if (bIsClip1 == M4OSA_TRUE) {
3671                if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
3672                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
3673                        pTemp, pC->yuv1);
3674                }
3675                pClipCtxt->lastDecodedPlane = pC->yuv1;
3676            } else {
3677                if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
3678                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
3679                        pTemp, pC->yuv2);
3680                }
3681                pClipCtxt->lastDecodedPlane = pC->yuv2;
3682            }
3683            if (M4NO_ERROR != err) {
3684                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3685                    M4VSS3GPP_intApplyVideoOverlay() error 0x%x ", err);
3686                pC->ewc.VppError = err;
3687                for (i=0; i<3; i++) {
3688                    if (pTemp[i].pac_data != M4OSA_NULL) {
3689                        free(pTemp[i].pac_data);
3690                        pTemp[i].pac_data = M4OSA_NULL;
3691                    }
3692                }
3693                return M4NO_ERROR;
3694            }
3695        } else {
3696            pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect;
3697        }
3698        // free the temp buffer
3699        for (i=0; i<3; i++) {
3700            if (pTemp[i].pac_data != M4OSA_NULL) {
3701                free(pTemp[i].pac_data);
3702                pTemp[i].pac_data = M4OSA_NULL;
3703            }
3704        }
3705
3706        if ((pClipCtxt->pSettings->FileType ==
3707                 M4VIDEOEDITING_kFileType_ARGB8888) &&
3708             (pC->nbActiveEffects == 0) &&
3709             (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
3710            if (bIsClip1 == M4OSA_TRUE) {
3711                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3712                    pClipCtxt->pViDecCtxt,
3713                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
3714                    (M4OSA_DataOption)pC->yuv1);
3715            } else {
3716                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
3717                    pClipCtxt->pViDecCtxt,
3718                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
3719                    (M4OSA_DataOption)pC->yuv2);
3720            }
3721            if (M4NO_ERROR != err) {
3722                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3723                    null decoder setOption error 0x%x ", err);
3724                return err;
3725            }
3726            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE;
3727        }
3728
3729        // Restore the original width and height of the pre-resize frame plane
3730        if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees &&
3731            180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
3732
3733            M4VSS3GPP_intSetYUV420Plane(pClipCtxt->m_pPreResizeFrame,
3734                                        yuvFrameWidth, yuvFrameHeight);
3735        }
3736
3737    } else {
3738        /* Neither resize nor rotation is needed */
3739        if (bIsClip1 == M4OSA_TRUE) {
3740            numEffects = pC->nbActiveEffects;
3741        } else {
3742            numEffects = pC->nbActiveEffects1;
3743        }
3744
3745        if(numEffects > 0) {
3746            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3747                      pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE);
3748            if (M4NO_ERROR != err) {
3749                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3750                    Render returns error 0x%x", err);
3751                return err;
3752            }
3753
3754            bSkipFramingEffect = M4OSA_FALSE;
3755            if (bIsClip1 == M4OSA_TRUE) {
3756                pC->bIssecondClip = M4OSA_FALSE;
3757                err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
3758                            pC->yuv1, bSkipFramingEffect);
3759                pClipCtxt->lastDecodedPlane = pC->yuv1;
3760            } else {
3761                pC->bIssecondClip = M4OSA_TRUE;
3762                err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
3763                            pC->yuv2, bSkipFramingEffect);
3764                pClipCtxt->lastDecodedPlane = pC->yuv2;
3765            }
3766
3767            if (M4NO_ERROR != err) {
3768                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3769                    M4VSS3GPP_intApplyVideoEffect error 0x%x", err);
3770                return err;
3771            }
3772        } else {
3773
3774            if (bIsClip1 == M4OSA_TRUE) {
3775                pTmp = pC->yuv1;
3776            } else {
3777                pTmp = pC->yuv2;
3778            }
3779            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
3780                      pClipCtxt->pViDecCtxt, &ts, pTmp, M4OSA_TRUE);
3781            if (M4NO_ERROR != err) {
3782                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
3783                    Render returns error 0x%x,", err);
3784                return err;
3785            }
3786            pClipCtxt->lastDecodedPlane = pTmp;
3787        }
3788        pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts;
3789    }
3790
3791    return err;
3792}
3793
3794M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
3795                                   M4OSA_UInt32 rotationDegree) {
3796
3797    M4OSA_ERR err = M4NO_ERROR;
3798    M4VIFI_ImagePlane outPlane[3];
3799
3800    if (rotationDegree != 180) {
3801        // Swap width and height of in plane
3802        outPlane[0].u_width = pPlaneIn[0].u_height;
3803        outPlane[0].u_height = pPlaneIn[0].u_width;
3804        outPlane[0].u_stride = outPlane[0].u_width;
3805        outPlane[0].u_topleft = 0;
3806        outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3807            (outPlane[0].u_stride*outPlane[0].u_height), M4VS,
3808            (M4OSA_Char*)("out Y plane for rotation"));
3809        if (outPlane[0].pac_data == M4OSA_NULL) {
3810            return M4ERR_ALLOC;
3811        }
3812
3813        outPlane[1].u_width = pPlaneIn[0].u_height/2;
3814        outPlane[1].u_height = pPlaneIn[0].u_width/2;
3815        outPlane[1].u_stride = outPlane[1].u_width;
3816        outPlane[1].u_topleft = 0;
3817        outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3818            (outPlane[1].u_stride*outPlane[1].u_height), M4VS,
3819            (M4OSA_Char*)("out U plane for rotation"));
3820        if (outPlane[1].pac_data == M4OSA_NULL) {
3821            free((void *)outPlane[0].pac_data);
3822            return M4ERR_ALLOC;
3823        }
3824
3825        outPlane[2].u_width = pPlaneIn[0].u_height/2;
3826        outPlane[2].u_height = pPlaneIn[0].u_width/2;
3827        outPlane[2].u_stride = outPlane[2].u_width;
3828        outPlane[2].u_topleft = 0;
3829        outPlane[2].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
3830            (outPlane[2].u_stride*outPlane[2].u_height), M4VS,
3831            (M4OSA_Char*)("out V plane for rotation"));
3832        if (outPlane[2].pac_data == M4OSA_NULL) {
3833            free((void *)outPlane[0].pac_data);
3834            free((void *)outPlane[1].pac_data);
3835            return M4ERR_ALLOC;
3836        }
3837    }
3838
3839    switch(rotationDegree) {
3840        case 90:
3841            M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
3842            break;
3843
3844        case 180:
3845            // In-place rotation, so planeOut = planeIn
3846            M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneIn);
3847            break;
3848
3849        case 270:
3850            M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
3851            break;
3852
3853        default:
3854            M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree);
3855            err = M4ERR_PARAMETER;
3856            break;
3857    }
3858
3859    if (rotationDegree != 180) {
3860        memset((void *)pPlaneIn[0].pac_data, 0,
3861            (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
3862        memset((void *)pPlaneIn[1].pac_data, 0,
3863            (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
3864        memset((void *)pPlaneIn[2].pac_data, 0,
3865            (pPlaneIn[2].u_width*pPlaneIn[2].u_height));
3866        // Copy Y, U and V planes
3867        memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data,
3868            (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
3869        memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data,
3870            (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
3871        memcpy((void *)pPlaneIn[2].pac_data, (void *)outPlane[2].pac_data,
3872            (pPlaneIn[2].u_width*pPlaneIn[2].u_height));
3873
3874        free((void *)outPlane[0].pac_data);
3875        free((void *)outPlane[1].pac_data);
3876        free((void *)outPlane[2].pac_data);
3877
3878        // Swap the width and height of the in plane
3879        uint32_t temp = 0;
3880        temp = pPlaneIn[0].u_width;
3881        pPlaneIn[0].u_width = pPlaneIn[0].u_height;
3882        pPlaneIn[0].u_height = temp;
3883        pPlaneIn[0].u_stride = pPlaneIn[0].u_width;
3884
3885        temp = pPlaneIn[1].u_width;
3886        pPlaneIn[1].u_width = pPlaneIn[1].u_height;
3887        pPlaneIn[1].u_height = temp;
3888        pPlaneIn[1].u_stride = pPlaneIn[1].u_width;
3889
3890        temp = pPlaneIn[2].u_width;
3891        pPlaneIn[2].u_width = pPlaneIn[2].u_height;
3892        pPlaneIn[2].u_height = temp;
3893        pPlaneIn[2].u_stride = pPlaneIn[2].u_width;
3894    }
3895
3896    return err;
3897}
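
/*
 * Editor's usage sketch (illustrative only; the 640x480 size is an assumption): a 90 or
 * 270 degree rotation swaps the plane dimensions in place, so the caller must propagate
 * the new size afterwards, as M4VSS3GPP_intRenderFrameWithEffect() does with
 * M4VSS3GPP_intSetYUV420Plane():
 *
 *     // planes currently describe a 640x480 YUV420 frame
 *     err = M4VSS3GPP_intRotateVideo(planes, 90);
 *     // on success, planes[0] is now 480x640 (chroma 240x320), same pac_data buffers
 */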
3898
3899M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
3900                                      M4OSA_UInt32 width, M4OSA_UInt32 height) {
3901
3902    M4OSA_ERR err = M4NO_ERROR;
3903
3904    if (planeIn == M4OSA_NULL) {
3905        M4OSA_TRACE1_0("NULL in plane, error");
3906        return M4ERR_PARAMETER;
3907    }
3908
3909    planeIn[0].u_width = width;
3910    planeIn[0].u_height = height;
3911    planeIn[0].u_stride = planeIn[0].u_width;
3912
3913    planeIn[1].u_width = width/2;
3914    planeIn[1].u_height = height/2;
3915    planeIn[1].u_stride = planeIn[1].u_width;
3916
3917    planeIn[2].u_width = width/2;
3918    planeIn[2].u_height = height/2;
3919    planeIn[2].u_stride = planeIn[2].u_width;
3920
3921    return err;
3922}
3923