M4VSS3GPP_EditVideo.c revision b5c7784c96a606890eb8a8b560153ef4a5d1a0d9
1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4VSS3GPP_EditVideo.c
19 * @brief    Video Studio Service 3GPP edit API implementation.
20 * @note
21 ******************************************************************************
22 */
23
24/****************/
25/*** Includes ***/
26/****************/
27
28#include "NXPSW_CompilerSwitches.h"
29/**
30 * Our header */
31#include "M4VSS3GPP_API.h"
32#include "M4VSS3GPP_InternalTypes.h"
33#include "M4VSS3GPP_InternalFunctions.h"
34#include "M4VSS3GPP_InternalConfig.h"
35#include "M4VSS3GPP_ErrorCodes.h"
36
37// StageFright encoders require the resolution to be a multiple of 16
38#include "M4ENCODER_common.h"
39/**
40 * OSAL headers */
41#include "M4OSA_Memory.h" /**< OSAL memory management */
42#include "M4OSA_Debug.h"  /**< OSAL debug management */
43
44/**
45 * component includes */
46#include "M4VFL_transition.h" /**< video effects */
47
48/* for transition behaviour */
49#include <math.h>
50
51/************************************************************************/
52/* Static local functions                                               */
53/************************************************************************/
54
55static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
56    M4VSS3GPP_InternalEditContext *pC );
57static M4OSA_Void
58M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
59                               M4OSA_UInt8 uiClipNumber );
60static M4OSA_ERR
61M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,/*M4OSA_UInt8 uiClip1orClip2,*/
62                              M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut );
63static M4OSA_ERR
64M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
65                             M4VIFI_ImagePlane *pPlaneOut );
66
67static M4OSA_Void
68M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
69                            M4SYS_AccessUnit *pAU );
70static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
71                                                  M4OSA_UInt8 uiCts );
72static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
73                                           M4OSA_UInt32 uiCtsSec );
74static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
75                                           M4OSA_UInt32 *pCtsSec );
76static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
77                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight );
78
79/**
80 ******************************************************************************
81 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
82 * @brief    One step of video processing
83 * @param   pC    (IN/OUT) Internal edit context
84 ******************************************************************************
85 */
86M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
87{
88    M4OSA_ERR err;
89    M4OSA_Int32 iCts, iNextCts;
90    M4ENCODER_FrameMode FrameMode;
91    M4OSA_Bool bSkipFrame;
92    M4OSA_UInt16 offset;
93
94    /**
95     * Check if we reached end cut. Decorrelate input and output encoding
96     * timestamp to handle encoder prefetch
97     */
98    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
99        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
100    {
101        /* Re-adjust video to precise cut time */
102        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
103            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
104        if ( pC->iInOutTimeOffset < 0 ) {
105            pC->iInOutTimeOffset = 0;
106        }
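        /* iInOutTimeOffset now measures how far the video time has run past the clip's
           end cut (clamped to 0); it feeds back into the end-cut test above so that the
           clip is stopped at the precise cut time. */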
107
108        /**
109        * Video is done for this clip */
110        err = M4VSS3GPP_intReachedEndOfVideo(pC);
111
112        /* RC: to know when a file has been processed */
113        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
114        {
115            M4OSA_TRACE1_1(
116                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
117                err);
118        }
119
120        return err;
121    }
122
123    /* Don't change the states if we are in decodeUpTo() */
124    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
125        && (( pC->pC2 == M4OSA_NULL)
126        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
127    {
128        /**
129        * Check Video Mode, depending on the current output CTS */
130        err = M4VSS3GPP_intCheckVideoMode(
131            pC); /**< This function changes the pC->Vstate variable! */
132
133        if (M4NO_ERROR != err)
134        {
135            M4OSA_TRACE1_1(
136                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
137                err);
138            return err;
139        }
140    }
141
142
143    switch( pC->Vstate )
144    {
145        /* _________________ */
146        /*|                 |*/
147        /*| READ_WRITE MODE |*/
148        /*|_________________|*/
149
150        case M4VSS3GPP_kEditVideoState_READ_WRITE:
151        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
152            {
153                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");
154
155                bSkipFrame = M4OSA_FALSE;
156
157                /**
158                * If we were decoding the clip, we must jump to be sure
159                * to get to the correct position. */
160                if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
161                {
162                    /**
163                    * Jump to target video time (tc = to-T) */
164                // Decorrelate input and output encoding timestamp to handle encoder prefetch
165                iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
166                    err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
167                        pC->pC1->pReaderContext,
168                        (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);
169
170                    if( M4NO_ERROR != err )
171                    {
172                        M4OSA_TRACE1_1(
173                            "M4VSS3GPP_intEditStepVideo:\
174                            READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
175                            err);
176                        return err;
177                    }
178
179                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
180                        pC->pC1->pReaderContext,
181                        (M4_StreamHandler *)pC->pC1->pVideoStream,
182                        &pC->pC1->VideoAU);
183
184                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
185                    {
186                        M4OSA_TRACE1_1(
187                            "M4VSS3GPP_intEditStepVideo:\
188                            READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
189                            err);
190                        return err;
191                    }
192
193                    M4OSA_TRACE2_3("A .... read  : cts  = %.0f + %ld [ 0x%x ]",
194                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
195                        pC->pC1->VideoAU.m_size);
196
197                    /* This frame has already been written in the BEGIN_CUT step -> skip it */
198                    if( pC->pC1->VideoAU.m_CTS == iCts
199                        && pC->pC1->iVideoRenderCts >= iCts )
200                    {
201                        bSkipFrame = M4OSA_TRUE;
202                    }
203                }
204
205                /* This frame has already been written in the BEGIN_CUT step -> skip it */
206                if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
207                    && (pC->pC1->VideoAU.m_CTS
208                    + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
209                {
210                    bSkipFrame = M4OSA_TRUE;
211                }
212
213                /**
214                * Remember the clip reading state */
215                pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
216                // Decorrelate input and output encoding timestamp to handle encoder prefetch
217                // Rounding compensates for reader imprecision (m_CTS is actually an integer)
218                iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
219                iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;
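                /* The -1/+1 above widens the acceptance window [iCts, iNextCts) by one
                   tick on each side, so an AU whose integer m_CTS equals the ideal output
                   time is not rejected. For example, with dInputVidCts = 1000, iVoffset = 0
                   and a 33 ms frame duration: iCts = 999, iNextCts = 1033 (assuming the end
                   cut is later), and an AU with m_CTS == 1000 is written. */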
220                /* Avoid writing a last frame of duration 0 */
221                if( iNextCts > pC->pC1->iEndTime )
222                    iNextCts = pC->pC1->iEndTime;
223
224                /**
225                * If the AU should be written, write it, otherwise just skip it */
226                if( ( M4OSA_FALSE == bSkipFrame)
227                    && (( pC->pC1->VideoAU.m_CTS >= iCts)
228                    && (pC->pC1->VideoAU.m_CTS < iNextCts)
229                    && (pC->pC1->VideoAU.m_size > 0)) )
230                {
231                    /**
232                    * Get the output AU to write into */
233                    err = pC->ShellAPI.pWriterDataFcts->pStartAU(
234                        pC->ewc.p3gpWriterContext,
235                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
236                        &pC->ewc.WriterVideoAU);
237
238                    if( M4NO_ERROR != err )
239                    {
240                        M4OSA_TRACE1_1(
241                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
242                            pWriterDataFcts->pStartAU(Video) returns 0x%x!",
243                            err);
244                        return err;
245                    }
246
247                    /**
248                    * Copy the input AU to the output AU */
249                    pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
250                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
251                    pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
252                        (M4OSA_Time)pC->pC1->iVoffset;
253                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
254                    offset = 0;
255                    /* for an H.264 stream, do not copy the first 4 bytes, as they are
256                     header indicators */
257                    if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
258                        == M4DA_StreamTypeVideoMpeg4Avc )
259                        offset = 4;
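                    /* In 3GP/MP4 files each AVC access unit is normally prefixed by a
                       NAL unit length field (4 bytes here); presumably these are the
                       "header indicators" mentioned above, which must not be duplicated
                       in the output AU. */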
260
261                    pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
262                    if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
263                    {
264                        M4OSA_TRACE1_2(
265                            "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
266                             MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
267                            pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
268                        return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
269                    }
270
271                    memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
272                        (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
273                        (pC->ewc.WriterVideoAU.size));
274
275                    /**
276                    * Update time info for the Counter Time System to be equal to the bit
277                    -stream time*/
278                    M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
279                    M4OSA_TRACE2_2("B ---- write : cts  = %lu [ 0x%x ]",
280                        pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);
281
282                    /**
283                    * Write the AU */
284                    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
285                        pC->ewc.p3gpWriterContext,
286                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
287                        &pC->ewc.WriterVideoAU);
288
289                    if( M4NO_ERROR != err )
290                    {
291                        /* The warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
292                         file size is reached.
293                        The editing is then finished and the warning M4VSS3GPP_WAR_EDITING_DONE
294                        is returned. */
295                        if( M4WAR_WRITER_STOP_REQ == err )
296                        {
297                            M4OSA_TRACE1_0(
298                                "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
299                            return M4VSS3GPP_WAR_EDITING_DONE;
300                        }
301                        else
302                        {
303                            M4OSA_TRACE1_1(
304                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
305                                pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
306                                err);
307                            return err;
308                        }
309                    }
310
311                    /**
312                    * Read next AU for next step */
313                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
314                        pC->pC1->pReaderContext,
315                        (M4_StreamHandler *)pC->pC1->pVideoStream,
316                        &pC->pC1->VideoAU);
317
318                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
319                    {
320                        M4OSA_TRACE1_1(
321                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
322                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
323                            err);
324                        return err;
325                    }
326
327                    M4OSA_TRACE2_3("C .... read  : cts  = %.0f + %ld [ 0x%x ]",
328                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
329                        pC->pC1->VideoAU.m_size);
330                }
331                else
332                {
333                    /**
334                    * Decide whether to read the next AU or just to increment the time */
335                    if( ( pC->pC1->VideoAU.m_size == 0)
336                        || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
337                    {
338                        /* Increment time by the encoding period (NO_MORE_AU or reader in advance) */
339                       // Decorrelate input and output encoding timestamp to handle encoder prefetch
340                       pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
341
342                        /* Switch (from AFTER_CUT) to normal mode because time is
343                        no longer frozen */
344                        pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
345                    }
346                    else
347                    {
348                        /* In other cases (reader late), just let the reader catch up
349                         with pC->ewc.dVTo */
350                        err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
351                            pC->pC1->pReaderContext,
352                            (M4_StreamHandler *)pC->pC1->pVideoStream,
353                            &pC->pC1->VideoAU);
354
355                        if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
356                        {
357                            M4OSA_TRACE1_1(
358                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
359                                m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
360                                err);
361                            return err;
362                        }
363
364                        M4OSA_TRACE2_3("D .... read  : cts  = %.0f + %ld [ 0x%x ]",
365                            pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
366                            pC->pC1->VideoAU.m_size);
367                    }
368                }
369            }
370            break;
371
372            /* ____________________ */
373            /*|                    |*/
374            /*| DECODE_ENCODE MODE |*/
375            /*|   BEGIN_CUT MODE   |*/
376            /*|____________________|*/
377
378        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
379        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
380            {
381                M4OSA_TRACE3_0(
382                    "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");
383
384                /**
385                * Decode the video up to the target time
386                (will jump to the previous RAP if needed) */
387                // Decorrelate input and output encoding timestamp to handle encoder prefetch
388                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
389                if( M4NO_ERROR != err )
390                {
391                    M4OSA_TRACE1_1(
392                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
393                        M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
394                        err);
395                    return err;
396                }
397
398                /* If the decoding is not completed, do one more step with time frozen */
399                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
400                {
401                    return M4NO_ERROR;
402                }
403
404                /**
405                * Reset the video pre-processing error before calling the encoder */
406                pC->ewc.VppError = M4NO_ERROR;
407
408                M4OSA_TRACE2_0("E ++++ encode AU");
409
410                /**
411                * Encode the frame (rendering, filtering and writing will be done
412                 in encoder callbacks) */
413                if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
414                    FrameMode = M4ENCODER_kIFrame;
415                else
416                    FrameMode = M4ENCODER_kNormalFrame;
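                /* Forcing an I-frame at a begin cut ensures the re-encoded stream starts
                   with a frame that can be decoded without earlier references. */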
417
418                // Decorrelate input and output encoding timestamp to handle encoder prefetch
419                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
420                pC->ewc.dInputVidCts, FrameMode);
421                /**
422                * Check if we had a VPP error... */
423                if( M4NO_ERROR != pC->ewc.VppError )
424                {
425                    M4OSA_TRACE1_1(
426                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
427                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
428                        pC->ewc.VppError);
429#ifdef M4VSS_SUPPORT_OMX_CODECS
430
431                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
432                    {
433#endif //M4VSS_SUPPORT_OMX_CODECS
434
435                        return pC->ewc.VppError;
436#ifdef M4VSS_SUPPORT_OMX_CODECS
437
438                    }
439
440#endif                                   //M4VSS_SUPPORT_OMX_CODECS
441
442                }
443                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
444                {
445                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
446                    {
447                        M4OSA_TRACE1_0(
448                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
449                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
450                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
451                    }
452                    /* The warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
453                    file size is reached.
454                    The editing is then finished and the warning M4VSS3GPP_WAR_EDITING_DONE
455                    is returned. */
456                    else if( M4WAR_WRITER_STOP_REQ == err )
457                    {
458                        M4OSA_TRACE1_0(
459                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
460                        return M4VSS3GPP_WAR_EDITING_DONE;
461                    }
462                    else
463                    {
464                        M4OSA_TRACE1_1(
465                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
466                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
467                            err);
468                        return err;
469                    }
470                }
471
472                /**
473                * Increment time by the encoding period (for begin cut, do not increment,
474                so as not to lose P-frames) */
475                if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
476                {
477                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
478                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
479                }
480            }
481            break;
482
483            /* _________________ */
484            /*|                 |*/
485            /*| TRANSITION MODE |*/
486            /*|_________________|*/
487
488        case M4VSS3GPP_kEditVideoState_TRANSITION:
489            {
490                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");
491
492                /* Don't decode more than needed */
493                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
494                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
495                {
496                    /**
497                    * Decode the clip1 video up to the target time
498                    (will jump to the previous RAP if needed) */
499                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
500                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
501                         (M4OSA_Int32)pC->ewc.dInputVidCts);
502                    if( M4NO_ERROR != err )
503                    {
504                        M4OSA_TRACE1_1(
505                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
506                            M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
507                            err);
508                        return err;
509                    }
510
511                    /* If the decoding is not completed, do one more step with time frozen */
512                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
513                    {
514                        return M4NO_ERROR;
515                    }
516                }
517
518                /* Don't decode more than needed */
519                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
520                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
521                {
522                    /**
523                    * Decode the clip2 video up to the target time
524                        (will jump to the previous RAP if needed) */
525                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
526                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
527                         (M4OSA_Int32)pC->ewc.dInputVidCts);
528                    if( M4NO_ERROR != err )
529                    {
530                        M4OSA_TRACE1_1(
531                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
532                            M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
533                            err);
534                        return err;
535                    }
536
537                    /* If the decoding is not completed, do one more step with time frozen */
538                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
539                    {
540                        return M4NO_ERROR;
541                    }
542                }
543
544                /**
545                * Reset the video pre-processing error before calling the encoder */
546                pC->ewc.VppError = M4NO_ERROR;
547
548                M4OSA_TRACE2_0("F **** blend AUs");
549
550                /**
551                * Encode the frame (rendering, filtering and writing will be done
552                in encoder callbacks) */
553                // Decorrelate input and output encoding timestamp to handle encoder prefetch
554                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
555                    pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);
556
557                /**
558                * If encode returns a process frame error, it is likely to be a VPP error */
559                if( M4NO_ERROR != pC->ewc.VppError )
560                {
561                    M4OSA_TRACE1_1(
562                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
563                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
564                        pC->ewc.VppError);
565#ifdef M4VSS_SUPPORT_OMX_CODECS
566
567                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
568                    {
569
570#endif //M4VSS_SUPPORT_OMX_CODECS
571
572                        return pC->ewc.VppError;
573#ifdef M4VSS_SUPPORT_OMX_CODECS
574
575                    }
576
577#endif //M4VSS_SUPPORT_OMX_CODECS
578
579                }
580                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
581                {
582                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
583                    {
584                        M4OSA_TRACE1_0(
585                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
586                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
587                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
588                    }
589
590                    /* The warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
591                     file size is reached.
592                    The editing is then finished and the warning M4VSS3GPP_WAR_EDITING_DONE is
593                     returned. */
594                    else if( M4WAR_WRITER_STOP_REQ == err )
595                    {
596                        M4OSA_TRACE1_0(
597                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
598                        return M4VSS3GPP_WAR_EDITING_DONE;
599                    }
600                    else
601                    {
602                        M4OSA_TRACE1_1(
603                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
604                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
605                            err);
606                        return err;
607                    }
608                }
609
610                /**
611                * Increment time by the encoding period */
612                // Decorrelate input and output encoding timestamp to handle encoder prefetch
613                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
614            }
615            break;
616
617            /* ____________ */
618            /*|            |*/
619            /*| ERROR CASE |*/
620            /*|____________|*/
621
622        default:
623            M4OSA_TRACE1_1(
624                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
625                returning M4VSS3GPP_ERR_INTERNAL_STATE",
626                pC->Vstate);
627            return M4VSS3GPP_ERR_INTERNAL_STATE;
628    }
629
630    /**
631    * Return with no error */
632    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
633    return M4NO_ERROR;
634}
635
636/**
637 ******************************************************************************
638 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
639 * @brief    Check which video process mode we must use, depending on the output CTS.
640 * @param   pC    (IN/OUT) Internal edit context
641 ******************************************************************************
642 */
643static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
644    M4VSS3GPP_InternalEditContext *pC )
645{
646    M4OSA_ERR err;
647    // Decorrelate input and output encoding timestamp to handle encoder prefetch
648    const M4OSA_Int32  t = (M4OSA_Int32)pC->ewc.dInputVidCts;
649    /**< Transition duration */
650    const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
651
652    M4OSA_Int32 iTmp;
653
654    const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate;
655
656    /**
657    * Check if Clip1 is on its begin cut, or in an effect zone */
658    M4VSS3GPP_intCheckVideoEffects(pC, 1);
659
660    /**
661    * Check if we are in the transition with next clip */
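    /* i.e. the clip-local time (t - iVoffset) has entered the last TD (transition
       duration) before clip 1's end cut. */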
662    if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) )
663    {
664        /**
665        * We are in a transition */
666        pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION;
667        pC->bTransitionEffect = M4OSA_TRUE;
668
669        /**
670        * Open second clip for transition, if not yet opened */
671        if( M4OSA_NULL == pC->pC2 )
672        {
673            err = M4VSS3GPP_intOpenClip(pC, &pC->pC2,
674                &pC->pClipList[pC->uiCurrentClip + 1]);
675
676            if( M4NO_ERROR != err )
677            {
678                M4OSA_TRACE1_1(
679                    "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!",
680                    err);
681                return err;
682            }
683
684            /**
685            * Add current video output CTS to the clip offset
686            * (audio output CTS is not yet at the transition, so audio
687            *  offset can't be updated yet). */
688            // Decorrelate input and output encoding timestamp to handle encoder prefetch
689            pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts;
690
691            /**
692            * 2005-03-24: BugFix for audio-video synchro:
693            * Update transition duration due to the actual video transition beginning time.
694            * It will avoid desynchronization when doing the audio transition. */
695           // Decorrelate input and output encoding timestamp to handle encoder prefetch
696            iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\
697             - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset);
698            if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration)
699            /**< Test in case of a very short transition */
700            {
701                pC->pTransitionList[pC->
702                    uiCurrentClip].uiTransitionDuration -= iTmp;
703
704                /**
705                * Don't forget to also correct the total duration used for the progress bar
706                * (it was computed with the original transition duration). */
707                pC->ewc.iOutputDuration += iTmp;
708            }
709            /**< No "else" here because it's hard to predict the effect of a 0-duration transition... */
710        }
711
712        /**
713        * Check effects for clip2 */
714        M4VSS3GPP_intCheckVideoEffects(pC, 2);
715    }
716    else
717    {
718        /**
719        * We are not in a transition */
720        pC->bTransitionEffect = M4OSA_FALSE;
721
722        /* If there is an effect we go to decode/encode mode */
723        if ((pC->nbActiveEffects > 0) ||(pC->nbActiveEffects1 > 0))
724        {
725            pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
726        }
727        /* We do a begin cut, except if already done (time is not progressing because we want
728        to catch all P-frames after the cut) */
729        else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
730        {
731            if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
732                pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
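                /* For H.264, keep re-encoding until the end of the clip (bEncodeTillEoF);
                   mixing re-encoded and copied H.264 AUs after a cut or effect can cause
                   visible distortion (see the read/write test further below). */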
733                pC->bEncodeTillEoF = M4OSA_TRUE;
734            } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
735                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
736                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
737            } else {
738                pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
739            }
740        }
741        /* Else we are in default copy/paste mode */
742        else
743        {
744            if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
745                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
746            {
747                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
748            }
749            else if( pC->bIsMMS == M4OSA_TRUE )
750            {
751                M4OSA_UInt32 currentBitrate;
752                M4OSA_ERR err = M4NO_ERROR;
753
754                /* Do we need to reencode the video to downgrade the bitrate or not ? */
755                /* Let's compute the current bitrate of the current edited clip */
756                err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption(
757                    pC->pC1->pReaderContext,
758                    M4READER_kOptionID_Bitrate, &currentBitrate);
759
760                if( err != M4NO_ERROR )
761                {
762                    M4OSA_TRACE1_1(
763                        "M4VSS3GPP_intCheckVideoMode:\
764                        Error when getting next bitrate of edited clip: 0x%x",
765                        err);
766                    return err;
767                }
768
769                /* Remove audio bitrate */
770                currentBitrate -= 12200;
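                /* 12200 bps corresponds to the AMR-NB 12.2 kbps mode, presumably the
                   audio bitrate assumed for MMS clips. */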
771
772                /* Test if we go into copy/paste mode or into decode/encode mode */
773                if( currentBitrate > pC->uiMMSVideoBitrate )
774                {
775                    pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
776                }
777                else
778                {
779                    pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
780                }
781            }
782            else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
783                    (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
784                    pC->bEncodeTillEoF == M4OSA_FALSE)
785            {
786                /**
787                 * Test if we go into copy/paste mode or into decode/encode mode
788                 * If an external effect has been applied on the current clip
789                 * then continue to be in decode/encode mode till end of
790                 * clip to avoid H.264 distortion.
791                 */
792                pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
793            }
794        }
795    }
796
797    /**
798    * Check if we need to create an encoder */
799    if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate)
800        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
801        == previousVstate)) /**< read mode */
802        && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate)
803        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate)
804        || (M4VSS3GPP_kEditVideoState_TRANSITION
805        == pC->Vstate)) /**< encode mode */
806        && pC->bIsMMS == M4OSA_FALSE )
807    {
808        /**
809        * Create the encoder */
810        err = M4VSS3GPP_intCreateVideoEncoder(pC);
811
812        if( M4NO_ERROR != err )
813        {
814            M4OSA_TRACE1_1(
815                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
816                err);
817            return err;
818        }
819    }
820    else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL )
821    {
822        /**
823        * Create the encoder */
824        err = M4VSS3GPP_intCreateVideoEncoder(pC);
825
826        if( M4NO_ERROR != err )
827        {
828            M4OSA_TRACE1_1(
829                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
830                err);
831            return err;
832        }
833    }
834
835    /**
836    * When we go from filtering to read/write, we must act like a begin cut,
837    * because the last filtered image may be different from the original image. */
838    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
839        || (M4VSS3GPP_kEditVideoState_TRANSITION
840        == previousVstate)) /**< encode mode */
841        && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */
842        && (pC->bEncodeTillEoF == M4OSA_FALSE) )
843    {
844        pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
845    }
846
847    /**
848    * Check if we need to destroy the encoder */
849    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
850        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
851        || (M4VSS3GPP_kEditVideoState_TRANSITION
852        == previousVstate)) /**< encode mode */
853        && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate)
854        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
855        == pC->Vstate)) /**< read mode */
856        && pC->bIsMMS == M4OSA_FALSE )
857    {
858        /**
859        * Destroy the previously created encoder */
860        err = M4VSS3GPP_intDestroyVideoEncoder(pC);
861
862        if( M4NO_ERROR != err )
863        {
864            M4OSA_TRACE1_1(
865                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!",
866                err);
867            return err;
868        }
869    }
870
871    /**
872    * Return with no error */
873    M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR");
874    return M4NO_ERROR;
875}
876
877/******************************************************************************
878 * M4OSA_ERR M4VSS3GPP_intStartAU()
879 * @brief    StartAU writer-like interface used for the VSS 3GPP only
880 * @note
881 * @param    pContext: (IN) It is the VSS 3GPP context in our case
882 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
883 * @param    pAU:      (IN/OUT) Access Unit to be prepared.
884 * @return    M4NO_ERROR: there is no error
885 ******************************************************************************
886 */
887M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext,
888                               M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
889{
890    M4OSA_ERR err;
891    M4OSA_UInt32 uiMaxAuSize;
892
893    /**
894    * Given context is actually the VSS3GPP context */
895    M4VSS3GPP_InternalEditContext *pC =
896        (M4VSS3GPP_InternalEditContext *)pContext;
897
898    /**
899    * Get the output AU to write into */
900    err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext,
901        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
902
903    if( M4NO_ERROR != err )
904    {
905        M4OSA_TRACE1_1(
906            "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!",
907            err);
908        return err;
909    }
910
911    /**
912    *    Return */
913    M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR");
914    return M4NO_ERROR;
915}
916
917/******************************************************************************
918 * M4OSA_ERR M4VSS3GPP_intProcessAU()
919 * @brief    ProcessAU writer-like interface used for the VSS 3GPP only
920 * @note
921 * @param    pContext: (IN) It is the VSS 3GPP context in our case
922 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
923 * @param    pAU:      (IN/OUT) Access Unit to be written
924 * @return    M4NO_ERROR: there is no error
925 ******************************************************************************
926 */
927M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
928                                 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
929{
930    M4OSA_ERR err;
931
932    /**
933    * Given context is actually the VSS3GPP context */
934    M4VSS3GPP_InternalEditContext *pC =
935        (M4VSS3GPP_InternalEditContext *)pContext;
936
937    /**
938    * Fix the encoded AU time */
939    // Decorrelate input and output encoding timestamp to handle encoder prefetch
940    pC->ewc.dOutputVidCts = pAU->CTS;
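    /* Record the CTS actually produced by the encoder; because of encoder prefetch it
       may lag behind the input CTS (dInputVidCts) that drives decoding and rendering. */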
941    /**
942    * Update time info for the Counter Time System to be equal to the bit-stream time */
943    M4VSS3GPP_intUpdateTimeInfo(pC, pAU);
944
945    /**
946    * Write the AU */
947    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
948        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
949
950    if( M4NO_ERROR != err )
951    {
952        M4OSA_TRACE1_1(
953            "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
954            err);
955        return err;
956    }
957
958    /**
959    *    Return */
960    M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
961    return M4NO_ERROR;
962}
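
/* Note: M4VSS3GPP_intStartAU() and M4VSS3GPP_intProcessAU() expose a writer-like
   interface (same role as pStartAU/pProcessAU) so that, presumably, the encoder shell
   can write its output through the VSS 3GPP edit context given as writer context. */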
963
964/**
965 ******************************************************************************
966 * M4OSA_ERR M4VSS3GPP_intVPP()
967 * @brief    We implement our own VideoPreProcessing function
968 * @note    It is called by the video encoder
969 * @param    pContext    (IN) VPP context, which actually is the VSS 3GPP context in our case
970 * @param    pPlaneIn    (IN)
971 * @param    pPlaneOut    (IN/OUT) Pointer to an array of 3 planes that will contain the output
972 *                                  YUV420 image
973 * @return    M4NO_ERROR:    No error
974 ******************************************************************************
975 */
976M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
977                           M4VIFI_ImagePlane *pPlaneOut )
978{
979    M4OSA_ERR err;
980    M4_MediaTime t;
981    M4VIFI_ImagePlane *pTmp = M4OSA_NULL;
982    M4VIFI_ImagePlane pTemp1[3],pTemp2[3];
983    M4OSA_UInt32  i =0;
984    /**
985    * VPP context is actually the VSS3GPP context */
986    M4VSS3GPP_InternalEditContext *pC =
987        (M4VSS3GPP_InternalEditContext *)pContext;
988    pTemp1[0].pac_data = pTemp2[0].pac_data = M4OSA_NULL;
989    /**
990    * Reset VPP error remembered in context */
991    pC->ewc.VppError = M4NO_ERROR;
992
993    /**
994    * At the end of the editing, we may be called when no more clip is loaded.
995    * (because to close the encoder properly it must be stepped once or twice...) */
996    if( M4OSA_NULL == pC->pC1 )
997    {
998        /**
999        * We must fill the input of the encoder with a dummy image, because
1000        * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */
1001        memset((void *)pPlaneOut[0].pac_data,0,
1002            pPlaneOut[0].u_stride * pPlaneOut[0].u_height);
1003        memset((void *)pPlaneOut[1].pac_data,0,
1004            pPlaneOut[1].u_stride * pPlaneOut[1].u_height);
1005        memset((void *)pPlaneOut[2].pac_data,0,
1006            pPlaneOut[2].u_stride * pPlaneOut[2].u_height);
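        /* A constant (all-zero) frame compresses into a very small AU, which is all that
           is needed while the encoder is being stepped to flush its last frames. */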
1007
1008        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)");
1009        return M4NO_ERROR;
1010    }
1011
1012    /**
1013    **************** Transition case ****************/
1014    if( M4OSA_TRUE == pC->bTransitionEffect )
1015    {
1016        if (M4OSA_NULL == pTemp1[0].pac_data)
1017        {
1018            err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth,
1019                                              pC->ewc.uiVideoHeight);
1020            if (M4NO_ERROR != err)
1021            {
1022                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \
1023                               returning M4NO_ERROR", err);
1024                pC->ewc.VppError = err;
1025                return M4NO_ERROR; /**< Return no error to the encoder core
1026                                   (else it may leak in some situations...) */
1027            }
1028        }
1029        if (M4OSA_NULL == pTemp2[0].pac_data)
1030        {
1031            err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth,
1032                                              pC->ewc.uiVideoHeight);
1033            if (M4NO_ERROR != err)
1034            {
1035                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \
1036                               returning M4NO_ERROR", err);
1037                pC->ewc.VppError = err;
1038                return M4NO_ERROR; /**< Return no error to the encoder core
1039                                  (else it may leak in some situations...) */
1040            }
1041        }
1042        /**
1043        * We need two intermediate planes */
1044        if( M4OSA_NULL == pC->yuv1[0].pac_data )
1045        {
1046            err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1047                pC->ewc.uiVideoHeight);
1048
1049            if( M4NO_ERROR != err )
1050            {
1051                M4OSA_TRACE1_1(
1052                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1053                    returning M4NO_ERROR",
1054                    err);
1055                pC->ewc.VppError = err;
1056                return
1057                    M4NO_ERROR; /**< Return no error to the encoder core
1058                                (else it may leak in some situations...) */
1059            }
1060        }
1061
1062        if( M4OSA_NULL == pC->yuv2[0].pac_data )
1063        {
1064            err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth,
1065                pC->ewc.uiVideoHeight);
1066
1067            if( M4NO_ERROR != err )
1068            {
1069                M4OSA_TRACE1_1(
1070                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1071                    returning M4NO_ERROR",
1072                    err);
1073                pC->ewc.VppError = err;
1074                return
1075                    M4NO_ERROR; /**< Return no error to the encoder core
1076                                (else it may leak in some situations...) */
1077            }
1078        }
1079
1080        /**
1081        * Allocate new temporary plane if needed */
1082        if( M4OSA_NULL == pC->yuv3[0].pac_data )
1083        {
1084            err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth,
1085                pC->ewc.uiVideoHeight);
1086
1087            if( M4NO_ERROR != err )
1088            {
1089                M4OSA_TRACE1_1(
1090                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1091                    returning M4NO_ERROR",
1092                    err);
1093                pC->ewc.VppError = err;
1094                return
1095                    M4NO_ERROR; /**< Return no error to the encoder core
1096                                (else it may leak in some situations...) */
1097            }
1098        }
1099
1100        /**
1101        * Compute the time in the clip1 base: t = to - Offset */
1102        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1103        t = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1104
1105        /**
1106        * Render Clip1 */
1107        if( pC->pC1->isRenderDup == M4OSA_FALSE )
1108        {
1109            if(pC->nbActiveEffects > 0)
1110            {
1111                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt,
1112                                                                      &t, pTemp1,
1113                                                                      M4OSA_TRUE);
1114                if (M4NO_ERROR != err)
1115                {
1116                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \
1117                                    returning M4NO_ERROR", err);
1118                    pC->ewc.VppError = err;
1119                    return M4NO_ERROR; /**< Return no error to the encoder core
1120                                       (else it may leak in some situations...) */
1121                }
1122                pC->bIssecondClip = M4OSA_FALSE;
1123                err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp1 ,pC->yuv1 );
1124                if (M4NO_ERROR != err)
1125                {
1126                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \
1127                                    returning M4NO_ERROR", err);
1128                    pC->ewc.VppError = err;
1129                    return M4NO_ERROR; /**< Return no error to the encoder core
1130                                       (else it may leak in some situations...) */
1131                }
1132                pC->pC1->lastDecodedPlane = pTemp1;
1133            }
1134            else
1135            {
1136                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt,
1137                                                                      &t, pC->yuv1,
1138                                                                      M4OSA_TRUE);
1139                if (M4NO_ERROR != err)
1140                {
1141                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \
1142                                    returning M4NO_ERROR", err);
1143                    pC->ewc.VppError = err;
1144                    return M4NO_ERROR; /**< Return no error to the encoder core
1145                                      (else it may leak in some situations...) */
1146                }
1147                pC->pC1->lastDecodedPlane = pC->yuv1;
1148            }
1149            pC->pC1->iVideoRenderCts = (M4OSA_Int32)t;
1150        }
1151        else
1152        {
1153            /* Copy last decoded plane to output plane */
1154            memcpy((void *)pTmp[0].pac_data,
1155                (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1156                (pTmp[0].u_height * pTmp[0].u_width));
1157            memcpy((void *)pTmp[1].pac_data,
1158                (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1159                (pTmp[1].u_height * pTmp[1].u_width));
1160            memcpy((void *)pTmp[2].pac_data,
1161                (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1162                (pTmp[2].u_height * pTmp[2].u_width));
1163            pC->pC1->lastDecodedPlane = pTmp;
1164        }
1165
1166        /**
1167        * Compute the time in the clip2 base: t = to - Offset */
1168        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1169        t = pC->ewc.dInputVidCts - pC->pC2->iVoffset;
1170        /**
1171        * Render Clip2 */
1172        if( pC->pC2->isRenderDup == M4OSA_FALSE )
1173        {
1174            if(pC->nbActiveEffects1 > 0)
1175            {
1176                err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt,
1177                                                                      &t, pTemp2,
1178                                                                      M4OSA_TRUE);
1179                if (M4NO_ERROR != err)
1180                {
1181                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \
1182                                   returning M4NO_ERROR", err);
1183                    pC->ewc.VppError = err;
1184                    return M4NO_ERROR; /**< Return no error to the encoder core
1185                                       (else it may leak in some situations...) */
1186                }
1187
1188                pC->bIssecondClip = M4OSA_TRUE;
1189                err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp2 ,pC->yuv2);
1190                if (M4NO_ERROR != err)
1191                {
1192                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \
1193                                    returning M4NO_ERROR", err);
1194                    pC->ewc.VppError = err;
1195                    return M4NO_ERROR; /**< Return no error to the encoder core
1196                                       (else it may leak in some situations...) */
1197                }
1198                pC->pC2->lastDecodedPlane = pTemp2;
1199            }
1200            else
1201            {
1202                err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt,
1203                                                                      &t, pC->yuv2,
1204                                                                      M4OSA_TRUE);
1205                if (M4NO_ERROR != err)
1206                {
1207                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \
1208                                    returning M4NO_ERROR", err);
1209                    pC->ewc.VppError = err;
1210                    return M4NO_ERROR; /**< Return no error to the encoder core
1211                                       (else it may leak in some situations...) */
1212                }
1213                pC->pC2->lastDecodedPlane = pC->yuv2;
1214            }
1215            pC->pC2->iVideoRenderCts = (M4OSA_Int32)t;
1216        }
1217        else
1218        {
1219            /* Copy last decoded plane to output plane */
1220            memcpy((void *)pTmp[0].pac_data,
1221                (void *)pC->pC2->lastDecodedPlane[0].pac_data,
1222                (pTmp[0].u_height * pTmp[0].u_width));
1223            memcpy((void *)pTmp[1].pac_data,
1224                (void *)pC->pC2->lastDecodedPlane[1].pac_data,
1225                (pTmp[1].u_height * pTmp[1].u_width));
1226            memcpy((void *)pTmp[2].pac_data,
1227                (void *)pC->pC2->lastDecodedPlane[2].pac_data,
1228                (pTmp[2].u_height * pTmp[2].u_width));
1229            pC->pC2->lastDecodedPlane = pTmp;
1230        }
1231
1232
1233        pTmp = pPlaneOut;
1234        err = M4VSS3GPP_intVideoTransition(pC, pTmp);
1235
1236        if( M4NO_ERROR != err )
1237        {
1238            M4OSA_TRACE1_1(
1239                "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\
1240                returning M4NO_ERROR",
1241                err);
1242            pC->ewc.VppError = err;
1243            return  M4NO_ERROR; /**< Return no error to the encoder core
1244                                (else it may leak in some situations...) */
1245        }
1246        for (i=0; i < 3; i++)
1247        {
1248            if (pTemp2[i].pac_data != M4OSA_NULL)
1249            {
1250                free(pTemp2[i].pac_data);
1251                pTemp2[i].pac_data = M4OSA_NULL;
1252            }
1253
1254
1255            if (pTemp1[i].pac_data != M4OSA_NULL)
1256            {
1257                free(pTemp1[i].pac_data);
1258                pTemp1[i].pac_data = M4OSA_NULL;
1259            }
1260        }
1261    }
1262    /**
1263    **************** No Transition case ****************/
1264    else
1265    {
1266        /**
1267        * Check if there is a filter */
1268        if( pC->nbActiveEffects > 0 )
1269        {
1270            /**
1271            * If we do modify the image, we need an intermediate image plane */
1272            if( M4OSA_NULL == pC->yuv1[0].pac_data )
1273            {
1274                err =
1275                    M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1276                    pC->ewc.uiVideoHeight);
1277
1278                if( M4NO_ERROR != err )
1279                {
1280                    M4OSA_TRACE1_1(
1281                        "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 returns 0x%x,\
1282                        returning M4NO_ERROR",
1283                        err);
1284                    pC->ewc.VppError = err;
1285                    return
1286                        M4NO_ERROR; /**< Return no error to the encoder core
1287                                    (else it may leak in some situations...) */
1288                }
1289            }
1290            /**
1291            * The image is rendered in the intermediate image plane */
1292            pTmp = pC->yuv1;
1293        }
1294        else
1295        {
1296            /**
1297            * No filter, the image is directly rendered in pPlaneOut */
1298            pTmp = pPlaneOut;
1299        }
1300
1301        /**
1302        * Compute the time in the clip base: t = to - Offset */
1303        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1304        t = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1305
1306        if( pC->pC1->isRenderDup == M4OSA_FALSE )
1307        {
1308            err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1309                pC->pC1->pViDecCtxt, &t, pTmp, M4OSA_TRUE);
1310
1311            if( M4NO_ERROR != err )
1312            {
1313                M4OSA_TRACE1_1(
1314                    "M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender returns 0x%x,\
1315                    returning M4NO_ERROR",
1316                    err);
1317                pC->ewc.VppError = err;
1318                return
1319                    M4NO_ERROR; /**< Return no error to the encoder core
1320                                (else it may leak in some situations...) */
1321            }
1322            pC->pC1->lastDecodedPlane = pTmp;
1323            pC->pC1->iVideoRenderCts = (M4OSA_Int32)t;
1324        }
1325        else
1326        {
1327            /* Copy last decoded plane to output plane */
1328            memcpy((void *)pTmp[0].pac_data,
1329                (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1330                (pTmp[0].u_height * pTmp[0].u_width));
1331            memcpy((void *)pTmp[1].pac_data,
1332                (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1333                (pTmp[1].u_height * pTmp[1].u_width));
1334            memcpy((void *)pTmp[2].pac_data,
1335                (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1336                (pTmp[2].u_height * pTmp[2].u_width));
1337            pC->pC1->lastDecodedPlane = pTmp;
1338        }
1339
1340        M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", t);
1341
1342        /**
1343        * Apply the clip1 effect */
1344        //        if (pC->iClip1ActiveEffect >= 0)
1345        if( pC->nbActiveEffects > 0 )
1346        {
1347            err = M4VSS3GPP_intApplyVideoEffect(pC,/*1,*/ pC->yuv1, pPlaneOut);
1348
1349            if( M4NO_ERROR != err )
1350            {
1351                M4OSA_TRACE1_1(
1352                    "M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x,\
1353                    returning M4NO_ERROR",
1354                    err);
1355                pC->ewc.VppError = err;
1356                return
1357                    M4NO_ERROR; /**< Return no error to the encoder core
1358                                (else it may leak in some situations...) */
1359            }
1360        }
1361    }
1362
1363    /**
1364    *    Return */
1365    M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
1366    return M4NO_ERROR;
1367}
1368
1369/**
1370 ******************************************************************************
1371 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
1372 * @brief    Apply video effect from pPlaneIn to pPlaneOut
1373 * @param   pC                (IN/OUT) Internal edit context
1375 * @param    pPlaneIn        (IN)     Input raw YUV420 image
1376 * @param    pPlaneOut        (IN/OUT) Output raw YUV420 image
1377 * @return    M4NO_ERROR:                        No error
1378 ******************************************************************************
1379 */
1380static M4OSA_ERR
1381M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,
1382                               M4VIFI_ImagePlane *pPlaneIn,
1383                               M4VIFI_ImagePlane *pPlaneOut )
1384{
1385    M4OSA_ERR err;
1386
1387    M4VSS3GPP_ClipContext *pClip;
1388    M4VSS3GPP_EffectSettings *pFx;
1389    M4VFL_CurtainParam curtainParams;
1390    M4VSS3GPP_ExternalProgress extProgress;
1391
1392    M4OSA_Double VideoEffectTime;
1393    M4OSA_Double PercentageDone;
1394    M4OSA_Int32 tmp;
1395
1396    M4VIFI_ImagePlane *pPlaneTempIn;
1397    M4VIFI_ImagePlane *pPlaneTempOut;
1398    M4OSA_UInt8 i;
1399    M4OSA_UInt8 NumActiveEffects =0;
1400
1401
1402    pClip = pC->pC1;
1403    if (pC->bIssecondClip == M4OSA_TRUE)
1404    {
1405        NumActiveEffects = pC->nbActiveEffects1;
1406    }
1407    else
1408    {
1409        NumActiveEffects = pC->nbActiveEffects;
1410    }
1411
1412    /**
1413    * Allocate temporary plane if needed RC */
1414    if (M4OSA_NULL == pC->yuv4[0].pac_data && NumActiveEffects  > 1)
1415    {
1416        err = M4VSS3GPP_intAllocateYUV420(pC->yuv4, pC->ewc.uiVideoWidth,
1417            pC->ewc.uiVideoHeight);
1418
1419        if( M4NO_ERROR != err )
1420        {
1421            M4OSA_TRACE1_1(
1422                "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1423                returning M4NO_ERROR",
1424                err);
1425            pC->ewc.VppError = err;
1426            return
1427                M4NO_ERROR; /**< Return no error to the encoder core
1428                            (else it may leak in some situations...) */
1429        }
1430    }
1431
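    /*
     * Effects are chained by ping-ponging between the temporary plane pC->yuv4
     * and pPlaneOut: the first destination is chosen from the parity of
     * NumActiveEffects so that the last effect applied in the loop below
     * always writes its result into pPlaneOut.
     */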
1432    if (NumActiveEffects  % 2 == 0)
1433    {
1434        pPlaneTempIn = pPlaneIn;
1435        pPlaneTempOut = pC->yuv4;
1436    }
1437    else
1438    {
1439        pPlaneTempIn = pPlaneIn;
1440        pPlaneTempOut = pPlaneOut;
1441    }
1442
1443    for (i=0; i<NumActiveEffects; i++)
1444    {
1445        if (pC->bIssecondClip == M4OSA_TRUE)
1446        {
1447
1448
1449            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1450            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1451            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1452            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1453                              pC->pTransitionList[pC->uiCurrentClip].
1454                              uiTransitionDuration- pFx->uiStartTime;
1455        }
1456        else
1457        {
1458            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1459            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1460            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1461            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1462        }
1463
1464
1465
1466        /* To calculate %, subtract timeIncrement because the effect should finish on the last frame */
1467        /* which is presented from CTS = eof-timeIncrement till CTS = eof */
1468        PercentageDone = VideoEffectTime
1469            / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);
1470
1471        if( PercentageDone < 0.0 )
1472            PercentageDone = 0.0;
1473
1474        if( PercentageDone > 1.0 )
1475            PercentageDone = 1.0;
1476
1477        switch( pFx->VideoEffectType )
1478        {
1479            case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1480                /**
1481                * Compute where we are in the effect (scale is 0->1024). */
1482                tmp = (M4OSA_Int32)(PercentageDone * 1024);
1483
1484                /**
1485                * Apply the darkening effect */
1486                err =
1487                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1488                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1489
1490                if( M4NO_ERROR != err )
1491                {
1492                    M4OSA_TRACE1_1(
1493                        "M4VSS3GPP_intApplyVideoEffect:\
1494                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1495                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1496                        err);
1497                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1498                }
1499                break;
1500
1501            case M4VSS3GPP_kVideoEffectType_CurtainOpening:
1502                /**
1503                * Compute where we are in the effect (scale is 0->height).
1504                * It is done with floats because the intermediate product can be
1505                very large (with long clips). */
1506                curtainParams.nb_black_lines =
1507                    (M4OSA_UInt16)(( 1.0 - PercentageDone)
1508                    * pPlaneTempIn[0].u_height);
1509                /**
1510                * The curtain hangs from the ceiling */
1511                curtainParams.top_is_black = 1;
1512
1513                /**
1514                * Apply the curtain effect */
1515                err = M4VFL_applyCurtain((M4ViComImagePlane *)pPlaneTempIn,
1516                    (M4ViComImagePlane *)pPlaneTempOut, &curtainParams,
1517                    M4OSA_NULL);
1518
1519                if( M4NO_ERROR != err )
1520                {
1521                    M4OSA_TRACE1_1(
1522                        "M4VSS3GPP_intApplyVideoEffect: M4VFL_applyCurtain returns error 0x%x,\
1523                        returning M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR",
1524                        err);
1525                    return M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR;
1526                }
1527                break;
1528
1529            case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1530                /**
1531                * Compute where we are in the effect (scale is 0->1024) */
1532                tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);
1533
1534                /**
1535                * Apply the darkening effect */
1536                err =
1537                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1538                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1539
1540                if( M4NO_ERROR != err )
1541                {
1542                    M4OSA_TRACE1_1(
1543                        "M4VSS3GPP_intApplyVideoEffect:\
1544                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1545                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1546                        err);
1547                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1548                }
1549                break;
1550
1551            case M4VSS3GPP_kVideoEffectType_CurtainClosing:
1552                /**
1553                * Compute where we are in the effect (scale is 0->height) */
1554                curtainParams.nb_black_lines =
1555                    (M4OSA_UInt16)(PercentageDone * pPlaneTempIn[0].u_height);
1556
1557                /**
1558                * The curtain hangs from the ceiling */
1559                curtainParams.top_is_black = 1;
1560
1561                /**
1562                * Apply the curtain effect */
1563                err = M4VFL_applyCurtain((M4ViComImagePlane *)pPlaneTempIn,
1564                    (M4ViComImagePlane *)pPlaneTempOut, &curtainParams,
1565                    M4OSA_NULL);
1566
1567                if( M4NO_ERROR != err )
1568                {
1569                    M4OSA_TRACE1_1(
1570                        "M4VSS3GPP_intApplyVideoEffect: M4VFL_applyCurtain returns error 0x%x,\
1571                        returning M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR",
1572                        err);
1573                    return M4VSS3GPP_ERR_CURTAIN_FILTER_ERROR;
1574                }
1575                break;
1576
1577            default:
1578                if( pFx->VideoEffectType
1579                    >= M4VSS3GPP_kVideoEffectType_External )
1580                {
1581                    M4OSA_UInt32 Cts = 0;
1582                    M4OSA_Int32 nextEffectTime;
1583
1584                    /**
1585                    * Compute where we are in the effect (scale is 0->1000) */
1586                    tmp = (M4OSA_Int32)(PercentageDone * 1000);
1587
1588                    /**
1589                    * Set the progress info provided to the external function */
1590                    extProgress.uiProgress = (M4OSA_UInt32)tmp;
1591                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1592                    extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1593                    extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1594                    extProgress.bIsLast = M4OSA_FALSE;
1595                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1596                    nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1597                        + pC->dOutputFrameDuration);
1598                    if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
1599                    {
1600                        extProgress.bIsLast = M4OSA_TRUE;
1601                    }
1602
1603                    err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1604                        pPlaneTempIn, pPlaneTempOut, &extProgress,
1605                        pFx->VideoEffectType
1606                        - M4VSS3GPP_kVideoEffectType_External);
1607
1608                    if( M4NO_ERROR != err )
1609                    {
1610                        M4OSA_TRACE1_1(
1611                            "M4VSS3GPP_intApplyVideoEffect: \
1612                            External video effect function returns 0x%x!",
1613                            err);
1614                        return err;
1615                    }
1616                    break;
1617                }
1618                else
1619                {
1620                    M4OSA_TRACE1_1(
1621                        "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
1622                        returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
1623                        pFx->VideoEffectType);
1624                    return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
1625                }
1626        }
1627        /**
1628        * RC: update pPlaneTempIn and pPlaneTempOut depending on the current effect index */
1629        if (((i % 2 == 0) && (NumActiveEffects  % 2 == 0))
1630            || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
1631        {
1632            pPlaneTempIn = pC->yuv4;
1633            pPlaneTempOut = pPlaneOut;
1634        }
1635        else
1636        {
1637            pPlaneTempIn = pPlaneOut;
1638            pPlaneTempOut = pC->yuv4;
1639        }
1640    }
1641
1642    /**
1643    *    Return */
1644    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
1645    return M4NO_ERROR;
1646}
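/*
 * Illustrative sketch only (not part of this file's build): the shape of an
 * external video effect callback, as inferred from the ExtVideoEffectFct call
 * in M4VSS3GPP_intApplyVideoEffect() above. The authoritative typedef lives in
 * M4VSS3GPP_API.h; the function and parameter names below are placeholders.
 *
 *   static M4OSA_ERR myExternalEffect( M4OSA_Void *pFctCtxt,
 *                                      M4VIFI_ImagePlane *pInYUV420,
 *                                      M4VIFI_ImagePlane *pOutYUV420,
 *                                      M4VSS3GPP_ExternalProgress *pProgress,
 *                                      M4OSA_UInt32 uiEffectKind )
 *   {
 *       // For example: copy the three planes from pInYUV420 to pOutYUV420 and
 *       // attenuate the luma as a function of pProgress->uiProgress (0..1000).
 *       return M4NO_ERROR;
 *   }
 */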
1647
1648/**
1649 ******************************************************************************
1650 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
1651 * @brief    Apply video transition effect pC1+pC2->pPlaneOut
1652 * @param   pC                (IN/OUT) Internal edit context
1653 * @param    pPlaneOut        (IN/OUT) Output raw YUV420 image
1654 * @return    M4NO_ERROR:                        No error
1655 ******************************************************************************
1656 */
1657static M4OSA_ERR
1658M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
1659                             M4VIFI_ImagePlane *pPlaneOut )
1660{
1661    M4OSA_ERR err;
1662    M4OSA_Int32 iProgress;
1663    M4VSS3GPP_ExternalProgress extProgress;
1664    M4VIFI_ImagePlane *pPlane;
1665    M4OSA_Int32 i;
1666    const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
1667        pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
1668
1669    /**
1670    * Compute how far from the end cut we are, in clip-base time.
1671    * It is done with integers because the offset and begin cut have been rounded already. */
1672    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1673    iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
1674        ((M4OSA_Double)pC->pC1->iVoffset);
1675    /**
1676    * We must remove the duration of one frame, else we would almost never reach the end
1677    * (a classic fencepost, or "posts and intervals", issue). */
1678    iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;
1679
1680    if( iProgress < 0 ) /**< Sanity checks */
1681    {
1682        iProgress = 0;
1683    }
1684
1685    /**
1686    * Compute where we are in the transition, on a base 1000 */
1687    iProgress = ( ( iDur - iProgress) * 1000) / iDur;
1688
1689    /**
1690    * Sanity checks */
1691    if( iProgress < 0 )
1692    {
1693        iProgress = 0;
1694    }
1695    else if( iProgress > 1000 )
1696    {
1697        iProgress = 1000;
1698    }
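    /*
     * At this point iProgress is the time left before the end cut, minus one
     * frame duration. Illustration (assumed numbers): with iDur = 1000 ms,
     * 250 ms remaining and 33 ms frames, iProgress becomes 217 and the
     * base-1000 conversion below gives ((1000 - 217) * 1000) / 1000 = 783,
     * i.e. the transition is ~78% done.
     */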
1699
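    /*
     * The behaviour switch below remaps iProgress while keeping the 0 and 1000
     * endpoints fixed: SpeedUp uses p*p/1000 (slow start, fast end), SpeedDown
     * uses sqrt(p*1000) (fast start, slow end), and the SlowMiddle/FastMiddle
     * variants stitch the two shapes together around p = 500. For example, at
     * p = 400 SpeedUp gives 160 while SpeedDown gives 632.
     */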
1700    switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
1701    {
1702        case M4VSS3GPP_TransitionBehaviour_SpeedUp:
1703            iProgress = ( iProgress * iProgress) / 1000;
1704            break;
1705
1706        case M4VSS3GPP_TransitionBehaviour_Linear:
1707            /*do nothing*/
1708            break;
1709
1710        case M4VSS3GPP_TransitionBehaviour_SpeedDown:
1711            iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
1712            break;
1713
1714        case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
1715            if( iProgress < 500 )
1716            {
1717                iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
1718            }
1719            else
1720            {
1721                iProgress =
1722                    (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
1723                    / 500) + 500);
1724            }
1725            break;
1726
1727        case M4VSS3GPP_TransitionBehaviour_FastMiddle:
1728            if( iProgress < 500 )
1729            {
1730                iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
1731            }
1732            else
1733            {
1734                iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
1735            }
1736            break;
1737
1738        default:
1739            /*do nothing*/
1740            break;
1741    }
1742
1743    switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
1744    {
1745        case M4VSS3GPP_kVideoTransitionType_CrossFade:
1746            /**
1747            * Apply the transition effect */
1748            err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
1749                (M4ViComImagePlane *)pC->yuv1,
1750                (M4ViComImagePlane *)pC->yuv2,
1751                (M4ViComImagePlane *)pPlaneOut, iProgress);
1752
1753            if( M4NO_ERROR != err )
1754            {
1755                M4OSA_TRACE1_1(
1756                    "M4VSS3GPP_intVideoTransition:\
1757                     M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
1758                    returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
1759                    err);
1760                return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
1761            }
1762            break;
1763
1764        case M4VSS3GPP_kVideoTransitionType_None:
1765            /**
1766            * This is a simple, non-optimized version of the None transition...
1767            * We copy the YUV frame */
1768            if( iProgress < 500 ) /**< first half of transition */
1769            {
1770                pPlane = pC->yuv1;
1771            }
1772            else /**< second half of transition */
1773            {
1774                pPlane = pC->yuv2;
1775            }
1776            /**
1777            * Copy the input YUV frames */
1778            i = 3;
1779
1780            while( i-- > 0 )
1781            {
1782                memcpy((void *)pPlaneOut[i].pac_data,
1783                 (void *)pPlane[i].pac_data,
1784                    pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
1785            }
1786            break;
1787
1788        default:
1789            if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
1790                >= M4VSS3GPP_kVideoTransitionType_External )
1791            {
1792                /**
1793                * Set the progress info provided to the external function */
1794                extProgress.uiProgress = (M4OSA_UInt32)iProgress;
1795                // Decorrelate input and output encoding timestamp to handle encoder prefetch
1796                extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1797                extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;
1798
1799                err = pC->pTransitionList[pC->
1800                    uiCurrentClip].ExtVideoTransitionFct(
1801                    pC->pTransitionList[pC->
1802                    uiCurrentClip].pExtVideoTransitionFctCtxt,
1803                    pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
1804                    pC->pTransitionList[pC->
1805                    uiCurrentClip].VideoTransitionType
1806                    - M4VSS3GPP_kVideoTransitionType_External);
1807
1808                if( M4NO_ERROR != err )
1809                {
1810                    M4OSA_TRACE1_1(
1811                        "M4VSS3GPP_intVideoTransition:\
1812                        External video transition function returns 0x%x!",
1813                        err);
1814                    return err;
1815                }
1816                break;
1817            }
1818            else
1819            {
1820                M4OSA_TRACE1_1(
1821                    "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
1822                    returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
1823                    pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
1824                return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
1825            }
1826    }
1827
1828    /**
1829    *    Return */
1830    M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
1831    return M4NO_ERROR;
1832}
1833
1834/**
1835 ******************************************************************************
1836 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo()
1837 * @brief    Update the bitstream time info with the Counter Time System so that
1838 *          players relying on bitstream time info stay consistent
1839 * @note    H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops
1840 *          (GOV, see the standard)
1841 * @param   pC      (IN/OUT) Internal edit context: offset between system and
1842 *                           video time and state of the current clip (MPEG4 only)
1843 * @param   pAU     (IN/OUT) Video AU whose embedded time info is updated in place
1844 * @return    nothing
1845 ******************************************************************************
1846 */
1847static M4OSA_Void
1848M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
1849                            M4SYS_AccessUnit *pAU )
1850{
1851    M4OSA_UInt8 uiTmp;
1852    M4OSA_UInt32 uiCts = 0;
1853    M4OSA_MemAddr8 pTmp;
1854    M4OSA_UInt32 uiAdd;
1855    M4OSA_UInt32 uiCurrGov;
1856    M4OSA_Int8 iDiff;
1857
1858    M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1;
1859    M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset);
1860
1861    /**
1862    * Set H263 time counter from system time */
1863    if( M4SYS_kH263 == pAU->stream->streamType )
1864    {
1865        uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5)
1866            % M4VSS3GPP_EDIT_H263_MODULO_TIME);
1867        M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress),
1868            uiTmp);
1869    }
1870    /*
1871    * Set MPEG4 GOV time counter regarding video and system time */
1872    else if( M4SYS_kMPEG_4 == pAU->stream->streamType )
1873    {
1874        /*
1875        * If GOV.
1876        * beware of little/big endian! */
1877        /* correction: read four 8-bit blocks instead of one 32-bit block */
1878        M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress);
1879        M4OSA_UInt32 temp32 = 0;
1880
1881        temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8))
1882            + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8)
1883            + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16)
1884            + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24);
1885
1886        M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32,
1887            *(pAU->dataAddress));
1888
1889        if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 )
1890        {
1891            pTmp =
1892                (M4OSA_MemAddr8)(pAU->dataAddress
1893                + 1); /**< Jump to the time code (just after the 32 bits header) */
1894            uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset);
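            /*
             * uiAdd is the AU timestamp in milliseconds plus the accumulated
             * rounding offset. A GOV header only carries whole seconds, so the
             * state machine below rounds uiAdd to seconds and feeds the
             * rounding error back into *pOffset, keeping successive GOVs
             * consistent with the stream timestamps.
             */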
1895
1896            switch( pClipCtxt->bMpeg4GovState )
1897            {
1898                case M4OSA_FALSE: /*< INIT */
1899                    {
1900                        /* video time = ceil (system time + offset) */
1901                        uiCts = ( uiAdd + 999) / 1000;
1902
1903                        /* offset update */
1904                        ( *pOffset) += (( uiCts * 1000) - uiAdd);
1905
1906                        /* Save values */
1907                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
1908
1909                        /* State to 'first' */
1910                        pClipCtxt->bMpeg4GovState = M4OSA_TRUE;
1911                    }
1912                    break;
1913
1914                case M4OSA_TRUE: /*< UPDATE */
1915                    {
1916                        /* Get current Gov value */
1917                        M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov);
1918
1919                        /* video time = floor or ceil (system time + offset) */
1920                        uiCts = (uiAdd / 1000);
1921                        iDiff = (M4OSA_Int8)(uiCurrGov
1922                            - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts
1923                            + pClipCtxt->uiMpeg4PrevGovValueSet);
1924
1925                        /* ceiling */
1926                        if( iDiff > 0 )
1927                        {
1928                            uiCts += (M4OSA_UInt32)(iDiff);
1929
1930                            /* offset update */
1931                            ( *pOffset) += (( uiCts * 1000) - uiAdd);
1932                        }
1933
1934                        /* Save values */
1935                        pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov;
1936                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
1937                    }
1938                    break;
1939            }
1940
1941            M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts);
1942        }
1943    }
1944    return;
1945}
1946
1947/**
1948 ******************************************************************************
1949 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects()
1950 * @brief    Check which video effect must be applied at the current time
1951 ******************************************************************************
1952 */
1953static M4OSA_Void
1954M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
1955                               M4OSA_UInt8 uiClipNumber )
1956{
1957    M4OSA_UInt8 uiClipIndex;
1958    M4OSA_UInt8 uiFxIndex, i;
1959    M4VSS3GPP_ClipContext *pClip;
1960    M4VSS3GPP_EffectSettings *pFx;
1961    M4OSA_Int32 Off, BC, EC;
1962    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1963    M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;
1964
1965    uiClipIndex = pC->uiCurrentClip;
1966    pClip = pC->pC1;
1967    /**
1968    * Shortcuts for code readability */
1969    Off = pClip->iVoffset;
1970    BC = pClip->iActualVideoBeginCut;
1971    EC = pClip->iEndTime;
1972
1973    i = 0;
1974
1975    for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ )
1976    {
1977        /** Shortcut: reverse order because of the priority between effects (EndEffect always clean) */
1978        pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]);
1979
1980        if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType )
1981        {
1982            /**
1983            * Check if there is actually a video effect */
1984
1985             if(uiClipNumber ==1)
1986             {
1987                /**< Are we after the start time of the effect?
1988                 * or Are we into the effect duration?
1989                 */
1990                if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
1991                    (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
1992                    /**
1993                     * Set the active effect(s) */
1994                    pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;
1995
1996                    /**
1997                     * Update counter of active effects */
1998                    i++;
1999
2000                    /**
2001                     * For all external effects set this flag to true. */
2002                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2003                    {
2004                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2005                    }
2006
2007                    /**
2008                     * The third effect has the highest priority, then the
2009                     * second one, then the first one; walking the list in
2010                     * reverse order records higher-priority effects first.
2011                     */
2012                }
2013            }
2014            else
2015            {
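                /* The transition duration is added to t here, mirroring the
                 * VideoEffectTime computation done for the second clip in
                 * M4VSS3GPP_intApplyVideoEffect(). */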
2016                /**< Are we into the effect duration? */
2017                if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2018                    >= (M4OSA_Int32)(pFx->uiStartTime))
2019                    && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
2020                    <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
2021                    /**
2022                     * Set the active effect(s) */
2023                    pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;
2024
2025                    /**
2026                     * Update counter of active effects */
2027                    i++;
2028
2029                    /**
2030                     * For all external effects set this flag to true. */
2031                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
2032                    {
2033                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
2034                    }
2035
2036                    /**
2037                     * The third effect has the highest priority, then the second one, then the first one;
2038                     * walking the list in reverse order records higher-priority effects first. */
2039                }
2040            }
2041        }
2042    }
2043
2044    if(1==uiClipNumber)
2045    {
2046        /**
2047        * Save number of active effects */
2048        pC->nbActiveEffects = i;
2049    }
2050    else
2051    {
2052        pC->nbActiveEffects1 = i;
2053    }
2054
2055    /**
2056    * Convert the absolute time to clip-related time */
2057    t -= Off;
2058
2059    /**
2060    * Check if we are on the begin cut (for clip1 only) */
2061    if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) )
2062    {
2063        pC->bClip1AtBeginCut = M4OSA_TRUE;
2064    }
2065    else
2066    {
2067        pC->bClip1AtBeginCut = M4OSA_FALSE;
2068    }
2069
2070    return;
2071}
2072
2073/**
2074 ******************************************************************************
2075 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder()
2076 * @brief    Creates the video encoder
2077 * @note
2078 ******************************************************************************
2079 */
2080M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2081{
2082    M4OSA_ERR err;
2083    M4ENCODER_AdvancedParams EncParams;
2084
2085    /**
2086    * Simulate a writer interface with our specific function */
2087    pC->ewc.OurWriterDataInterface.pProcessAU =
2088        M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific,
2089                                but it follows the writer interface */
2090    pC->ewc.OurWriterDataInterface.pStartAU =
2091        M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific,
2092                              but it follows the writer interface */
2093    pC->ewc.OurWriterDataInterface.pWriterContext =
2094        (M4WRITER_Context)
2095        pC; /**< We give the internal context as writer context */
2096
2097    /**
2098    * Get the encoder interface, if not already done */
2099    if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts )
2100    {
2101        err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI,
2102            pC->ewc.VideoStreamType);
2103        M4OSA_TRACE1_1(
2104            "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x",
2105            err);
2106        M4ERR_CHECK_RETURN(err);
2107    }
2108
2109    /**
2110    * Set encoder shell parameters according to VSS settings */
2111
2112    /* Common parameters */
2113    EncParams.InputFormat = M4ENCODER_kIYUV420;
2114    EncParams.FrameWidth = pC->ewc.uiVideoWidth;
2115    EncParams.FrameHeight = pC->ewc.uiVideoHeight;
2116    EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale;
2117
2118    if( pC->bIsMMS == M4OSA_FALSE )
2119    {
2120        /* No strict regulation in video editor */
2121        /* Because of the effects and transitions we should allow more flexibility */
2122        /* Also it prevents dropping important frames (with a bad result on scheduling and
2123        block effects) */
2124        EncParams.bInternalRegulation = M4OSA_FALSE;
2125        // Variable framerate is not supported by StageFright encoders
2126        EncParams.FrameRate = M4ENCODER_k30_FPS;
2127    }
2128    else
2129    {
2130        /* In case of MMS mode, we need to enable bitrate regulation to be sure */
2131        /* to reach the targeted output file size */
2132        EncParams.bInternalRegulation = M4OSA_TRUE;
2133        EncParams.FrameRate = pC->MMSvideoFramerate;
2134    }
2135
2136    /**
2137    * Other encoder settings (defaults) */
2138    EncParams.uiHorizontalSearchRange = 0;     /* use default */
2139    EncParams.uiVerticalSearchRange = 0;       /* use default */
2140    EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
2141    EncParams.uiIVopPeriod = 0;                /* use default */
2142    EncParams.uiMotionEstimationTools = 0;     /* M4V_MOTION_EST_TOOLS_ALL */
2143    EncParams.bAcPrediction = M4OSA_TRUE;      /* use AC prediction */
2144    EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
2145    EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2146
2147    switch ( pC->ewc.VideoStreamType )
2148    {
2149        case M4SYS_kH263:
2150
2151            EncParams.Format = M4ENCODER_kH263;
2152
2153            EncParams.uiStartingQuantizerValue = 10;
2154            EncParams.uiRateFactor = 1; /* default */
2155
2156            EncParams.bErrorResilience = M4OSA_FALSE;
2157            EncParams.bDataPartitioning = M4OSA_FALSE;
2158            break;
2159
2160        case M4SYS_kMPEG_4:
2161
2162            EncParams.Format = M4ENCODER_kMPEG4;
2163
2164            EncParams.uiStartingQuantizerValue = 8;
2165            EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration
2166                * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5);
2167
2168            if( EncParams.uiRateFactor == 0 )
2169                EncParams.uiRateFactor = 1; /* default */
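            /*
             * uiRateFactor here is the output frame duration expressed in
             * timescale ticks, rounded to the nearest integer. Illustration
             * only (assumed values): with a 30 Hz timescale and ~33.3 ms
             * frames, (33.3 * 30) / 1000 + 0.5 truncates to 1.
             */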
2170
2171            if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning )
2172            {
2173                EncParams.bErrorResilience = M4OSA_FALSE;
2174                EncParams.bDataPartitioning = M4OSA_FALSE;
2175            }
2176            else
2177            {
2178                EncParams.bErrorResilience = M4OSA_TRUE;
2179                EncParams.bDataPartitioning = M4OSA_TRUE;
2180            }
2181            break;
2182
2183        case M4SYS_kH264:
2184            M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264");
2185
2186            EncParams.Format = M4ENCODER_kH264;
2187
2188            EncParams.uiStartingQuantizerValue = 10;
2189            EncParams.uiRateFactor = 1; /* default */
2190
2191            EncParams.bErrorResilience = M4OSA_FALSE;
2192            EncParams.bDataPartitioning = M4OSA_FALSE;
2193            //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS;
2194            break;
2195
2196        default:
2197            M4OSA_TRACE1_1(
2198                "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x",
2199                pC->ewc.VideoStreamType);
2200            return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT;
2201    }
2202
2203    /* In case of EMP we overwrite certain parameters */
2204    if( M4OSA_TRUE == pC->ewc.bActivateEmp )
2205    {
2206        EncParams.uiHorizontalSearchRange = 15;    /* set value */
2207        EncParams.uiVerticalSearchRange = 15;      /* set value */
2208        EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
2209        EncParams.uiIVopPeriod = 15; /* one I frame every 15 frames */
2210        EncParams.uiMotionEstimationTools = 1; /* M4V_MOTION_EST_TOOLS_NO_4MV */
2211        EncParams.bAcPrediction = M4OSA_FALSE;     /* no AC prediction */
2212        EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
2213        EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2214    }
2215
2216    if( pC->bIsMMS == M4OSA_FALSE )
2217    {
2218        /* Compute max bitrate depending on input files bitrates and transitions */
2219        if( pC->Vstate == M4VSS3GPP_kEditVideoState_TRANSITION )
2220        {
2221            EncParams.Bitrate = pC->ewc.uiVideoBitrate;
2222        }
2223        else
2224        {
2225            EncParams.Bitrate = pC->ewc.uiVideoBitrate;
2226        }
2227    }
2228    else
2229    {
2230        EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */
2231        EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */
2232    }
2233
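    /*
     * Encoder lifecycle, as driven below: pFctInit creates the shell context
     * (state kEncoderClosed), pFctOpen configures it with EncParams
     * (kEncoderStopped), and pFctStart, when provided, starts encoding
     * (kEncoderRunning). M4VSS3GPP_intDestroyVideoEncoder() unwinds the same
     * states in reverse order.
     */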
2234    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit");
2235    /**
2236    * Init the video encoder (advanced settings version of the encoder Open function) */
2237    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext,
2238        &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC,
2239        pC->ShellAPI.pCurrentVideoEncoderExternalAPI,
2240        pC->ShellAPI.pCurrentVideoEncoderUserData);
2241
2242    if( M4NO_ERROR != err )
2243    {
2244        M4OSA_TRACE1_1(
2245            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x",
2246            err);
2247        return err;
2248    }
2249
2250    pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2251    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen");
2252
2253    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext,
2254        &pC->ewc.WriterVideoAU, &EncParams);
2255
2256    if( M4NO_ERROR != err )
2257    {
2258        M4OSA_TRACE1_1(
2259            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x",
2260            err);
2261        return err;
2262    }
2263
2264    pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2265    M4OSA_TRACE1_0(
2266        "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart");
2267
2268    if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart )
2269    {
2270        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart(
2271            pC->ewc.pEncContext);
2272
2273        if( M4NO_ERROR != err )
2274        {
2275            M4OSA_TRACE1_1(
2276                "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x",
2277                err);
2278            return err;
2279        }
2280    }
2281
2282    pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning;
2283
2284    /**
2285    *    Return */
2286    M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR");
2287    return M4NO_ERROR;
2288}
2289
2290/**
2291 ******************************************************************************
2292 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder()
2293 * @brief    Destroy the video encoder
2294 * @note
2295 ******************************************************************************
2296 */
2297M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2298{
2299    M4OSA_ERR err = M4NO_ERROR;
2300
2301    if( M4OSA_NULL != pC->ewc.pEncContext )
2302    {
2303        if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState )
2304        {
2305            if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL )
2306            {
2307                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop(
2308                    pC->ewc.pEncContext);
2309
2310                if( M4NO_ERROR != err )
2311                {
2312                    M4OSA_TRACE1_1(
2313                        "M4VSS3GPP_intDestroyVideoEncoder:\
2314                        pVideoEncoderGlobalFcts->pFctStop returns 0x%x",
2315                        err);
2316                    /* Well... how the heck do you handle a failed cleanup? */
2317                }
2318            }
2319
2320            pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2321        }
2322
2323        /* Has the encoder actually been opened? Don't close it if that's not the case. */
2324        if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState )
2325        {
2326            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose(
2327                pC->ewc.pEncContext);
2328
2329            if( M4NO_ERROR != err )
2330            {
2331                M4OSA_TRACE1_1(
2332                    "M4VSS3GPP_intDestroyVideoEncoder:\
2333                    pVideoEncoderGlobalFcts->pFctClose returns 0x%x",
2334                    err);
2335                /* Well... how the heck do you handle a failed cleanup? */
2336            }
2337
2338            pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2339        }
2340
2341        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup(
2342            pC->ewc.pEncContext);
2343
2344        if( M4NO_ERROR != err )
2345        {
2346            M4OSA_TRACE1_1(
2347                "M4VSS3GPP_intDestroyVideoEncoder:\
2348                pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!",
2349                err);
2350            /**< We do not return the error here because we still have stuff to free */
2351        }
2352
2353        pC->ewc.encoderState = M4VSS3GPP_kNoEncoder;
2354        /**
2355        * Reset variable */
2356        pC->ewc.pEncContext = M4OSA_NULL;
2357    }
2358
2359    M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err);
2360    return err;
2361}
2362
2363/**
2364 ******************************************************************************
2365 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter()
2366 * @brief    Modify the time counter of the given H263 video AU
2367 * @note
2368 * @param    pAuDataBuffer    (IN/OUT) H263 Video AU to modify
2369 * @param    uiCts            (IN)     New time counter value
2370 * @return    nothing
2371 ******************************************************************************
2372 */
2373static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
2374                                                  M4OSA_UInt8 uiCts )
2375{
2376    /*
2377    *  The H263 time counter is 8 bits located on the "x" below:
2378    *
2379    *   |--------|--------|--------|--------|
2380    *    ???????? ???????? ??????xx xxxxxx??
2381    */
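    /*
     * Worked example (illustration): uiCts = 90 (0x5A).
     *   (90 >> 6) & 0x3  = 0x01 -> low 2 bits of pAuDataBuffer[2]
     *   (90 << 2) & 0xFC = 0x68 -> high 6 bits of pAuDataBuffer[3]
     * The surrounding picture-header bits are left untouched.
     */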
2382
2383    /**
2384    * Write the 2 bits on the third byte */
2385    pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3);
2386
2387    /**
2388    * Write the 6 bits on the fourth byte */
2389    pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3);
2390
2391    return;
2392}
2393
2394/**
2395 ******************************************************************************
2396 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov()
2397 * @brief    Modify the time info from Group Of VOP video AU
2398 * @note
2399 * @param    pAuDataBuffer    (IN)    MPEG4 Video AU to modify
2400 * @param    uiCtsSec            (IN)     New GOV time info in second unit
2401 * @return    nothing
2402 ******************************************************************************
2403 */
2404static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2405                                           M4OSA_UInt32 uiCtsSec )
2406{
2407    /*
2408    *  The MPEG-4 time code length is 18 bits:
2409    *
2410    *     hh     mm    marker    ss
2411    *    xxxxx|xxx xxx     1    xxxx xx ??????
2412    *   |----- ---|---     -    ----|-- ------|
2413    */
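    /*
     * Worked example (illustration): uiCtsSec = 3725 s = 1 h 2 min 5 s, i.e.
     * hh = 1, mm = 2, ss = 5. The three bytes written below become:
     *   pAuDataBuffer[0] = (1 << 3) | (2 >> 3)        = 0x08  (hh, mm high bits)
     *   pAuDataBuffer[1] = (2 << 5) | 0x10 | (5 >> 2) = 0x51  (mm low bits, marker, ss high bits)
     *   pAuDataBuffer[2] = ((5 & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F)  (ss low bits)
     */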
2414    M4OSA_UInt8 uiHh;
2415    M4OSA_UInt8 uiMm;
2416    M4OSA_UInt8 uiSs;
2417    M4OSA_UInt8 uiTmp;
2418
2419    /**
2420    * Write the 2 last bits ss */
2421    uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */
2422    pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F));
2423
2424    if( uiCtsSec < 60 )
2425    {
2426        /**
2427        * Write the 3 low bits of mm (zero here), the marker bit (0x10) and the high bits of ss */
2428        pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2));
2429
2430        /**
2431        * Write the 5 bits of hh and 3 of mm (out of 6) */
2432        pAuDataBuffer[0] = 0;
2433    }
2434    else
2435    {
2436        /**
2437        * Write the 3 low bits of mm, the marker bit (0x10) and the high bits of ss */
2438        uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */
2439        uiMm = (M4OSA_UInt8)(uiTmp % 60);
2440        pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2));
2441
2442        if( uiTmp < 60 )
2443        {
2444            /**
2445            * Write the 5 bits of hh and 3 of mm (out of 6) */
2446            pAuDataBuffer[0] = ((uiMm >> 3));
2447        }
2448        else
2449        {
2450            /**
2451            * Write the 5 bits of hh and 3 of mm (out of 6) */
2452            uiHh = (M4OSA_UInt8)(uiTmp / 60);
2453            pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3));
2454        }
2455    }
2456    return;
2457}
2458
2459/**
2460 ******************************************************************************
2461 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov()
2462 * @brief    Get the time info from Group Of VOP video AU
2463 * @note
2464 * @param    pAuDataBuffer    (IN)    MPEG4 Video AU to read
2465 * @param    pCtsSec            (OUT)    Current GOV time info in second unit
2466 * @return    nothing
2467 ******************************************************************************
2468 */
2469static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2470                                           M4OSA_UInt32 *pCtsSec )
2471{
2472    /*
2473    *  The MPEG-4 time code length is 18 bits:
2474    *
2475    *     hh     mm    marker    ss
2476    *    xxxxx|xxx xxx     1    xxxx xx ??????
2477    *   |----- ---|---     -    ----|-- ------|
2478    */
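    /*
     * This is the inverse of M4VSS3GPP_intSetMPEG4Gov(): with the example bytes
     * 0x08 / 0x51 / 0x4x from the encoding illustration above, the reads below
     * recover ss = 5, mm = 2, hh = 1, i.e. *pCtsSec = 3725.
     */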
2479    M4OSA_UInt8 uiHh;
2480    M4OSA_UInt8 uiMm;
2481    M4OSA_UInt8 uiSs;
2482    M4OSA_UInt8 uiTmp;
2483    M4OSA_UInt32 uiCtsSec;
2484
2485    /**
2486    * Read ss */
2487    uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6);
2488    uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2);
2489    uiCtsSec = uiSs + uiTmp;
2490
2491    /**
2492    * Read mm */
2493    uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5);
2494    uiTmp = (( pAuDataBuffer[0] & 0x07) << 3);
2495    uiMm = uiMm + uiTmp;
2496    uiCtsSec = ( uiMm * 60) + uiCtsSec;
2497
2498    /**
2499    * Read hh */
2500    uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3);
2501
2502    if( uiHh )
2503    {
2504        uiCtsSec = ( uiHh * 3600) + uiCtsSec;
2505    }
2506
2507    /*
2508    * in sec */
2509    *pCtsSec = uiCtsSec;
2510
2511    return;
2512}
2513
2514/**
2515 ******************************************************************************
2516 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420()
2517 * @brief    Allocate the three YUV 4:2:0 planes
2518 * @note
2519 * @param    pPlanes    (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures
2520 * @param    uiWidth    (IN)     Image width
2521 * @param    uiHeight (IN)     Image height
2522 ******************************************************************************
2523 */
2524static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
2525                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight )
2526{
2527
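    /*
     * YUV 4:2:0 layout: the Y plane is uiWidth x uiHeight and the U and V
     * planes are (uiWidth/2) x (uiHeight/2) each, so the three buffers total
     * uiWidth * uiHeight * 3 / 2 bytes (e.g. 460800 bytes for 640x480).
     * Each plane is a separate allocation with u_stride equal to u_width.
     */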
2528    pPlanes[0].u_width = uiWidth;
2529    pPlanes[0].u_height = uiHeight;
2530    pPlanes[0].u_stride = uiWidth;
2531    pPlanes[0].u_topleft = 0;
2532    pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride
2533        * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data");
2534
2535    if( M4OSA_NULL == pPlanes[0].pac_data )
2536    {
2537        M4OSA_TRACE1_0(
2538            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\
2539            returning M4ERR_ALLOC");
2540        return M4ERR_ALLOC;
2541    }
2542
2543    pPlanes[1].u_width = pPlanes[0].u_width >> 1;
2544    pPlanes[1].u_height = pPlanes[0].u_height >> 1;
2545    pPlanes[1].u_stride = pPlanes[1].u_width;
2546    pPlanes[1].u_topleft = 0;
2547    pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride
2548        * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data");
2549
2550    if( M4OSA_NULL == pPlanes[1].pac_data )
2551    {
2552        M4OSA_TRACE1_0(
2553            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\
2554            returning M4ERR_ALLOC");
2555        return M4ERR_ALLOC;
2556    }
2557
2558    pPlanes[2].u_width = pPlanes[1].u_width;
2559    pPlanes[2].u_height = pPlanes[1].u_height;
2560    pPlanes[2].u_stride = pPlanes[2].u_width;
2561    pPlanes[2].u_topleft = 0;
2562    pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride
2563        * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data");
2564
2565    if( M4OSA_NULL == pPlanes[2].pac_data )
2566    {
2567        M4OSA_TRACE1_0(
2568            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\
2569            returning M4ERR_ALLOC");
2570        return M4ERR_ALLOC;
2571    }
2572
2573    /**
2574    *    Return */
2575    M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR");
2576    return M4NO_ERROR;
2577}
2578