1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16/**
17 ******************************************************************************
18 * @file    M4VSS3GPP_EditVideo.c
19 * @brief    Video Studio Service 3GPP edit API implementation.
20 * @note
21 ******************************************************************************
22 */
23
24/****************/
25/*** Includes ***/
26/****************/
27
28#include "NXPSW_CompilerSwitches.h"
29/**
30 * Our header */
31#include "M4VSS3GPP_API.h"
32#include "M4VSS3GPP_InternalTypes.h"
33#include "M4VSS3GPP_InternalFunctions.h"
34#include "M4VSS3GPP_InternalConfig.h"
35#include "M4VSS3GPP_ErrorCodes.h"
36
37// StageFright encoders require the video resolution to be a multiple of 16
38#include "M4ENCODER_common.h"
39/**
40 * OSAL headers */
41#include "M4OSA_Memory.h" /**< OSAL memory management */
42#include "M4OSA_Debug.h"  /**< OSAL debug management */
43
44/**
45 * component includes */
46#include "M4VFL_transition.h" /**< video effects */
47
48/*for transition behaviour*/
49#include <math.h>
50
51/************************************************************************/
52/* Static local functions                                               */
53/************************************************************************/
54
55static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
56    M4VSS3GPP_InternalEditContext *pC );
57static M4OSA_Void
58M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
59                               M4OSA_UInt8 uiClipNumber );
60static M4OSA_ERR
61M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,/*M4OSA_UInt8 uiClip1orClip2,*/
62                              M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut );
63static M4OSA_ERR
64M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
65                             M4VIFI_ImagePlane *pPlaneOut );
66
67static M4OSA_Void
68M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
69                            M4SYS_AccessUnit *pAU );
70static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
71                                                  M4OSA_UInt8 uiCts );
72static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
73                                           M4OSA_UInt32 uiCtsSec );
74static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
75                                           M4OSA_UInt32 *pCtsSec );
76static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
77                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight );
78
79/**
80 ******************************************************************************
81 * M4OSA_ERR M4VSS3GPP_intEditStepVideo()
82 * @brief    One step of video processing
83 * @param   pC    (IN/OUT) Internal edit context
84 ******************************************************************************
85 */
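/* Processing depends on pC->Vstate (set by M4VSS3GPP_intCheckVideoMode):
 * READ_WRITE / AFTER_CUT copy the compressed AUs straight from the reader to the
 * writer, DECODE_ENCODE / BEGIN_CUT decode and re-encode the frame (effects, begin
 * cut), and TRANSITION blends the frames of the two overlapping clips before
 * re-encoding. */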
86M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC )
87{
88    M4OSA_ERR err;
89    M4OSA_Int32 iCts, iNextCts;
90    M4ENCODER_FrameMode FrameMode;
91    M4OSA_Bool bSkipFrame;
92    M4OSA_UInt16 offset;
93
94    /**
95     * Check if we reached end cut. Decorrelate input and output encoding
96     * timestamp to handle encoder prefetch
97     */
98    if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset
99        + pC->iInOutTimeOffset) >= pC->pC1->iEndTime )
100    {
101        /* Re-adjust video to precise cut time */
102        pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts))
103            - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime;
104        if ( pC->iInOutTimeOffset < 0 ) {
105            pC->iInOutTimeOffset = 0;
106        }
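        /* iInOutTimeOffset now holds the overshoot past this clip's end cut; carrying
         * it over keeps the end-cut test above accurate for the next clip. */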
107
108        /**
109        * Video is done for this clip */
110        err = M4VSS3GPP_intReachedEndOfVideo(pC);
111
112        /* RC: to know when a file has been processed */
113        if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP)
114        {
115            M4OSA_TRACE1_1(
116                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x",
117                err);
118        }
119
120        return err;
121    }
122
123    /* Don't change the states if we are in decodeUpTo() */
124    if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
125        && (( pC->pC2 == M4OSA_NULL)
126        || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) )
127    {
128        /**
129        * Check Video Mode, depending on the current output CTS */
130        err = M4VSS3GPP_intCheckVideoMode(
131            pC); /**< This function changes the pC->Vstate variable! */
132
133        if (M4NO_ERROR != err)
134        {
135            M4OSA_TRACE1_1(
136                "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!",
137                err);
138            return err;
139        }
140    }
141
142
143    switch( pC->Vstate )
144    {
145        /* _________________ */
146        /*|                 |*/
147        /*| READ_WRITE MODE |*/
148        /*|_________________|*/
149
150        case M4VSS3GPP_kEditVideoState_READ_WRITE:
151        case M4VSS3GPP_kEditVideoState_AFTER_CUT:
152            {
153                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE");
154
155                bSkipFrame = M4OSA_FALSE;
156
157                /**
158                * If we were decoding the clip, we must jump to be sure
159                * to get to the right position. */
160                if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus )
161                {
162                    /**
163                    * Jump to target video time (tc = to-T) */
164                // Decorrelate input and output encoding timestamp to handle encoder prefetch
165                iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset;
166                    err = pC->pC1->ShellAPI.m_pReader->m_pFctJump(
167                        pC->pC1->pReaderContext,
168                        (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts);
169
170                    if( M4NO_ERROR != err )
171                    {
172                        M4OSA_TRACE1_1(
173                            "M4VSS3GPP_intEditStepVideo:\
174                            READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!",
175                            err);
176                        return err;
177                    }
178
179                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
180                        pC->pC1->pReaderContext,
181                        (M4_StreamHandler *)pC->pC1->pVideoStream,
182                        &pC->pC1->VideoAU);
183
184                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
185                    {
186                        M4OSA_TRACE1_1(
187                            "M4VSS3GPP_intEditStepVideo:\
188                            READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!",
189                            err);
190                        return err;
191                    }
192
193                    M4OSA_TRACE2_3("A .... read  : cts  = %.0f + %ld [ 0x%x ]",
194                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
195                        pC->pC1->VideoAU.m_size);
196
197                    /* This frame has been already written in BEGIN CUT step -> skip it */
198                    if( pC->pC1->VideoAU.m_CTS == iCts
199                        && pC->pC1->iVideoRenderCts >= iCts )
200                    {
201                        bSkipFrame = M4OSA_TRUE;
202                    }
203                }
204
205                /* This frame has been already written in BEGIN CUT step -> skip it */
206                if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT)
207                    && (pC->pC1->VideoAU.m_CTS
208                    + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) )
209                {
210                    bSkipFrame = M4OSA_TRUE;
211                }
212
213                /**
214                * Remember the clip reading state */
215                pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ;
216                // Decorrelate input and output encoding timestamp to handle encoder prefetch
217                // Rounding compensates for reader imprecision (m_CTS is actually an integer)
218                iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1;
219                iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1;
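                /* The -1/+1 widening of the [iCts, iNextCts) window absorbs that rounding,
                 * so an AU whose integer CTS lands just before the target is still written. */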
220                /* Avoid writing a last frame of duration 0 */
221                if( iNextCts > pC->pC1->iEndTime )
222                    iNextCts = pC->pC1->iEndTime;
223
224                /**
225                * If the AU is good to be written, write it, else just skip it */
226                if( ( M4OSA_FALSE == bSkipFrame)
227                    && (( pC->pC1->VideoAU.m_CTS >= iCts)
228                    && (pC->pC1->VideoAU.m_CTS < iNextCts)
229                    && (pC->pC1->VideoAU.m_size > 0)) )
230                {
231                    /**
232                    * Get the output AU to write into */
233                    err = pC->ShellAPI.pWriterDataFcts->pStartAU(
234                        pC->ewc.p3gpWriterContext,
235                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
236                        &pC->ewc.WriterVideoAU);
237
238                    if( M4NO_ERROR != err )
239                    {
240                        M4OSA_TRACE1_1(
241                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
242                            pWriterDataFcts->pStartAU(Video) returns 0x%x!",
243                            err);
244                        return err;
245                    }
246
247                    /**
248                    * Copy the input AU to the output AU */
249                    pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute;
250                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
251                    pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS +
252                        (M4OSA_Time)pC->pC1->iVoffset;
253                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
254                    offset = 0;
255                    /* For an H.264 stream, do not read the first 4 bytes as they are header
256                     indicators */
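                    /* (these 4 bytes are presumably the NAL-unit length field that prefixes
                     AVC samples in 3GP/MP4 files) */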
257                    if( pC->pC1->pVideoStream->m_basicProperties.m_streamType
258                        == M4DA_StreamTypeVideoMpeg4Avc )
259                        offset = 4;
260
261                    pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset;
262                    if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize )
263                    {
264                        M4OSA_TRACE1_2(
265                            "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\
266                             MaxAuSize (%d>%d)! returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE",
267                            pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize);
268                        return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE;
269                    }
270
271                    memcpy((void *)pC->ewc.WriterVideoAU.dataAddress,
272                        (void *)(pC->pC1->VideoAU.m_dataAddress + offset),
273                        (pC->ewc.WriterVideoAU.size));
274
275                    /**
276                    * Update time info for the Counter Time System to be equal to the
277                    * bit-stream time */
278                    M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU);
279                    M4OSA_TRACE2_2("B ---- write : cts  = %lu [ 0x%x ]",
280                        pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size);
281
282                    /**
283                    * Write the AU */
284                    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(
285                        pC->ewc.p3gpWriterContext,
286                        M4VSS3GPP_WRITER_VIDEO_STREAM_ID,
287                        &pC->ewc.WriterVideoAU);
288
289                    if( M4NO_ERROR != err )
290                    {
291                        /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
292                         file size is reached
293                        The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
294                        is returned*/
295                        if( M4WAR_WRITER_STOP_REQ == err )
296                        {
297                            M4OSA_TRACE1_0(
298                                "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
299                            return M4VSS3GPP_WAR_EDITING_DONE;
300                        }
301                        else
302                        {
303                            M4OSA_TRACE1_1(
304                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
305                                pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
306                                err);
307                            return err;
308                        }
309                    }
310
311                    /**
312                    * Read next AU for next step */
313                    err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
314                        pC->pC1->pReaderContext,
315                        (M4_StreamHandler *)pC->pC1->pVideoStream,
316                        &pC->pC1->VideoAU);
317
318                    if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
319                    {
320                        M4OSA_TRACE1_1(
321                            "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
322                            m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
323                            err);
324                        return err;
325                    }
326
327                    M4OSA_TRACE2_3("C .... read  : cts  = %.0f + %ld [ 0x%x ]",
328                        pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
329                        pC->pC1->VideoAU.m_size);
330                }
331                else
332                {
333                    /**
334                    * Decide whether to read the next AU or just to increment time */
335                    if( ( pC->pC1->VideoAU.m_size == 0)
336                        || (pC->pC1->VideoAU.m_CTS >= iNextCts) )
337                    {
338                        /* Increment time by the encoding period (NO_MORE_AU or reader in advance) */
339                       // Decorrelate input and output encoding timestamp to handle encoder prefetch
340                       pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
341
342                        /* Switch (from AFTER_CUT) to normal mode because time is
343                        no more frozen */
344                        pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
345                    }
346                    else
347                    {
348                        /* In other cases (reader late), just let the reader catch up
349                         pC->ewc.dVTo */
350                        err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu(
351                            pC->pC1->pReaderContext,
352                            (M4_StreamHandler *)pC->pC1->pVideoStream,
353                            &pC->pC1->VideoAU);
354
355                        if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) )
356                        {
357                            M4OSA_TRACE1_1(
358                                "M4VSS3GPP_intEditStepVideo: READ_WRITE:\
359                                m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!",
360                                err);
361                            return err;
362                        }
363
364                        M4OSA_TRACE2_3("D .... read  : cts  = %.0f + %ld [ 0x%x ]",
365                            pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset,
366                            pC->pC1->VideoAU.m_size);
367                    }
368                }
369            }
370            break;
371
372            /* ____________________ */
373            /*|                    |*/
374            /*| DECODE_ENCODE MODE |*/
375            /*|   BEGIN_CUT MODE   |*/
376            /*|____________________|*/
377
378        case M4VSS3GPP_kEditVideoState_DECODE_ENCODE:
379        case M4VSS3GPP_kEditVideoState_BEGIN_CUT:
380            {
381                M4OSA_TRACE3_0(
382                    "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT");
383
384                /**
385                * Decode the video up to the target time
386                (will jump to the previous RAP if needed ) */
387                // Decorrelate input and output encoding timestamp to handle encoder prefetch
388                err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts);
389                if( M4NO_ERROR != err )
390                {
391                    M4OSA_TRACE1_1(
392                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
393                        M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x",
394                        err);
395                    return err;
396                }
397
398                /* If the decoding is not completed, do one more step with time frozen */
399                if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
400                {
401                    return M4NO_ERROR;
402                }
403
404                /**
405                * Reset the video pre-processing error before calling the encoder */
406                pC->ewc.VppError = M4NO_ERROR;
407
408                M4OSA_TRACE2_0("E ++++ encode AU");
409
410                /**
411                * Encode the frame (rendering, filtering and writing will be done
412                 in encoder callbacks) */
413                if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT )
414                    FrameMode = M4ENCODER_kIFrame;
415                else
416                    FrameMode = M4ENCODER_kNormalFrame;
417
418                // Decorrelate input and output encoding timestamp to handle encoder prefetch
419                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
420                pC->ewc.dInputVidCts, FrameMode);
421                /**
422                * Check if we had a VPP error... */
423                if( M4NO_ERROR != pC->ewc.VppError )
424                {
425                    M4OSA_TRACE1_1(
426                        "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
427                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
428                        pC->ewc.VppError);
429#ifdef M4VSS_SUPPORT_OMX_CODECS
430
431                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
432                    {
433#endif //M4VSS_SUPPORT_OMX_CODECS
434
435                        return pC->ewc.VppError;
436#ifdef M4VSS_SUPPORT_OMX_CODECS
437
438                    }
439
440#endif                                   //M4VSS_SUPPORT_OMX_CODECS
441
442                }
443                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
444                {
445                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
446                    {
447                        M4OSA_TRACE1_0(
448                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
449                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
450                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
451                    }
452                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
453                    file size is reached
454                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE
455                    is returned*/
456                    else if( M4WAR_WRITER_STOP_REQ == err )
457                    {
458                        M4OSA_TRACE1_0(
459                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
460                        return M4VSS3GPP_WAR_EDITING_DONE;
461                    }
462                    else
463                    {
464                        M4OSA_TRACE1_1(
465                            "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\
466                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
467                            err);
468                        return err;
469                    }
470                }
471
472                /**
473                * Increment time by the encoding period (for begin cut, do not increment so as not to
474                lose P-frames) */
475                if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate )
476                {
477                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
478                    pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
479                }
480            }
481            break;
482
483            /* _________________ */
484            /*|                 |*/
485            /*| TRANSITION MODE |*/
486            /*|_________________|*/
487
488        case M4VSS3GPP_kEditVideoState_TRANSITION:
489            {
490                M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION");
491
492                /* Don't decode more than needed */
493                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus)
494                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) )
495                {
496                    /**
497                    * Decode the clip1 video up to the target time
498                    (will jump to the previous RAP if needed) */
499                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
500                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1,
501                         (M4OSA_Int32)pC->ewc.dInputVidCts);
502                    if( M4NO_ERROR != err )
503                    {
504                        M4OSA_TRACE1_1(
505                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
506                            M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x",
507                            err);
508                        return err;
509                    }
510
511                    /* If the decoding is not completed, do one more step with time frozen */
512                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus )
513                    {
514                        return M4NO_ERROR;
515                    }
516                }
517
518                /* Don't decode more than needed */
519                if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)
520                    && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) )
521                {
522                    /**
523                    * Decode the clip2 video up to the target time
524                        (will jump to the previous RAP if needed) */
525                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
526                    err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2,
527                         (M4OSA_Int32)pC->ewc.dInputVidCts);
528                    if( M4NO_ERROR != err )
529                    {
530                        M4OSA_TRACE1_1(
531                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
532                            M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x",
533                            err);
534                        return err;
535                    }
536
537                    /* If the decoding is not completed, do one more step with time frozen */
538                    if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus )
539                    {
540                        return M4NO_ERROR;
541                    }
542                }
543
544                /**
545                * Reset the video pre-processing error before calling the encoder */
546                pC->ewc.VppError = M4NO_ERROR;
547
548                M4OSA_TRACE2_0("F **** blend AUs");
549
550                /**
551                * Encode the frame (rendering, filtering and writing will be done
552                in encoder callbacks) */
553                // Decorrelate input and output encoding timestamp to handle encoder prefetch
554                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL,
555                    pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame);
556
557                /**
558                * If encode returns a process frame error, it is likely to be a VPP error */
559                if( M4NO_ERROR != pC->ewc.VppError )
560                {
561                    M4OSA_TRACE1_1(
562                        "M4VSS3GPP_intEditStepVideo: TRANSITION:\
563                        pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x",
564                        pC->ewc.VppError);
565#ifdef M4VSS_SUPPORT_OMX_CODECS
566
567                    if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError )
568                    {
569
570#endif //M4VSS_SUPPORT_OMX_CODECS
571
572                        return pC->ewc.VppError;
573#ifdef M4VSS_SUPPORT_OMX_CODECS
574
575                    }
576
577#endif //M4VSS_SUPPORT_OMX_CODECS
578
579                }
580                else if( M4NO_ERROR != err ) /**< ...or an encoder error */
581                {
582                    if( ((M4OSA_UInt32)M4ERR_ALLOC) == err )
583                    {
584                        M4OSA_TRACE1_0(
585                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
586                            returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR");
587                        return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR;
588                    }
589
590                    /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output
591                     file size is reached
592                    The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is
593                     returned*/
594                    else if( M4WAR_WRITER_STOP_REQ == err )
595                    {
596                        M4OSA_TRACE1_0(
597                            "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize");
598                        return M4VSS3GPP_WAR_EDITING_DONE;
599                    }
600                    else
601                    {
602                        M4OSA_TRACE1_1(
603                            "M4VSS3GPP_intEditStepVideo: TRANSITION:\
604                            pVideoEncoderGlobalFcts->pFctEncode returns 0x%x",
605                            err);
606                        return err;
607                    }
608                }
609
610                /**
611                * Increment time by the encoding period */
612                // Decorrelate input and output encoding timestamp to handle encoder prefetch
613                pC->ewc.dInputVidCts += pC->dOutputFrameDuration;
614            }
615            break;
616
617            /* ____________ */
618            /*|            |*/
619            /*| ERROR CASE |*/
620            /*|____________|*/
621
622        default:
623            M4OSA_TRACE1_1(
624                "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\
625                returning M4VSS3GPP_ERR_INTERNAL_STATE",
626                pC->Vstate);
627            return M4VSS3GPP_ERR_INTERNAL_STATE;
628    }
629
630    /**
631    * Return with no error */
632    M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR");
633    return M4NO_ERROR;
634}
635
636/**
637 ******************************************************************************
638 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode()
639 * @brief    Check which video process mode we must use, depending on the output CTS.
640 * @param   pC    (IN/OUT) Internal edit context
641 ******************************************************************************
642 */
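/* Decision summary: enter TRANSITION when the clip-1 time has reached its last
 * uiTransitionDuration milliseconds, DECODE_ENCODE when an effect is active (or for an
 * H.264 begin cut), BEGIN_CUT/AFTER_CUT around a begin cut, and READ_WRITE otherwise.
 * The function also creates or destroys the video encoder when switching between the
 * read and encode modes. */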
643static M4OSA_ERR M4VSS3GPP_intCheckVideoMode(
644    M4VSS3GPP_InternalEditContext *pC )
645{
646    M4OSA_ERR err;
647    // Decorrelate input and output encoding timestamp to handle encoder prefetch
648    const M4OSA_Int32  t = (M4OSA_Int32)pC->ewc.dInputVidCts;
649    /**< Transition duration */
650    const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
651
652    M4OSA_Int32 iTmp;
653
654    const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate;
655
656    /**
657    * Check if Clip1 is on its begin cut, or in an effect zone */
658    M4VSS3GPP_intCheckVideoEffects(pC, 1);
659
660    /**
661    * Check if we are in the transition with next clip */
662    if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) )
663    {
664        /**
665        * We are in a transition */
666        pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION;
667        pC->bTransitionEffect = M4OSA_TRUE;
668
669        /**
670        * Open second clip for transition, if not yet opened */
671        if( M4OSA_NULL == pC->pC2 )
672        {
673            err = M4VSS3GPP_intOpenClip(pC, &pC->pC2,
674                &pC->pClipList[pC->uiCurrentClip + 1]);
675
676            if( M4NO_ERROR != err )
677            {
678                M4OSA_TRACE1_1(
679                    "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!",
680                    err);
681                return err;
682            }
683
684            /**
685            * Add current video output CTS to the clip offset
686            * (audio output CTS is not yet at the transition, so audio
687            *  offset can't be updated yet). */
688            // Decorrelate input and output encoding timestamp to handle encoder prefetch
689            pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts;
690
691            /**
692            * 2005-03-24: BugFix for audio-video synchro:
693            * Update transition duration due to the actual video transition beginning time.
694            * It will avoid desynchronization when doing the audio transition. */
695           // Decorrelate input and output encoding timestamp to handle encoder prefetch
696            iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\
697             - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset);
698            if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration)
699            /**< Test in case of a very short transition */
700            {
701                pC->pTransitionList[pC->
702                    uiCurrentClip].uiTransitionDuration -= iTmp;
703
704                /**
705                * Don't forget to also correct the total duration used for the progress bar
706                * (it was computed with the original transition duration). */
707                pC->ewc.iOutputDuration += iTmp;
708            }
709            /**< No "else" here because it's hard to predict the effect of a 0-duration transition... */
710        }
711
712        /**
713        * Check effects for clip2 */
714        M4VSS3GPP_intCheckVideoEffects(pC, 2);
715    }
716    else
717    {
718        /**
719        * We are not in a transition */
720        pC->bTransitionEffect = M4OSA_FALSE;
721
722        /* If there is an effect we go to decode/encode mode */
723        if ((pC->nbActiveEffects > 0) ||(pC->nbActiveEffects1 > 0))
724        {
725            pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
726        }
727        /* We do a begin cut, except if already done (time is not progressing because we want
728        to catch all P-frames after the cut) */
729        else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
730        {
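            /* For H.264 clips, handle the begin cut by re-encoding until the end of the
             * clip (bEncodeTillEoF): switching back to copy/paste mid-stream could
             * distort the following predicted frames (see the copy/paste branch below). */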
731            if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
732                pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
733                pC->bEncodeTillEoF = M4OSA_TRUE;
734            } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
735                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
736                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
737            } else {
738                pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
739            }
740        }
741        /* Else we are in default copy/paste mode */
742        else
743        {
744            if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
745                || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
746            {
747                pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
748            }
749            else if( pC->bIsMMS == M4OSA_TRUE )
750            {
751                M4OSA_UInt32 currentBitrate;
752                M4OSA_ERR err = M4NO_ERROR;
753
754                /* Do we need to re-encode the video to downgrade the bitrate or not? */
755                /* Let's compute the current bitrate of the current edited clip */
756                err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption(
757                    pC->pC1->pReaderContext,
758                    M4READER_kOptionID_Bitrate, &currentBitrate);
759
760                if( err != M4NO_ERROR )
761                {
762                    M4OSA_TRACE1_1(
763                        "M4VSS3GPP_intCheckVideoMode:\
764                        Error when getting next bitrate of edited clip: 0x%x",
765                        err);
766                    return err;
767                }
768
769                /* Remove audio bitrate */
770                currentBitrate -= 12200;
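                /* 12200 bps presumably corresponds to the AMR-NB 12.2 kbps audio track */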
771
772                /* Test if we go into copy/paste mode or into decode/encode mode */
773                if( currentBitrate > pC->uiMMSVideoBitrate )
774                {
775                    pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
776                }
777                else
778                {
779                    pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
780                }
781            }
782            else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
783                    (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
784                    pC->bEncodeTillEoF == M4OSA_FALSE)
785            {
786                /**
787                 * Test if we go into copy/paste mode or into decode/encode mode
788                 * If an external effect has been applied on the current clip
789                 * then continue to be in decode/encode mode till end of
790                 * clip to avoid H.264 distortion.
791                 */
792                pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
793            }
794        }
795    }
796
797    /**
798    * Check if we create an encoder */
799    if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate)
800        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
801        == previousVstate)) /**< read mode */
802        && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate)
803        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate)
804        || (M4VSS3GPP_kEditVideoState_TRANSITION
805        == pC->Vstate)) /**< encode mode */
806        && pC->bIsMMS == M4OSA_FALSE )
807    {
808        /**
809        * Create the encoder */
810        err = M4VSS3GPP_intCreateVideoEncoder(pC);
811
812        if( M4NO_ERROR != err )
813        {
814            M4OSA_TRACE1_1(
815                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
816                err);
817            return err;
818        }
819    }
820    else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL )
821    {
822        /**
823        * Create the encoder */
824        err = M4VSS3GPP_intCreateVideoEncoder(pC);
825
826        if( M4NO_ERROR != err )
827        {
828            M4OSA_TRACE1_1(
829                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!",
830                err);
831            return err;
832        }
833    }
834
835    /**
836    * When we go from filtering to read/write, we must act like a begin cut,
837    * because the last filtered image may be different than the original image. */
838    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
839        || (M4VSS3GPP_kEditVideoState_TRANSITION
840        == previousVstate)) /**< encode mode */
841        && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */
842        && (pC->bEncodeTillEoF == M4OSA_FALSE) )
843    {
844        pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
845    }
846
847    /**
848    * Check if we destroy an encoder */
849    else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate)
850        || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
851        || (M4VSS3GPP_kEditVideoState_TRANSITION
852        == previousVstate)) /**< encode mode */
853        && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate)
854        || (M4VSS3GPP_kEditVideoState_AFTER_CUT
855        == pC->Vstate)) /**< read mode */
856        && pC->bIsMMS == M4OSA_FALSE )
857    {
858        /**
859        * Destroy the previously created encoder */
860        err = M4VSS3GPP_intDestroyVideoEncoder(pC);
861
862        if( M4NO_ERROR != err )
863        {
864            M4OSA_TRACE1_1(
865                "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!",
866                err);
867            return err;
868        }
869    }
870
871    /**
872    * Return with no error */
873    M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR");
874    return M4NO_ERROR;
875}
876
877/******************************************************************************
878 * M4OSA_ERR M4VSS3GPP_intStartAU()
879 * @brief    StartAU writer-like interface used for the VSS 3GPP only
880 * @note
881 * @param    pContext: (IN) It is the VSS 3GPP context in our case
882 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
883 * @param    pAU:      (IN/OUT) Access Unit to be prepared.
884 * @return    M4NO_ERROR: there is no error
885 ******************************************************************************
886 */
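/* Note: this wrapper (and M4VSS3GPP_intProcessAU below) simply forwards to the 3GP
 * writer's own pStartAU/pProcessAU; it is presumably registered with the video encoder
 * shell so that AUs produced from encoder callbacks go through the same writer context
 * as the copy/paste path. */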
887M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext,
888                               M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
889{
890    M4OSA_ERR err;
891    M4OSA_UInt32 uiMaxAuSize;
892
893    /**
894    * Given context is actually the VSS3GPP context */
895    M4VSS3GPP_InternalEditContext *pC =
896        (M4VSS3GPP_InternalEditContext *)pContext;
897
898    /**
899    * Get the output AU to write into */
900    err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext,
901        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
902
903    if( M4NO_ERROR != err )
904    {
905        M4OSA_TRACE1_1(
906            "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!",
907            err);
908        return err;
909    }
910
911    /**
912    *    Return */
913    M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR");
914    return M4NO_ERROR;
915}
916
917/******************************************************************************
918 * M4OSA_ERR M4VSS3GPP_intProcessAU()
919 * @brief    ProcessAU writer-like interface used for the VSS 3GPP only
920 * @note
921 * @param    pContext: (IN) It is the VSS 3GPP context in our case
922 * @param    streamID: (IN) Id of the stream to which the Access Unit is related.
923 * @param    pAU:      (IN/OUT) Access Unit to be written
924 * @return    M4NO_ERROR: there is no error
925 ******************************************************************************
926 */
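/* Counterpart of M4VSS3GPP_intStartAU: it records the encoder's output CTS in
 * pC->ewc.dOutputVidCts, updates the AU time info and writes the AU to the 3GP file. */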
927M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext,
928                                 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU )
929{
930    M4OSA_ERR err;
931
932    /**
933    * Given context is actually the VSS3GPP context */
934    M4VSS3GPP_InternalEditContext *pC =
935        (M4VSS3GPP_InternalEditContext *)pContext;
936
937    /**
938    * Fix the encoded AU time */
939    // Decorrelate input and output encoding timestamp to handle encoder prefetch
940    pC->ewc.dOutputVidCts = pAU->CTS;
941    /**
942    * Update time info for the Counter Time System to be equal to the bit-stream time */
943    M4VSS3GPP_intUpdateTimeInfo(pC, pAU);
944
945    /**
946    * Write the AU */
947    err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext,
948        M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU);
949
950    if( M4NO_ERROR != err )
951    {
952        M4OSA_TRACE1_1(
953            "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!",
954            err);
955        return err;
956    }
957
958    /**
959    *    Return */
960    M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR");
961    return M4NO_ERROR;
962}
963
964/**
965 ******************************************************************************
966 * M4OSA_ERR M4VSS3GPP_intVPP()
967 * @brief    We implement our own VideoPreProcessing function
968 * @note    It is called by the video encoder
969 * @param    pContext    (IN) VPP context, which actually is the VSS 3GPP context in our case
970 * @param    pPlaneIn    (IN)
971 * @param    pPlaneOut    (IN/OUT) Pointer to an array of 3 planes that will contain the output
972 *                                  YUV420 image
973 * @return    M4NO_ERROR:    No error
974 ******************************************************************************
975 */
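/* In transition mode this function renders the current frame of both clips into
 * intermediate YUV420 planes, applies the per-clip effects if any, and blends the two
 * planes into pPlaneOut via M4VSS3GPP_intVideoTransition. Otherwise it renders clip 1
 * (directly into pPlaneOut, or into an intermediate plane when an effect is active).
 * Errors are stored in pC->ewc.VppError and M4NO_ERROR is returned to the encoder. */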
976M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn,
977                           M4VIFI_ImagePlane *pPlaneOut )
978{
979    M4OSA_ERR err;
980    M4_MediaTime t;
981    M4VIFI_ImagePlane *pTmp = M4OSA_NULL;
982    M4VIFI_ImagePlane pTemp1[3],pTemp2[3];
983    M4OSA_UInt32  i =0;
984    /**
985    * VPP context is actually the VSS3GPP context */
986    M4VSS3GPP_InternalEditContext *pC =
987        (M4VSS3GPP_InternalEditContext *)pContext;
988    pTemp1[0].pac_data = pTemp2[0].pac_data = M4OSA_NULL;
989    /**
990    * Reset VPP error remembered in context */
991    pC->ewc.VppError = M4NO_ERROR;
992
993    /**
994    * At the end of the editing, we may be called when no more clip is loaded.
995    * (because to close the encoder properly it must be stepped once or twice...) */
996    if( M4OSA_NULL == pC->pC1 )
997    {
998        /**
999        * We must fill the input of the encoder with a dummy image, because
1000        * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */
1001        memset((void *)pPlaneOut[0].pac_data,0,
1002            pPlaneOut[0].u_stride * pPlaneOut[0].u_height);
1003        memset((void *)pPlaneOut[1].pac_data,0,
1004            pPlaneOut[1].u_stride * pPlaneOut[1].u_height);
1005        memset((void *)pPlaneOut[2].pac_data,0,
1006            pPlaneOut[2].u_stride * pPlaneOut[2].u_height);
1007
1008        M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)");
1009        return M4NO_ERROR;
1010    }
1011
1012    /**
1013    **************** Transition case ****************/
1014    if( M4OSA_TRUE == pC->bTransitionEffect )
1015    {
1016        if (M4OSA_NULL == pTemp1[0].pac_data)
1017        {
1018            err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth,
1019                                              pC->ewc.uiVideoHeight);
1020            if (M4NO_ERROR != err)
1021            {
1022                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \
1023                               returning M4NO_ERROR", err);
1024                pC->ewc.VppError = err;
1025                return M4NO_ERROR; /**< Return no error to the encoder core
1026                                   (else it may leak in some situations...) */
1027            }
1028        }
1029        if (M4OSA_NULL == pTemp2[0].pac_data)
1030        {
1031            err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth,
1032                                              pC->ewc.uiVideoHeight);
1033            if (M4NO_ERROR != err)
1034            {
1035                M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \
1036                               returning M4NO_ERROR", err);
1037                pC->ewc.VppError = err;
1038                return M4NO_ERROR; /**< Return no error to the encoder core
1039                                  (else it may leak in some situations...) */
1040            }
1041        }
1042        /**
1043        * We need two intermediate planes */
1044        if( M4OSA_NULL == pC->yuv1[0].pac_data )
1045        {
1046            err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1047                pC->ewc.uiVideoHeight);
1048
1049            if( M4NO_ERROR != err )
1050            {
1051                M4OSA_TRACE1_1(
1052                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1053                    returning M4NO_ERROR",
1054                    err);
1055                pC->ewc.VppError = err;
1056                return
1057                    M4NO_ERROR; /**< Return no error to the encoder core
1058                                (else it may leak in some situations...) */
1059            }
1060        }
1061
1062        if( M4OSA_NULL == pC->yuv2[0].pac_data )
1063        {
1064            err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth,
1065                pC->ewc.uiVideoHeight);
1066
1067            if( M4NO_ERROR != err )
1068            {
1069                M4OSA_TRACE1_1(
1070                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1071                    returning M4NO_ERROR",
1072                    err);
1073                pC->ewc.VppError = err;
1074                return
1075                    M4NO_ERROR; /**< Return no error to the encoder core
1076                                (else it may leak in some situations...) */
1077            }
1078        }
1079
1080        /**
1081        * Allocate new temporary plane if needed */
1082        if( M4OSA_NULL == pC->yuv3[0].pac_data )
1083        {
1084            err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth,
1085                pC->ewc.uiVideoHeight);
1086
1087            if( M4NO_ERROR != err )
1088            {
1089                M4OSA_TRACE1_1(
1090                    "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\
1091                    returning M4NO_ERROR",
1092                    err);
1093                pC->ewc.VppError = err;
1094                return
1095                    M4NO_ERROR; /**< Return no error to the encoder core
1096                                (else it may leak in some situations...) */
1097            }
1098        }
1099
1100        /**
1101        * Compute the time in the clip1 base: t = to - Offset */
1102        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1103        t = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1104
1105        /**
1106        * Render Clip1 */
1107        if( pC->pC1->isRenderDup == M4OSA_FALSE )
1108        {
1109            if(pC->nbActiveEffects > 0)
1110            {
1111                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt,
1112                                                                      &t, pTemp1,
1113                                                                      M4OSA_TRUE);
1114                if (M4NO_ERROR != err)
1115                {
1116                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \
1117                                    returning M4NO_ERROR", err);
1118                    pC->ewc.VppError = err;
1119                    return M4NO_ERROR; /**< Return no error to the encoder core
1120                                       (else it may leak in some situations...) */
1121                }
1122                pC->bIssecondClip = M4OSA_FALSE;
1123                err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp1 ,pC->yuv1 );
1124                if (M4NO_ERROR != err)
1125                {
1126                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \
1127                                    returning M4NO_ERROR", err);
1128                    pC->ewc.VppError = err;
1129                    return M4NO_ERROR; /**< Return no error to the encoder core
1130                                       (else it may leak in some situations...) */
1131                }
1132                pC->pC1->lastDecodedPlane = pTemp1;
1133            }
1134            else
1135            {
1136                err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt,
1137                                                                      &t, pC->yuv1,
1138                                                                      M4OSA_TRUE);
1139                if (M4NO_ERROR != err)
1140                {
1141                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \
1142                                    returning M4NO_ERROR", err);
1143                    pC->ewc.VppError = err;
1144                    return M4NO_ERROR; /**< Return no error to the encoder core
1145                                      (else it may leak in some situations...) */
1146                }
1147                pC->pC1->lastDecodedPlane = pC->yuv1;
1148            }
1149            pC->pC1->iVideoRenderCts = (M4OSA_Int32)t;
1150        }
1151        else
1152        {
1153            /* Copy last decoded plane to output plane */
1154            memcpy((void *)pTmp[0].pac_data,
1155                (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1156                (pTmp[0].u_height * pTmp[0].u_width));
1157            memcpy((void *)pTmp[1].pac_data,
1158                (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1159                (pTmp[1].u_height * pTmp[1].u_width));
1160            memcpy((void *)pTmp[2].pac_data,
1161                (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1162                (pTmp[2].u_height * pTmp[2].u_width));
1163            pC->pC1->lastDecodedPlane = pTmp;
1164        }
1165
1166        /**
1167        * Compute the time in the clip2 base: t = to - Offset */
1168        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1169        t = pC->ewc.dInputVidCts - pC->pC2->iVoffset;
1170        /**
1171        * Render Clip2 */
1172        if( pC->pC2->isRenderDup == M4OSA_FALSE )
1173        {
1174            if(pC->nbActiveEffects1 > 0)
1175            {
1176                err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt,
1177                                                                      &t, pTemp2,
1178                                                                      M4OSA_TRUE);
1179                if (M4NO_ERROR != err)
1180                {
1181                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \
1182                                   returning M4NO_ERROR", err);
1183                    pC->ewc.VppError = err;
1184                    return M4NO_ERROR; /**< Return no error to the encoder core
1185                                       (else it may leak in some situations...) */
1186                }
1187
1188                pC->bIssecondClip = M4OSA_TRUE;
1189                err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp2 ,pC->yuv2);
1190                if (M4NO_ERROR != err)
1191                {
1192                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(2) returns 0x%x, \
1193                                    returning M4NO_ERROR", err);
1194                    pC->ewc.VppError = err;
1195                    return M4NO_ERROR; /**< Return no error to the encoder core
1196                                       (else it may leak in some situations...) */
1197                }
1198                pC->pC2->lastDecodedPlane = pTemp2;
1199            }
1200            else
1201            {
1202                err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt,
1203                                                                      &t, pC->yuv2,
1204                                                                      M4OSA_TRUE);
1205                if (M4NO_ERROR != err)
1206                {
1207                    M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \
1208                                    returning M4NO_ERROR", err);
1209                    pC->ewc.VppError = err;
1210                    return M4NO_ERROR; /**< Return no error to the encoder core
1211                                       (else it may leak in some situations...) */
1212                }
1213                pC->pC2->lastDecodedPlane = pC->yuv2;
1214            }
1215            pC->pC2->iVideoRenderCts = (M4OSA_Int32)t;
1216        }
1217        else
1218        {
1219            /* Copy last decoded plane to output plane */
1220            memcpy((void *)pTmp[0].pac_data,
1221                (void *)pC->pC2->lastDecodedPlane[0].pac_data,
1222                (pTmp[0].u_height * pTmp[0].u_width));
1223            memcpy((void *)pTmp[1].pac_data,
1224                (void *)pC->pC2->lastDecodedPlane[1].pac_data,
1225                (pTmp[1].u_height * pTmp[1].u_width));
1226            memcpy((void *)pTmp[2].pac_data,
1227                (void *)pC->pC2->lastDecodedPlane[2].pac_data,
1228                (pTmp[2].u_height * pTmp[2].u_width));
1229            pC->pC2->lastDecodedPlane = pTmp;
1230        }
1231
1232
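        /* Blend the two rendered planes (pC->yuv1 / pC->yuv2) into the encoder output
         * plane according to the current transition progress. */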
1233        pTmp = pPlaneOut;
1234        err = M4VSS3GPP_intVideoTransition(pC, pTmp);
1235
1236        if( M4NO_ERROR != err )
1237        {
1238            M4OSA_TRACE1_1(
1239                "M4VSS3GPP_intVPP: M4VSS3GPP_intVideoTransition returns 0x%x,\
1240                returning M4NO_ERROR",
1241                err);
1242            pC->ewc.VppError = err;
1243            return  M4NO_ERROR; /**< Return no error to the encoder core
1244                                (else it may leak in some situations...) */
1245        }
1246        for (i=0; i < 3; i++)
1247        {
1248            if (pTemp2[i].pac_data != M4OSA_NULL)
1249            {
1250                free(pTemp2[i].pac_data);
1251                pTemp2[i].pac_data = M4OSA_NULL;
1252            }
1253
1254
1255            if (pTemp1[i].pac_data != M4OSA_NULL)
1256            {
1257                free(pTemp1[i].pac_data);
1258                pTemp1[i].pac_data = M4OSA_NULL;
1259            }
1260        }
1261    }
1262    /**
1263    **************** No Transition case ****************/
1264    else
1265    {
1266        /**
1267        * Check if there is a filter */
1268        if( pC->nbActiveEffects > 0 )
1269        {
1270            /**
1271            * If we do modify the image, we need an intermediate image plane */
1272            if( M4OSA_NULL == pC->yuv1[0].pac_data )
1273            {
1274                err =
1275                    M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth,
1276                    pC->ewc.uiVideoHeight);
1277
1278                if( M4NO_ERROR != err )
1279                {
1280                    M4OSA_TRACE1_1(
1281                        "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 returns 0x%x,\
1282                        returning M4NO_ERROR",
1283                        err);
1284                    pC->ewc.VppError = err;
1285                    return
1286                        M4NO_ERROR; /**< Return no error to the encoder core
1287                                    (else it may leak in some situations...) */
1288                }
1289            }
1290            /**
1291            * The image is rendered in the intermediate image plane */
1292            pTmp = pC->yuv1;
1293        }
1294        else
1295        {
1296            /**
1297            * No filter, the image is directly rendered in pPlaneOut */
1298            pTmp = pPlaneOut;
1299        }
1300
1301        /**
1302        * Compute the time in the clip base: t = to - Offset */
1303        // Decorrelate input and output encoding timestamp to handle encoder prefetch
1304        t = pC->ewc.dInputVidCts - pC->pC1->iVoffset;
1305
1306        if( pC->pC1->isRenderDup == M4OSA_FALSE )
1307        {
1308            err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(
1309                pC->pC1->pViDecCtxt, &t, pTmp, M4OSA_TRUE);
1310
1311            if( M4NO_ERROR != err )
1312            {
1313                M4OSA_TRACE1_1(
1314                    "M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender returns 0x%x,\
1315                    returning M4NO_ERROR",
1316                    err);
1317                pC->ewc.VppError = err;
1318                return
1319                    M4NO_ERROR; /**< Return no error to the encoder core
1320                                (else it may leak in some situations...) */
1321            }
1322            pC->pC1->lastDecodedPlane = pTmp;
1323            pC->pC1->iVideoRenderCts = (M4OSA_Int32)t;
1324        }
1325        else
1326        {
1327            /* Copy last decoded plane to output plane */
1328            memcpy((void *)pTmp[0].pac_data,
1329                (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1330                (pTmp[0].u_height * pTmp[0].u_width));
1331            memcpy((void *)pTmp[1].pac_data,
1332                (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1333                (pTmp[1].u_height * pTmp[1].u_width));
1334            memcpy((void *)pTmp[2].pac_data,
1335                (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1336                (pTmp[2].u_height * pTmp[2].u_width));
1337            pC->pC1->lastDecodedPlane = pTmp;
1338        }
1339
1340        M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", t);
1341
1342        /**
1343        * Apply the clip1 effect */
1344        //        if (pC->iClip1ActiveEffect >= 0)
1345        if( pC->nbActiveEffects > 0 )
1346        {
1347            err = M4VSS3GPP_intApplyVideoEffect(pC,/*1,*/ pC->yuv1, pPlaneOut);
1348
1349            if( M4NO_ERROR != err )
1350            {
1351                M4OSA_TRACE1_1(
1352                    "M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x,\
1353                    returning M4NO_ERROR",
1354                    err);
1355                pC->ewc.VppError = err;
1356                return
1357                    M4NO_ERROR; /**< Return no error to the encoder core
1358                                (else it may leak in some situations...) */
1359            }
1360        }
1361    }
1362
1363    /**
1364    *    Return */
1365    M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
1366    return M4NO_ERROR;
1367}
1368
1369/**
1370 ******************************************************************************
1371 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect()
1372 * @brief    Apply video effect from pPlaneIn to pPlaneOut
1373 * @param   pC                (IN/OUT) Internal edit context
1374 * @param    pPlaneIn        (IN) Input raw YUV420 image
1375 * @param    pPlaneOut        (IN/OUT) Output raw YUV420 image
1376 * @note    The clip to process (first or second) is selected internally via pC->bIssecondClip
1377 * @return    M4NO_ERROR:                        No error
1378 ******************************************************************************
1379 */
1380static M4OSA_ERR
1381M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC,
1382                               M4VIFI_ImagePlane *pPlaneIn,
1383                               M4VIFI_ImagePlane *pPlaneOut )
1384{
1385    M4OSA_ERR err;
1386
1387    M4VSS3GPP_ClipContext *pClip;
1388    M4VSS3GPP_EffectSettings *pFx;
1389    M4VSS3GPP_ExternalProgress extProgress;
1390
1391    M4OSA_Double VideoEffectTime;
1392    M4OSA_Double PercentageDone;
1393    M4OSA_Int32 tmp;
1394
1395    M4VIFI_ImagePlane *pPlaneTempIn;
1396    M4VIFI_ImagePlane *pPlaneTempOut;
1397    M4OSA_UInt8 i;
1398    M4OSA_UInt8 NumActiveEffects =0;
1399
1400
1401    pClip = pC->pC1;
1402    if (pC->bIssecondClip == M4OSA_TRUE)
1403    {
1404        NumActiveEffects = pC->nbActiveEffects1;
1405    }
1406    else
1407    {
1408        NumActiveEffects = pC->nbActiveEffects;
1409    }
1410
1411    /**
1412    * Allocate temporary plane if needed RC */
1413    if (M4OSA_NULL == pC->yuv4[0].pac_data && NumActiveEffects  > 1)
1414    {
1415        err = M4VSS3GPP_intAllocateYUV420(pC->yuv4, pC->ewc.uiVideoWidth,
1416            pC->ewc.uiVideoHeight);
1417
1418        if( M4NO_ERROR != err )
1419        {
1420            M4OSA_TRACE1_1(
1421                "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\
1422                returning M4NO_ERROR",
1423                err);
1424            pC->ewc.VppError = err;
1425            return
1426                M4NO_ERROR; /**< Return no error to the encoder core
1427                            (else it may leak in some situations...) */
1428        }
1429    }
1430
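    /**
    * Effects are chained by ping-ponging between the yuv4 scratch plane and
    * pPlaneOut. The initial output plane is picked from the parity of the
    * number of active effects so that the last effect in the chain always
    * writes its result into pPlaneOut. */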
1431    if (NumActiveEffects  % 2 == 0)
1432    {
1433        pPlaneTempIn = pPlaneIn;
1434        pPlaneTempOut = pC->yuv4;
1435    }
1436    else
1437    {
1438        pPlaneTempIn = pPlaneIn;
1439        pPlaneTempOut = pPlaneOut;
1440    }
1441
1442    for (i=0; i<NumActiveEffects; i++)
1443    {
1444        if (pC->bIssecondClip == M4OSA_TRUE)
1445        {
1446
1447
1448            pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1449            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1450            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1451            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1452                              pC->pTransitionList[pC->uiCurrentClip].
1453                              uiTransitionDuration- pFx->uiStartTime;
1454        }
1455        else
1456        {
1457            pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1458            /* Compute how far from the beginning of the effect we are, in clip-base time. */
1459            // Decorrelate input and output encoding timestamp to handle encoder prefetch
1460            VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1461        }
1462
1463
1464
1465        /* To calculate the %, subtract timeIncrement because the effect should finish on the last frame, */
1466        /* which is presented from CTS = eof-timeIncrement until CTS = eof */
1467        PercentageDone = VideoEffectTime
1468            / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/);
1469
1470        if( PercentageDone < 0.0 )
1471            PercentageDone = 0.0;
1472
1473        if( PercentageDone > 1.0 )
1474            PercentageDone = 1.0;
1475
1476        switch( pFx->VideoEffectType )
1477        {
1478            case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
1479                /**
1480                * Compute where we are in the effect (scale is 0->1024). */
1481                tmp = (M4OSA_Int32)(PercentageDone * 1024);
1482
1483                /**
1484                * Apply the darkening effect */
1485                err =
1486                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1487                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1488
1489                if( M4NO_ERROR != err )
1490                {
1491                    M4OSA_TRACE1_1(
1492                        "M4VSS3GPP_intApplyVideoEffect:\
1493                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1494                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1495                        err);
1496                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1497                }
1498                break;
1499
1500            case M4VSS3GPP_kVideoEffectType_FadeToBlack:
1501                /**
1502                * Compute where we are in the effect (scale is 0->1024) */
1503                tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024);
1504
1505                /**
1506                * Apply the darkening effect */
1507                err =
1508                    M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn,
1509                    (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL);
1510
1511                if( M4NO_ERROR != err )
1512                {
1513                    M4OSA_TRACE1_1(
1514                        "M4VSS3GPP_intApplyVideoEffect:\
1515                        M4VFL_modifyLumaWithScale returns error 0x%x,\
1516                        returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR",
1517                        err);
1518                    return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
1519                }
1520                break;
1521
1522            default:
1523                if( pFx->VideoEffectType
1524                    >= M4VSS3GPP_kVideoEffectType_External )
1525                {
1526                    M4OSA_UInt32 Cts = 0;
1527                    M4OSA_Int32 nextEffectTime;
1528
1529                    /**
1530                    * Compute where we are in the effect (scale is 0->1000) */
1531                    tmp = (M4OSA_Int32)(PercentageDone * 1000);
1532
1533                    /**
1534                    * Set the progress info provided to the external function */
1535                    extProgress.uiProgress = (M4OSA_UInt32)tmp;
1536                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1537                    extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1538                    extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1539                    extProgress.bIsLast = M4OSA_FALSE;
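                    /* Flag the call as the last one for this effect: if the next
                    frame's CTS falls past the effect end time, this is the last
                    frame the effect is applied to. */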
1540                    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1541                    nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1542                        + pC->dOutputFrameDuration);
1543                    if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration))
1544                    {
1545                        extProgress.bIsLast = M4OSA_TRUE;
1546                    }
1547
1548                    err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1549                        pPlaneTempIn, pPlaneTempOut, &extProgress,
1550                        pFx->VideoEffectType
1551                        - M4VSS3GPP_kVideoEffectType_External);
1552
1553                    if( M4NO_ERROR != err )
1554                    {
1555                        M4OSA_TRACE1_1(
1556                            "M4VSS3GPP_intApplyVideoEffect: \
1557                            External video effect function returns 0x%x!",
1558                            err);
1559                        return err;
1560                    }
1561                    break;
1562                }
1563                else
1564                {
1565                    M4OSA_TRACE1_1(
1566                        "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\
1567                        returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE",
1568                        pFx->VideoEffectType);
1569                    return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE;
1570                }
1571        }
1572        /**
1573        * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */
1574        if (((i % 2 == 0) && (NumActiveEffects  % 2 == 0))
1575            || ((i % 2 != 0) && (NumActiveEffects % 2 != 0)))
1576        {
1577            pPlaneTempIn = pC->yuv4;
1578            pPlaneTempOut = pPlaneOut;
1579        }
1580        else
1581        {
1582            pPlaneTempIn = pPlaneOut;
1583            pPlaneTempOut = pC->yuv4;
1584        }
1585    }
1586
1587    /**
1588    *    Return */
1589    M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR");
1590    return M4NO_ERROR;
1591}
1592
1593/**
1594 ******************************************************************************
1595 * M4OSA_ERR M4VSS3GPP_intVideoTransition()
1596 * @brief    Apply video transition effect pC1+pC2->pPlaneOut
1597 * @param   pC                (IN/OUT) Internal edit context
1598 * @param    pPlaneOut        (IN/OUT) Output raw YUV420 image
1599 * @return    M4NO_ERROR:                        No error
1600 ******************************************************************************
1601 */
1602static M4OSA_ERR
1603M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC,
1604                             M4VIFI_ImagePlane *pPlaneOut )
1605{
1606    M4OSA_ERR err;
1607    M4OSA_Int32 iProgress;
1608    M4VSS3GPP_ExternalProgress extProgress;
1609    M4VIFI_ImagePlane *pPlane;
1610    M4OSA_Int32 i;
1611    const M4OSA_Int32 iDur = (M4OSA_Int32)pC->
1612        pTransitionList[pC->uiCurrentClip].uiTransitionDuration;
1613
1614    /**
1615    * Compute how far from the end cut we are, in clip-base time.
1616    * It is done with integers because the offset and begin cut have been rounded already. */
1617    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1618    iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts +
1619        ((M4OSA_Double)pC->pC1->iVoffset);
1620    /**
1621    * We must remove the duration of one frame, else we would almost never reach the end
1622    * (it is essentially a fencepost / "posts and intervals" issue). */
1623    iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration;
1624
1625    if( iProgress < 0 ) /**< Sanity checks */
1626    {
1627        iProgress = 0;
1628    }
1629
1630    /**
1631    * Compute where we are in the transition, on a base 1000 */
1632    iProgress = ( ( iDur - iProgress) * 1000) / iDur;
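    /* e.g., with a 1000 ms transition (iDur = 1000) and 400 ms left before the
    end cut (iProgress = 400), the line above yields ((1000 - 400) * 1000) / 1000
    = 600, i.e. we are 60% through the transition. */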
1633
1634    /**
1635    * Sanity checks */
1636    if( iProgress < 0 )
1637    {
1638        iProgress = 0;
1639    }
1640    else if( iProgress > 1000 )
1641    {
1642        iProgress = 1000;
1643    }
1644
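    /**
    * Remap the linear progress (0..1000) according to the transition behaviour:
    * - SpeedUp:    p' = p*p / 1000       (starts slowly, finishes quickly)
    * - Linear:     p' = p
    * - SpeedDown:  p' = sqrt(p * 1000)   (starts quickly, finishes slowly)
    * - SlowMiddle: sqrt curve up to p = 500, then quadratic (slowest around the middle)
    * - FastMiddle: quadratic up to p = 500, then sqrt (fastest around the middle) */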
1645    switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour )
1646    {
1647        case M4VSS3GPP_TransitionBehaviour_SpeedUp:
1648            iProgress = ( iProgress * iProgress) / 1000;
1649            break;
1650
1651        case M4VSS3GPP_TransitionBehaviour_Linear:
1652            /*do nothing*/
1653            break;
1654
1655        case M4VSS3GPP_TransitionBehaviour_SpeedDown:
1656            iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000));
1657            break;
1658
1659        case M4VSS3GPP_TransitionBehaviour_SlowMiddle:
1660            if( iProgress < 500 )
1661            {
1662                iProgress = (M4OSA_Int32)(sqrt(iProgress * 500));
1663            }
1664            else
1665            {
1666                iProgress =
1667                    (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500))
1668                    / 500) + 500);
1669            }
1670            break;
1671
1672        case M4VSS3GPP_TransitionBehaviour_FastMiddle:
1673            if( iProgress < 500 )
1674            {
1675                iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500);
1676            }
1677            else
1678            {
1679                iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500);
1680            }
1681            break;
1682
1683        default:
1684            /*do nothing*/
1685            break;
1686    }
1687
1688    switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType )
1689    {
1690        case M4VSS3GPP_kVideoTransitionType_CrossFade:
1691            /**
1692            * Apply the transition effect */
1693            err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL,
1694                (M4ViComImagePlane *)pC->yuv1,
1695                (M4ViComImagePlane *)pC->yuv2,
1696                (M4ViComImagePlane *)pPlaneOut, iProgress);
1697
1698            if( M4NO_ERROR != err )
1699            {
1700                M4OSA_TRACE1_1(
1701                    "M4VSS3GPP_intVideoTransition:\
1702                     M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\
1703                    returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR",
1704                    err);
1705                return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR;
1706            }
1707            break;
1708
1709        case M4VSS3GPP_kVideoTransitionType_None:
1710            /**
1711            * This is a simple, non-optimized implementation of the None transition:
1712            * we just copy the relevant input YUV frame to the output */
1713            if( iProgress < 500 ) /**< first half of transition */
1714            {
1715                pPlane = pC->yuv1;
1716            }
1717            else /**< second half of transition */
1718            {
1719                pPlane = pC->yuv2;
1720            }
1721            /**
1722            * Copy the input YUV frames */
1723            i = 3;
1724
1725            while( i-- > 0 )
1726            {
1727                memcpy((void *)pPlaneOut[i].pac_data,
1728                 (void *)pPlane[i].pac_data,
1729                    pPlaneOut[i].u_stride * pPlaneOut[i].u_height);
1730            }
1731            break;
1732
1733        default:
1734            if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType
1735                >= M4VSS3GPP_kVideoTransitionType_External )
1736            {
1737                /**
1738                * Set the progress info provided to the external function */
1739                extProgress.uiProgress = (M4OSA_UInt32)iProgress;
1740                // Decorrelate input and output encoding timestamp to handle encoder prefetch
1741                extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1742                extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset;
1743
1744                err = pC->pTransitionList[pC->
1745                    uiCurrentClip].ExtVideoTransitionFct(
1746                    pC->pTransitionList[pC->
1747                    uiCurrentClip].pExtVideoTransitionFctCtxt,
1748                    pC->yuv1, pC->yuv2, pPlaneOut, &extProgress,
1749                    pC->pTransitionList[pC->
1750                    uiCurrentClip].VideoTransitionType
1751                    - M4VSS3GPP_kVideoTransitionType_External);
1752
1753                if( M4NO_ERROR != err )
1754                {
1755                    M4OSA_TRACE1_1(
1756                        "M4VSS3GPP_intVideoTransition:\
1757                        External video transition function returns 0x%x!",
1758                        err);
1759                    return err;
1760                }
1761                break;
1762            }
1763            else
1764            {
1765                M4OSA_TRACE1_1(
1766                    "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\
1767                    returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE",
1768                    pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType);
1769                return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE;
1770            }
1771    }
1772
1773    /**
1774    *    Return */
1775    M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR");
1776    return M4NO_ERROR;
1777}
1778
1779/**
1780 ******************************************************************************
1781 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo()
1782 * @brief    Update bit stream time info by Counter Time System to be compliant with
1783 *          players using bit stream time info
1784 * @note    H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops
1785 *          (GOV, see the standard)
1786 * @param   pC      (IN/OUT) Internal edit context; holds the offset between system and
1787 *                  video time (MPEG4 only) and the GOV state of the current clip (MPEG4 only)
1788 * @param   pAU     (IN/OUT) Video access unit whose time info is updated
1789 * @return    nothing
1790 ******************************************************************************
1791 */
1792static M4OSA_Void
1793M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC,
1794                            M4SYS_AccessUnit *pAU )
1795{
1796    M4OSA_UInt8 uiTmp;
1797    M4OSA_UInt32 uiCts = 0;
1798    M4OSA_MemAddr8 pTmp;
1799    M4OSA_UInt32 uiAdd;
1800    M4OSA_UInt32 uiCurrGov;
1801    M4OSA_Int8 iDiff;
1802
1803    M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1;
1804    M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset);
1805
1806    /**
1807    * Set H263 time counter from system time */
1808    if( M4SYS_kH263 == pAU->stream->streamType )
1809    {
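        /* Convert the AU CTS (in ms) to a frame index at 30000/1001 (~29.97) fps,
        rounded to the nearest frame, then wrap it on the H.263 temporal reference
        range (M4VSS3GPP_EDIT_H263_MODULO_TIME). */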
1810        uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5)
1811            % M4VSS3GPP_EDIT_H263_MODULO_TIME);
1812        M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress),
1813            uiTmp);
1814    }
1815    /*
1816    * Set MPEG4 GOV time counter regarding video and system time */
1817    else if( M4SYS_kMPEG_4 == pAU->stream->streamType )
1818    {
1819        /*
1820        * If GOV.
1821        * beware of little/big endian! */
1822        /* correction: read 8 bits block instead of one 32 bits block */
1823        M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress);
1824        M4OSA_UInt32 temp32 = 0;
1825
1826        temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8))
1827            + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8)
1828            + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16)
1829            + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24);
1830
1831        M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32,
1832            *(pAU->dataAddress));
1833
1834        if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 )
1835        {
1836            pTmp =
1837                (M4OSA_MemAddr8)(pAU->dataAddress
1838                + 1); /**< Jump to the time code (just after the 32 bits header) */
1839            uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset);
1840
1841            switch( pClipCtxt->bMpeg4GovState )
1842            {
1843                case M4OSA_FALSE: /*< INIT */
1844                    {
1845                        /* video time = ceil (system time + offset) */
1846                        uiCts = ( uiAdd + 999) / 1000;
1847
1848                        /* offset update */
1849                        ( *pOffset) += (( uiCts * 1000) - uiAdd);
1850
1851                        /* Save values */
1852                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
1853
1854                        /* State to 'first' */
1855                        pClipCtxt->bMpeg4GovState = M4OSA_TRUE;
1856                    }
1857                    break;
1858
1859                case M4OSA_TRUE: /*< UPDATE */
1860                    {
1861                        /* Get current Gov value */
1862                        M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov);
1863
1864                        /* video time = floor or ceil (system time + offset) */
1865                        uiCts = (uiAdd / 1000);
1866                        iDiff = (M4OSA_Int8)(uiCurrGov
1867                            - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts
1868                            + pClipCtxt->uiMpeg4PrevGovValueSet);
1869
1870                        /* ceiling */
1871                        if( iDiff > 0 )
1872                        {
1873                            uiCts += (M4OSA_UInt32)(iDiff);
1874
1875                            /* offset update */
1876                            ( *pOffset) += (( uiCts * 1000) - uiAdd);
1877                        }
1878
1879                        /* Save values */
1880                        pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov;
1881                        pClipCtxt->uiMpeg4PrevGovValueSet = uiCts;
1882                    }
1883                    break;
1884            }
1885
1886            M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts);
1887        }
1888    }
1889    return;
1890}
1891
1892/**
1893 ******************************************************************************
1894 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects()
1895 * @brief    Check which video effect must be applied at the current time
1896 ******************************************************************************
1897 */
1898static M4OSA_Void
1899M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC,
1900                               M4OSA_UInt8 uiClipNumber )
1901{
1902    M4OSA_UInt8 uiClipIndex;
1903    M4OSA_UInt8 uiFxIndex, i;
1904    M4VSS3GPP_ClipContext *pClip;
1905    M4VSS3GPP_EffectSettings *pFx;
1906    M4OSA_Int32 Off, BC, EC;
1907    // Decorrelate input and output encoding timestamp to handle encoder prefetch
1908    M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts;
1909
1910    uiClipIndex = pC->uiCurrentClip;
1911    pClip = pC->pC1;
1912    /**
1913    * Shortcuts for code readability */
1914    Off = pClip->iVoffset;
1915    BC = pClip->iActualVideoBeginCut;
1916    EC = pClip->iEndTime;
1917
1918    i = 0;
1919
1920    for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ )
1921    {
1922        /** Shortcut; browse the list in reverse order because later effects have higher priority */
1923        pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]);
1924
1925        if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType )
1926        {
1927            /**
1928            * Check if there is actually a video effect */
1929
1930             if(uiClipNumber ==1)
1931             {
1932                /**< Are we after the start time of the effect?
1933                 * or Are we into the effect duration?
1934                 */
1935                if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) &&
1936                    (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
1937                    /**
1938                     * Set the active effect(s) */
1939                    pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex;
1940
1941                    /**
1942                     * Update counter of active effects */
1943                    i++;
1944
1945                    /**
1946                     * For all external effects set this flag to true. */
1947                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
1948                    {
1949                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
1950                    }
1951
1952                    /**
1953                     * The last effect in the settings list has the highest priority;
1954                     * since the list is browsed in reverse order, the active effects
1955                     * are recorded from highest to lowest priority.
1956                     */
1957                }
1958            }
1959            else
1960            {
1961                /**< Are we into the effect duration? */
1962                if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
1963                    >= (M4OSA_Int32)(pFx->uiStartTime))
1964                    && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration)
1965                    <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) {
1966                    /**
1967                     * Set the active effect(s) */
1968                    pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex;
1969
1970                    /**
1971                     * Update counter of active effects */
1972                    i++;
1973
1974                    /**
1975                     * For all external effects set this flag to true. */
1976                    if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External)
1977                    {
1978                        pC->m_bClipExternalHasStarted = M4OSA_TRUE;
1979                    }
1980
1981                    /**
1982                     * The last effect in the settings list has the highest priority; since the list
1983                     * is browsed in reverse order, active effects are recorded from highest to lowest priority */
1984                }
1985            }
1986        }
1987    }
1988
1989    if(1==uiClipNumber)
1990    {
1991        /**
1992        * Save number of active effects */
1993        pC->nbActiveEffects = i;
1994    }
1995    else
1996    {
1997        pC->nbActiveEffects1 = i;
1998    }
1999
2000    /**
2001    * Change the absolute time to clip-related time */
2002    t -= Off;
2003
2004    /**
2005    * Check if we are on the begin cut (for clip1 only) */
2006    if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) )
2007    {
2008        pC->bClip1AtBeginCut = M4OSA_TRUE;
2009    }
2010    else
2011    {
2012        pC->bClip1AtBeginCut = M4OSA_FALSE;
2013    }
2014
2015    return;
2016}
2017
2018/**
2019 ******************************************************************************
2020 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder()
2021 * @brief    Creates the video encoder
2022 * @note
2023 ******************************************************************************
2024 */
2025M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2026{
2027    M4OSA_ERR err;
2028    M4ENCODER_AdvancedParams EncParams;
2029
2030    /**
2031    * Simulate a writer interface with our specific function */
2032    pC->ewc.OurWriterDataInterface.pProcessAU =
2033        M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific,
2034                                but it follows the writer interface */
2035    pC->ewc.OurWriterDataInterface.pStartAU =
2036        M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific,
2037                              but it follows the writer interface */
2038    pC->ewc.OurWriterDataInterface.pWriterContext =
2039        (M4WRITER_Context)
2040        pC; /**< We give the internal context as writer context */
2041
2042    /**
2043    * Get the encoder interface, if not already done */
2044    if( M4OSA_NULL == pC->ShellAPI.pVideoEncoderGlobalFcts )
2045    {
2046        err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI,
2047            pC->ewc.VideoStreamType);
2048        M4OSA_TRACE1_1(
2049            "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x",
2050            err);
2051        M4ERR_CHECK_RETURN(err);
2052    }
2053
2054    /**
2055    * Set encoder shell parameters according to VSS settings */
2056
2057    /* Common parameters */
2058    EncParams.InputFormat = M4ENCODER_kIYUV420;
2059    EncParams.FrameWidth = pC->ewc.uiVideoWidth;
2060    EncParams.FrameHeight = pC->ewc.uiVideoHeight;
2061    EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale;
2062
2063    if( pC->bIsMMS == M4OSA_FALSE )
2064    {
2065        /* No strict regulation in video editor */
2066        /* Because of the effects and transitions we should allow more flexibility */
2067        /* It also prevents dropping important frames (which would degrade scheduling
2068        and cause block effects) */
2069        EncParams.bInternalRegulation = M4OSA_FALSE;
2070        // Variable framerate is not supported by StageFright encoders
2071        EncParams.FrameRate = M4ENCODER_k30_FPS;
2072    }
2073    else
2074    {
2075        /* In case of MMS mode, we need to enable bitrate regulation to be sure */
2076        /* to reach the targeted output file size */
2077        EncParams.bInternalRegulation = M4OSA_TRUE;
2078        EncParams.FrameRate = pC->MMSvideoFramerate;
2079    }
2080
2081    /**
2082    * Other encoder settings (defaults) */
2083    EncParams.uiHorizontalSearchRange = 0;     /* use default */
2084    EncParams.uiVerticalSearchRange = 0;       /* use default */
2085    EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
2086    EncParams.uiIVopPeriod = 0;                /* use default */
2087    EncParams.uiMotionEstimationTools = 0;     /* M4V_MOTION_EST_TOOLS_ALL */
2088    EncParams.bAcPrediction = M4OSA_TRUE;      /* use AC prediction */
2089    EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
2090    EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2091
2092    switch ( pC->ewc.VideoStreamType )
2093    {
2094        case M4SYS_kH263:
2095
2096            EncParams.Format = M4ENCODER_kH263;
2097
2098            EncParams.uiStartingQuantizerValue = 10;
2099            EncParams.uiRateFactor = 1; /* default */
2100
2101            EncParams.bErrorResilience = M4OSA_FALSE;
2102            EncParams.bDataPartitioning = M4OSA_FALSE;
2103            break;
2104
2105        case M4SYS_kMPEG_4:
2106
2107            EncParams.Format = M4ENCODER_kMPEG4;
2108
2109            EncParams.uiStartingQuantizerValue = 8;
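            /* uiRateFactor is the output frame duration converted from milliseconds
            to timescale ticks, rounded to the nearest integer. */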
2110            EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration
2111                * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5);
2112
2113            if( EncParams.uiRateFactor == 0 )
2114                EncParams.uiRateFactor = 1; /* default */
2115
2116            if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning )
2117            {
2118                EncParams.bErrorResilience = M4OSA_FALSE;
2119                EncParams.bDataPartitioning = M4OSA_FALSE;
2120            }
2121            else
2122            {
2123                EncParams.bErrorResilience = M4OSA_TRUE;
2124                EncParams.bDataPartitioning = M4OSA_TRUE;
2125            }
2126            break;
2127
2128        case M4SYS_kH264:
2129            M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264");
2130
2131            EncParams.Format = M4ENCODER_kH264;
2132
2133            EncParams.uiStartingQuantizerValue = 10;
2134            EncParams.uiRateFactor = 1; /* default */
2135
2136            EncParams.bErrorResilience = M4OSA_FALSE;
2137            EncParams.bDataPartitioning = M4OSA_FALSE;
2138            //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS;
2139            break;
2140
2141        default:
2142            M4OSA_TRACE1_1(
2143                "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x",
2144                pC->ewc.VideoStreamType);
2145            return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT;
2146    }
2147
2148    /* In case of EMP we overwrite certain parameters */
2149    if( M4OSA_TRUE == pC->ewc.bActivateEmp )
2150    {
2151        EncParams.uiHorizontalSearchRange = 15;    /* set value */
2152        EncParams.uiVerticalSearchRange = 15;      /* set value */
2153        EncParams.bErrorResilience = M4OSA_FALSE;  /* no error resilience */
2154        EncParams.uiIVopPeriod = 15; /* one I frame every 15 frames */
2155        EncParams.uiMotionEstimationTools = 1; /* M4V_MOTION_EST_TOOLS_NO_4MV */
2156        EncParams.bAcPrediction = M4OSA_FALSE;     /* no AC prediction */
2157        EncParams.uiStartingQuantizerValue = 10;   /* initial QP = 10 */
2158        EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */
2159    }
2160
2161    if( pC->bIsMMS == M4OSA_FALSE )
2162    {
2163        /* Max bitrate depending on the input files bitrates; the same value is
2164        used whether or not we are in a transition */
2165        EncParams.Bitrate = pC->ewc.uiVideoBitrate;
2172    }
2173    else
2174    {
2175        EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */
2176        EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */
2177    }
2178
2179    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit");
2180    /**
2181    * Init the video encoder (advanced settings version of the encoder Open function) */
2182    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext,
2183        &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC,
2184        pC->ShellAPI.pCurrentVideoEncoderExternalAPI,
2185        pC->ShellAPI.pCurrentVideoEncoderUserData);
2186
2187    if( M4NO_ERROR != err )
2188    {
2189        M4OSA_TRACE1_1(
2190            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x",
2191            err);
2192        return err;
2193    }
2194
2195    pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2196    M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen");
2197
2198    err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext,
2199        &pC->ewc.WriterVideoAU, &EncParams);
2200
2201    if( M4NO_ERROR != err )
2202    {
2203        M4OSA_TRACE1_1(
2204            "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x",
2205            err);
2206        return err;
2207    }
2208
2209    pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2210    M4OSA_TRACE1_0(
2211        "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart");
2212
2213    if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart )
2214    {
2215        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart(
2216            pC->ewc.pEncContext);
2217
2218        if( M4NO_ERROR != err )
2219        {
2220            M4OSA_TRACE1_1(
2221                "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x",
2222                err);
2223            return err;
2224        }
2225    }
2226
2227    pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning;
2228
2229    /**
2230    *    Return */
2231    M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR");
2232    return M4NO_ERROR;
2233}
2234
2235/**
2236 ******************************************************************************
2237 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder()
2238 * @brief    Destroy the video encoder
2239 * @note
2240 ******************************************************************************
2241 */
2242M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC )
2243{
2244    M4OSA_ERR err = M4NO_ERROR;
2245
2246    if( M4OSA_NULL != pC->ewc.pEncContext )
2247    {
2248        if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState )
2249        {
2250            if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL )
2251            {
2252                err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop(
2253                    pC->ewc.pEncContext);
2254
2255                if( M4NO_ERROR != err )
2256                {
2257                    M4OSA_TRACE1_1(
2258                        "M4VSS3GPP_intDestroyVideoEncoder:\
2259                        pVideoEncoderGlobalFcts->pFctStop returns 0x%x",
2260                        err);
2261                    /* Well... how the heck do you handle a failed cleanup? */
2262                }
2263            }
2264
2265            pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped;
2266        }
2267
2268        /* Has the encoder actually been opened? Don't close it if that's not the case. */
2269        if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState )
2270        {
2271            err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose(
2272                pC->ewc.pEncContext);
2273
2274            if( M4NO_ERROR != err )
2275            {
2276                M4OSA_TRACE1_1(
2277                    "M4VSS3GPP_intDestroyVideoEncoder:\
2278                    pVideoEncoderGlobalFcts->pFctClose returns 0x%x",
2279                    err);
2280                /* Well... how the heck do you handle a failed cleanup? */
2281            }
2282
2283            pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed;
2284        }
2285
2286        err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup(
2287            pC->ewc.pEncContext);
2288
2289        if( M4NO_ERROR != err )
2290        {
2291            M4OSA_TRACE1_1(
2292                "M4VSS3GPP_intDestroyVideoEncoder:\
2293                pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!",
2294                err);
2295            /**< We do not return the error here because we still have stuff to free */
2296        }
2297
2298        pC->ewc.encoderState = M4VSS3GPP_kNoEncoder;
2299        /**
2300        * Reset variable */
2301        pC->ewc.pEncContext = M4OSA_NULL;
2302    }
2303
2304    M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err);
2305    return err;
2306}
2307
2308/**
2309 ******************************************************************************
2310 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter()
2311 * @brief    Modify the time counter of the given H263 video AU
2312 * @note
2313 * @param    pAuDataBuffer    (IN/OUT) H263 Video AU to modify
2314 * @param    uiCts            (IN)     New time counter value
2315 * @return    nothing
2316 ******************************************************************************
2317 */
2318static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer,
2319                                                  M4OSA_UInt8 uiCts )
2320{
2321    /*
2322    *  The H263 time counter is 8 bits located on the "x" below:
2323    *
2324    *   |--------|--------|--------|--------|
2325    *    ???????? ???????? ??????xx xxxxxx??
2326    */
2327
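    /* e.g., uiCts = 0xB6 (1011 0110b): the two MSBs (10b) land in the two low bits
    of the third byte and the six LSBs (11 0110b) in bits 7..2 of the fourth byte. */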
2328    /**
2329    * Write the 2 bits on the third byte */
2330    pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3);
2331
2332    /**
2333    * Write the 6 bits on the fourth byte */
2334    pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3);
2335
2336    return;
2337}
2338
2339/**
2340 ******************************************************************************
2341 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov()
2342 * @brief    Modify the time info from Group Of VOP video AU
2343 * @note
2344 * @param    pAuDataBuffer    (IN)    MPEG4 Video AU to modify
2345 * @param    uiCtsSec            (IN)     New GOV time info in second unit
2346 * @return    nothing
2347 ******************************************************************************
2348 */
2349static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2350                                           M4OSA_UInt32 uiCtsSec )
2351{
2352    /*
2353    *  The MPEG-4 time code length is 18 bits:
2354    *
2355    *     hh     mm    marker    ss
2356    *    xxxxx|xxx xxx     1    xxxx xx ??????
2357    *   |----- ---|---     -    ----|-- ------|
2358    */
2359    M4OSA_UInt8 uiHh;
2360    M4OSA_UInt8 uiMm;
2361    M4OSA_UInt8 uiSs;
2362    M4OSA_UInt8 uiTmp;
2363
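    /* e.g., uiCtsSec = 3725 (1h02m05s): hh = 1, mm = 2, ss = 5, so byte 0 becomes
    0x08 (hh<<3 | mm>>3), byte 1 becomes 0x51 (mm<<5 | marker 0x10 | ss>>2) and the
    two MSBs of byte 2 carry the two LSBs of ss (01b). */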
2364    /**
2365    * Write the 2 last bits ss */
2366    uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */
2367    pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F));
2368
2369    if( uiCtsSec < 60 )
2370    {
2371        /**
2372        * Write the marker bit (0x10) and the 4 MSBs of ss; the mm bits are zero here */
2373        pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2));
2374
2375        /**
2376        * hh and the 3 MSBs of mm are zero */
2377        pAuDataBuffer[0] = 0;
2378    }
2379    else
2380    {
2381        /**
2382        * Write the 3 LSBs of mm, the marker bit (0x10) and the 4 MSBs of ss */
2383        uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */
2384        uiMm = (M4OSA_UInt8)(uiTmp % 60);
2385        pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2));
2386
2387        if( uiTmp < 60 )
2388        {
2389            /**
2390            * hh is zero: write only the 3 MSBs of mm */
2391            pAuDataBuffer[0] = ((uiMm >> 3));
2392        }
2393        else
2394        {
2395            /**
2396            * Write the 5 bits of hh and 3 of mm (out of 6) */
2397            uiHh = (M4OSA_UInt8)(uiTmp / 60);
2398            pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3));
2399        }
2400    }
2401    return;
2402}
2403
2404/**
2405 ******************************************************************************
2406 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov()
2407 * @brief    Get the time info from Group Of VOP video AU
2408 * @note
2409 * @param    pAuDataBuffer    (IN)    MPEG4 Video AU to modify
2410 * @param    pCtsSec            (OUT)    Current GOV time info in second unit
2411 * @return    nothing
2412 ******************************************************************************
2413 */
2414static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer,
2415                                           M4OSA_UInt32 *pCtsSec )
2416{
2417    /*
2418    *  The MPEG-4 time code length is 18 bits:
2419    *
2420    *     hh     mm    marker    ss
2421    *    xxxxx|xxx xxx     1    xxxx xx ??????
2422    *   |----- ---|---     -    ----|-- ------|
2423    */
2424    M4OSA_UInt8 uiHh;
2425    M4OSA_UInt8 uiMm;
2426    M4OSA_UInt8 uiSs;
2427    M4OSA_UInt8 uiTmp;
2428    M4OSA_UInt32 uiCtsSec;
2429
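    /* This is the inverse of M4VSS3GPP_intSetMPEG4Gov: e.g., bytes 0x08, 0x51 and a
    third byte whose two MSBs are 01b decode back to 1h02m05s, i.e. 3725 seconds. */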
2430    /**
2431    * Read ss */
2432    uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6);
2433    uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2);
2434    uiCtsSec = uiSs + uiTmp;
2435
2436    /**
2437    * Read mm */
2438    uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5);
2439    uiTmp = (( pAuDataBuffer[0] & 0x07) << 3);
2440    uiMm = uiMm + uiTmp;
2441    uiCtsSec = ( uiMm * 60) + uiCtsSec;
2442
2443    /**
2444    * Read hh */
2445    uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3);
2446
2447    if( uiHh )
2448    {
2449        uiCtsSec = ( uiHh * 3600) + uiCtsSec;
2450    }
2451
2452    /*
2453    * in sec */
2454    *pCtsSec = uiCtsSec;
2455
2456    return;
2457}
2458
2459/**
2460 ******************************************************************************
2461 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420()
2462 * @brief    Allocate the three YUV 4:2:0 planes
2463 * @note
2464 * @param    pPlanes    (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures
2465 * @param    uiWidth    (IN)     Image width
2466 * @param    uiHeight (IN)    Image height
2467 ******************************************************************************
2468 */
2469static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes,
2470                                             M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight )
2471{
2472
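    /* YUV 4:2:0 planar layout: the luma plane is width x height bytes and each of
    the two chroma planes is (width/2) x (height/2) bytes, i.e. 1.5 bytes per pixel
    in total. */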
2473    pPlanes[0].u_width = uiWidth;
2474    pPlanes[0].u_height = uiHeight;
2475    pPlanes[0].u_stride = uiWidth;
2476    pPlanes[0].u_topleft = 0;
2477    pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride
2478        * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data");
2479
2480    if( M4OSA_NULL == pPlanes[0].pac_data )
2481    {
2482        M4OSA_TRACE1_0(
2483            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\
2484            returning M4ERR_ALLOC");
2485        return M4ERR_ALLOC;
2486    }
2487
2488    pPlanes[1].u_width = pPlanes[0].u_width >> 1;
2489    pPlanes[1].u_height = pPlanes[0].u_height >> 1;
2490    pPlanes[1].u_stride = pPlanes[1].u_width;
2491    pPlanes[1].u_topleft = 0;
2492    pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride
2493        * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data");
2494
2495    if( M4OSA_NULL == pPlanes[1].pac_data )
2496    {
2497        M4OSA_TRACE1_0(
2498            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\
2499            returning M4ERR_ALLOC");
2500        return M4ERR_ALLOC;
2501    }
2502
2503    pPlanes[2].u_width = pPlanes[1].u_width;
2504    pPlanes[2].u_height = pPlanes[1].u_height;
2505    pPlanes[2].u_stride = pPlanes[2].u_width;
2506    pPlanes[2].u_topleft = 0;
2507    pPlanes[2].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride
2508        * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data");
2509
2510    if( M4OSA_NULL == pPlanes[2].pac_data )
2511    {
2512        M4OSA_TRACE1_0(
2513            "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\
2514            returning M4ERR_ALLOC");
2515        return M4ERR_ALLOC;
2516    }
2517
2518    /**
2519    *    Return */
2520    M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR");
2521    return M4NO_ERROR;
2522}
2523