// VideoEditorVideoEncoder.cpp revision 9dd21dd09e821d24eda6af225fa173f4aa2d366c
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16/** 17************************************************************************* 18* @file VideoEditorVideoEncoder.cpp 19* @brief StageFright shell video encoder 20************************************************************************* 21*/ 22#define LOG_NDEBUG 1 23#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER" 24 25/******************* 26 * HEADERS * 27 *******************/ 28#include "M4OSA_Debug.h" 29#include "M4SYS_AccessUnit.h" 30#include "VideoEditorVideoEncoder.h" 31#include "VideoEditorUtils.h" 32 33#include "utils/Log.h" 34#include <media/stagefright/MediaSource.h> 35#include <media/stagefright/MediaDebug.h> 36#include <media/stagefright/MediaDefs.h> 37#include <media/stagefright/MetaData.h> 38#include <media/stagefright/OMXClient.h> 39#include <media/stagefright/OMXCodec.h> 40#include "OMX_Video.h" 41 42/******************** 43 * DEFINITIONS * 44 ********************/ 45 46// Minimum number of buffer in the source in order to allow encoding 47#define VIDEOEDITOR_MIN_BUFFER_NB 15 48 49// Not enough source buffers available 50#define M4WAR_SF_LOW_BUFFER M4OSA_ERR_CREATE(M4_WAR, 0xFF, 0x00001) 51 52// Encoder color format 53#define VIDEOEDITOR_ENCODER_COLOR_FORMAT OMX_COLOR_FormatYUV420Planar 54 55// Force using hardware encoder 56#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly 57 58// Force Encoder to produce a DSI by sending fake 
input frames upon creation 59#define VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION 60 61#if defined(VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION) && \ 62 !defined(VIDEOEDITOR_FORCECODEC) 63 #error "Cannot force DSI retrieval if codec type is not fixed" 64#endif 65 66/******************** 67 * SOURCE CLASS * 68 ********************/ 69 70namespace android { 71 72struct VideoEditorVideoEncoderSource : public MediaSource { 73 public: 74 static sp<VideoEditorVideoEncoderSource> Create( 75 const sp<MetaData> &format); 76 virtual status_t start(MetaData *params = NULL); 77 virtual status_t stop(); 78 virtual sp<MetaData> getFormat(); 79 virtual status_t read(MediaBuffer **buffer, 80 const ReadOptions *options = NULL); 81 virtual int32_t storeBuffer(MediaBuffer *buffer); 82 83 protected: 84 virtual ~VideoEditorVideoEncoderSource(); 85 86 private: 87 struct MediaBufferChain { 88 MediaBuffer* buffer; 89 MediaBufferChain* nextLink; 90 }; 91 enum State { 92 CREATED, 93 STARTED, 94 ERROR 95 }; 96 VideoEditorVideoEncoderSource(const sp<MetaData> &format); 97 MediaBufferChain* mFirstBufferLink; 98 MediaBufferChain* mLastBufferLink; 99 int32_t mNbBuffer; 100 bool mIsEOS; 101 State mState; 102 sp<MetaData> mEncFormat; 103}; 104 105sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create( 106 const sp<MetaData> &format) { 107 108 sp<VideoEditorVideoEncoderSource> aSource = 109 new VideoEditorVideoEncoderSource(format); 110 return aSource; 111} 112 113VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource( 114 const sp<MetaData> &format): 115 mFirstBufferLink(NULL), 116 mLastBufferLink(NULL), 117 mNbBuffer(0), 118 mIsEOS(false), 119 mState(CREATED), 120 mEncFormat(format) { 121 LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource"); 122} 123 124VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() { 125 126 // Safety clean up 127 if( STARTED == mState ) { 128 stop(); 129 } 130} 131 132status_t VideoEditorVideoEncoderSource::start(MetaData *meta) { 
133 status_t err = OK; 134 135 LOGV("VideoEditorVideoEncoderSource::start() begin"); 136 137 if( CREATED != mState ) { 138 LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState); 139 return UNKNOWN_ERROR; 140 } 141 mState = STARTED; 142 143 LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err); 144 return err; 145} 146 147status_t VideoEditorVideoEncoderSource::stop() { 148 status_t err = OK; 149 150 LOGV("VideoEditorVideoEncoderSource::stop() begin"); 151 152 if( STARTED != mState ) { 153 LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState); 154 return UNKNOWN_ERROR; 155 } 156 157 // Release the buffer chain 158 int32_t i = 0; 159 MediaBufferChain* tmpLink = NULL; 160 while( mFirstBufferLink ) { 161 i++; 162 tmpLink = mFirstBufferLink; 163 mFirstBufferLink = mFirstBufferLink->nextLink; 164 delete tmpLink; 165 } 166 LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i); 167 mFirstBufferLink = NULL; 168 mLastBufferLink = NULL; 169 170 mState = CREATED; 171 172 LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err); 173 return err; 174} 175 176sp<MetaData> VideoEditorVideoEncoderSource::getFormat() { 177 178 LOGV("VideoEditorVideoEncoderSource::getFormat"); 179 return mEncFormat; 180} 181 182status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer, 183 const ReadOptions *options) { 184 MediaSource::ReadOptions readOptions; 185 status_t err = OK; 186 MediaBufferChain* tmpLink = NULL; 187 188 LOGV("VideoEditorVideoEncoderSource::read() begin"); 189 190 if ( STARTED != mState ) { 191 LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState); 192 return UNKNOWN_ERROR; 193 } 194 195 // Get a buffer from the chain 196 if ( NULL == mFirstBufferLink ) { 197 *buffer = NULL; 198 if( mIsEOS ) { 199 LOGV("VideoEditorVideoEncoderSource::read : EOS"); 200 return ERROR_END_OF_STREAM; 201 } else { 202 LOGV("VideoEditorVideoEncoderSource::read: no buffer available"); 203 return ERROR_END_OF_STREAM; 
204 } 205 } 206 *buffer = mFirstBufferLink->buffer; 207 tmpLink = mFirstBufferLink; 208 mFirstBufferLink = mFirstBufferLink->nextLink; 209 210 if ( NULL == mFirstBufferLink ) { 211 mLastBufferLink = NULL; 212 } 213 delete tmpLink; 214 mNbBuffer--; 215 216 LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err); 217 return err; 218} 219 220int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) { 221 status_t err = OK; 222 223 LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin"); 224 225 if( NULL == buffer ) { 226 LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS"); 227 mIsEOS = true; 228 } else { 229 MediaBufferChain* newLink = new MediaBufferChain; 230 newLink->buffer = buffer; 231 newLink->nextLink = NULL; 232 if( NULL != mLastBufferLink ) { 233 mLastBufferLink->nextLink = newLink; 234 } else { 235 mFirstBufferLink = newLink; 236 } 237 mLastBufferLink = newLink; 238 mNbBuffer++; 239 } 240 LOGV("VideoEditorVideoEncoderSource::storeBuffer() end"); 241 return mNbBuffer; 242} 243 244/** 245 ****************************************************************************** 246 * structure VideoEditorVideoEncoder_Context 247 * @brief This structure defines the context of the StageFright video encoder 248 * shell 249 ****************************************************************************** 250*/ 251typedef enum { 252 CREATED = 0x1, 253 OPENED = 0x2, 254 STARTED = 0x4, 255 BUFFERING = 0x8, 256 READING = 0x10 257} VideoEditorVideoEncoder_State; 258 259typedef struct { 260 VideoEditorVideoEncoder_State mState; 261 M4ENCODER_Format mFormat; 262 M4WRITER_DataInterface* mWriterDataInterface; 263 M4VPP_apply_fct* mPreProcFunction; 264 M4VPP_Context mPreProcContext; 265 M4SYS_AccessUnit* mAccessUnit; 266 M4ENCODER_Params* mCodecParams; 267 M4ENCODER_Header mHeader; 268 H264MCS_ProcessEncodedNALU_fct* mH264NALUPostProcessFct; 269 M4OSA_Context mH264NALUPostProcessCtx; 270 M4OSA_UInt32 mLastCTS; 271 sp<VideoEditorVideoEncoderSource> 
mEncoderSource; 272 OMXClient mClient; 273 sp<MediaSource> mEncoder; 274 OMX_COLOR_FORMATTYPE mEncoderColorFormat; 275 276 uint32_t mNbInputFrames; 277 double mFirstInputCts; 278 double mLastInputCts; 279 uint32_t mNbOutputFrames; 280 int64_t mFirstOutputCts; 281 int64_t mLastOutputCts; 282 283} VideoEditorVideoEncoder_Context; 284 285/******************** 286 * TOOLS * 287 ********************/ 288 289M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext, 290 sp<MetaData> metaData) { 291 M4OSA_ERR err = M4NO_ERROR; 292 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 293 status_t result = OK; 294 int32_t nbBuffer = 0; 295 int32_t stride = 0; 296 int32_t height = 0; 297 int32_t framerate = 0; 298 int32_t isCodecConfig = 0; 299 size_t size = 0; 300 uint32_t codecFlags = 0; 301 MediaBuffer* inputBuffer = NULL; 302 MediaBuffer* outputBuffer = NULL; 303 sp<VideoEditorVideoEncoderSource> encoderSource = NULL; 304 sp<MediaSource> encoder = NULL;; 305 OMXClient client; 306 307 LOGV("VideoEditorVideoEncoder_getDSI begin"); 308 // Input parameters check 309 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 310 VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER); 311 312 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 313 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 314 315 // Create the encoder source 316 encoderSource = VideoEditorVideoEncoderSource::Create(metaData); 317 VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE); 318 319 // Connect to the OMX client 320 result = client.connect(); 321 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 322 323 // Create the OMX codec 324 // VIDEOEDITOR_FORCECODEC MUST be defined here 325 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 326 encoder = OMXCodec::Create(client.interface(), metaData, true, 327 encoderSource, NULL, codecFlags); 328 VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE); 329 330 /** 331 * Send fake frames and 
retrieve the DSI 332 */ 333 // Send a fake frame to the source 334 metaData->findInt32(kKeyStride, &stride); 335 metaData->findInt32(kKeyHeight, &height); 336 metaData->findInt32(kKeySampleRate, &framerate); 337 size = (size_t)(stride*height*3)/2; 338 inputBuffer = new MediaBuffer(size); 339 inputBuffer->meta_data()->setInt64(kKeyTime, 0); 340 nbBuffer = encoderSource->storeBuffer(inputBuffer); 341 encoderSource->storeBuffer(NULL); // Signal EOS 342 343 // Call read once to get the DSI 344 result = encoder->start();; 345 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 346 result = encoder->read(&outputBuffer, NULL); 347 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 348 VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32( 349 kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE); 350 351 VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE); 352 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 353 // For H264, format the DSI 354 result = buildAVCCodecSpecificData( 355 (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), 356 (size_t*)(&(pEncoderContext->mHeader.Size)), 357 (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(), 358 outputBuffer->range_length(), encoder->getFormat().get()); 359 outputBuffer->release(); 360 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 361 } else { 362 // For MPEG4, just copy the DSI 363 pEncoderContext->mHeader.Size = 364 (M4OSA_UInt32)outputBuffer->range_length(); 365 SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, 366 pEncoderContext->mHeader.Size, "Encoder header"); 367 memcpy((void *)pEncoderContext->mHeader.pBuf, 368 (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), 369 pEncoderContext->mHeader.Size); 370 outputBuffer->release(); 371 } 372 373 result = encoder->stop(); 374 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 375 376cleanUp: 377 // Destroy the graph 378 if ( encoder != NULL ) { encoder.clear(); } 379 client.disconnect(); 380 if ( 
encoderSource != NULL ) { encoderSource.clear(); } 381 if ( M4NO_ERROR == err ) { 382 LOGV("VideoEditorVideoEncoder_getDSI no error"); 383 } else { 384 LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); 385 } 386 LOGV("VideoEditorVideoEncoder_getDSI end"); 387 return err; 388} 389/******************** 390 * ENGINE INTERFACE * 391 ********************/ 392 393M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { 394 M4OSA_ERR err = M4NO_ERROR; 395 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 396 397 LOGV("VideoEditorVideoEncoder_cleanup begin"); 398 // Input parameters check 399 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 400 401 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 402 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 403 404 // Release memory 405 SAFE_FREE(pEncoderContext->mHeader.pBuf); 406 SAFE_FREE(pEncoderContext); 407 pContext = M4OSA_NULL; 408 409cleanUp: 410 if ( M4NO_ERROR == err ) { 411 LOGV("VideoEditorVideoEncoder_cleanup no error"); 412 } else { 413 LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); 414 } 415 LOGV("VideoEditorVideoEncoder_cleanup end"); 416 return err; 417} 418 419M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format, 420 M4ENCODER_Context* pContext, 421 M4WRITER_DataInterface* pWriterDataInterface, 422 M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, 423 M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { 424 425 M4OSA_ERR err = M4NO_ERROR; 426 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 427 428 LOGV("VideoEditorVideoEncoder_init begin: format %d", format); 429 // Input parameters check 430 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 431 VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER); 432 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER); 433 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER); 434 435 // Context allocation & initialization 436 
SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1, 437 "VideoEditorVideoEncoder"); 438 pEncoderContext->mState = CREATED; 439 pEncoderContext->mFormat = format; 440 pEncoderContext->mWriterDataInterface = pWriterDataInterface; 441 pEncoderContext->mPreProcFunction = pVPPfct; 442 pEncoderContext->mPreProcContext = pVPPctxt; 443 444 *pContext = pEncoderContext; 445 446cleanUp: 447 if ( M4NO_ERROR == err ) { 448 LOGV("VideoEditorVideoEncoder_init no error"); 449 } else { 450 VideoEditorVideoEncoder_cleanup(pEncoderContext); 451 *pContext = M4OSA_NULL; 452 LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err); 453 } 454 LOGV("VideoEditorVideoEncoder_init end"); 455 return err; 456} 457 458M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext, 459 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 460 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 461 { 462 463 return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext, 464 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 465} 466 467 468M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext, 469 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 470 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 471 { 472 473 return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext, 474 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 475} 476 477 478M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext, 479 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 480 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 481 { 482 483 return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext, 484 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 485} 486 487M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) { 488 M4OSA_ERR err = M4NO_ERROR; 489 
VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 490 491 LOGV("VideoEditorVideoEncoder_close begin"); 492 // Input parameters check 493 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 494 495 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 496 VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); 497 498 // Release memory 499 SAFE_FREE(pEncoderContext->mCodecParams); 500 501 // Destroy the graph 502 pEncoderContext->mEncoder.clear(); 503 pEncoderContext->mClient.disconnect(); 504 pEncoderContext->mEncoderSource.clear(); 505 506 // Set the new state 507 pEncoderContext->mState = CREATED; 508 509cleanUp: 510 if( M4NO_ERROR == err ) { 511 LOGV("VideoEditorVideoEncoder_close no error"); 512 } else { 513 LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err); 514 } 515 LOGV("VideoEditorVideoEncoder_close end"); 516 return err; 517} 518 519 520M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext, 521 M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) { 522 M4OSA_ERR err = M4NO_ERROR; 523 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 524 M4ENCODER_Params* pCodecParams = M4OSA_NULL; 525 status_t result = OK; 526 sp<MetaData> encoderMetadata = NULL; 527 const char* mime = NULL; 528 int32_t iProfile = 0; 529 int32_t iFrameRate = 0; 530 uint32_t codecFlags = 0; 531 532 LOGV(">>> VideoEditorVideoEncoder_open begin"); 533 // Input parameters check 534 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 535 VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER); 536 VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER); 537 538 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 539 pCodecParams = (M4ENCODER_Params*)pParams; 540 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 541 542 // Context initialization 543 pEncoderContext->mAccessUnit = pAU; 544 545 // Allocate & initialize the encoding parameters 546 SAFE_MALLOC(pEncoderContext->mCodecParams, 
M4ENCODER_Params, 1, 547 "VideoEditorVideoEncoder"); 548 549 550 pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat; 551 pEncoderContext->mCodecParams->InputFrameWidth = 552 pCodecParams->InputFrameWidth; 553 pEncoderContext->mCodecParams->InputFrameHeight = 554 pCodecParams->InputFrameHeight; 555 pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth; 556 pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight; 557 pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate; 558 pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate; 559 pEncoderContext->mCodecParams->Format = pCodecParams->Format; 560 561 // Check output format consistency and resolution 562 VIDEOEDITOR_CHECK( 563 pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat, 564 M4ERR_PARAMETER); 565 VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, 566 M4ERR_PARAMETER); 567 VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, 568 M4ERR_PARAMETER); 569 570 /** 571 * StageFright graph building 572 */ 573 574 // Create the meta data for the encoder 575 encoderMetadata = new MetaData; 576 switch( pEncoderContext->mCodecParams->Format ) { 577 case M4ENCODER_kH263: 578 mime = MEDIA_MIMETYPE_VIDEO_H263; 579 iProfile = OMX_VIDEO_H263ProfileBaseline; 580 break; 581 case M4ENCODER_kMPEG4: 582 mime = MEDIA_MIMETYPE_VIDEO_MPEG4; 583 iProfile = OMX_VIDEO_MPEG4ProfileSimple; 584 break; 585 case M4ENCODER_kH264: 586 mime = MEDIA_MIMETYPE_VIDEO_AVC; 587 iProfile = OMX_VIDEO_AVCProfileBaseline; 588 break; 589 default: 590 VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format", 591 M4ERR_PARAMETER); 592 break; 593 } 594 encoderMetadata->setCString(kKeyMIMEType, mime); 595 encoderMetadata->setInt32(kKeyVideoProfile, iProfile); 596 encoderMetadata->setInt32(kKeyWidth, 597 (int32_t)pEncoderContext->mCodecParams->FrameWidth); 598 encoderMetadata->setInt32(kKeyStride, 599 
(int32_t)pEncoderContext->mCodecParams->FrameWidth); 600 encoderMetadata->setInt32(kKeyHeight, 601 (int32_t)pEncoderContext->mCodecParams->FrameHeight); 602 encoderMetadata->setInt32(kKeySliceHeight, 603 (int32_t)pEncoderContext->mCodecParams->FrameHeight); 604 605 switch( pEncoderContext->mCodecParams->FrameRate ) { 606 case M4ENCODER_k5_FPS: iFrameRate = 5; break; 607 case M4ENCODER_k7_5_FPS: iFrameRate = 8; break; 608 case M4ENCODER_k10_FPS: iFrameRate = 10; break; 609 case M4ENCODER_k12_5_FPS: iFrameRate = 13; break; 610 case M4ENCODER_k15_FPS: iFrameRate = 15; break; 611 case M4ENCODER_k20_FPS: iFrameRate = 20; break; 612 case M4ENCODER_k25_FPS: iFrameRate = 25; break; 613 case M4ENCODER_k30_FPS: iFrameRate = 30; break; 614 case M4ENCODER_kVARIABLE_FPS: 615 iFrameRate = 30; 616 LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30"); 617 break; 618 case M4ENCODER_kUSE_TIMESCALE: 619 iFrameRate = 30; 620 LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30"); 621 break; 622 623 default: 624 VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate", 625 M4ERR_STATE); 626 break; 627 } 628 encoderMetadata->setInt32(kKeyFrameRate, iFrameRate); 629 encoderMetadata->setInt32(kKeyBitRate, 630 (int32_t)pEncoderContext->mCodecParams->Bitrate); 631 encoderMetadata->setInt32(kKeyIFramesInterval, 1); 632 633 pEncoderContext->mEncoderColorFormat = VIDEOEDITOR_ENCODER_COLOR_FORMAT; 634 encoderMetadata->setInt32(kKeyColorFormat, 635 pEncoderContext->mEncoderColorFormat); 636 637#ifdef VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION 638 // Get the encoder DSI 639 err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata); 640 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 641#endif /* VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION */ 642 643 // Create the encoder source 644 pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create( 645 encoderMetadata); 646 VIDEOEDITOR_CHECK( 647 NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE); 648 649 // 
Connect to the OMX client 650 result = pEncoderContext->mClient.connect(); 651 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 652 653 // Create the OMX codec 654#ifdef VIDEOEDITOR_FORCECODEC 655 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 656#endif /* VIDEOEDITOR_FORCECODEC */ 657 pEncoderContext->mEncoder = OMXCodec::Create( 658 pEncoderContext->mClient.interface(), encoderMetadata, true, 659 pEncoderContext->mEncoderSource, NULL, codecFlags); 660 VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); 661 LOGV("VideoEditorVideoEncoder_open : DONE"); 662 663 // Set the new state 664 pEncoderContext->mState = OPENED; 665 666cleanUp: 667 if( M4NO_ERROR == err ) { 668 LOGV("VideoEditorVideoEncoder_open no error"); 669 } else { 670 VideoEditorVideoEncoder_close(pEncoderContext); 671 LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err); 672 } 673 LOGV("VideoEditorVideoEncoder_open end"); 674 return err; 675} 676 677M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( 678 M4ENCODER_Context pContext, MediaBuffer* buffer); 679M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( 680 M4ENCODER_Context pContext, M4OSA_Double Cts, 681 M4OSA_Bool bReachedEOS) { 682 M4OSA_ERR err = M4NO_ERROR; 683 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 684 M4VIFI_ImagePlane pOutPlane[3]; 685 MediaBuffer* buffer = NULL; 686 int32_t nbBuffer = 0; 687 688 LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts %f", Cts); 689 // Input parameters check 690 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 691 692 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 693 pOutPlane[0].pac_data = M4OSA_NULL; 694 pOutPlane[1].pac_data = M4OSA_NULL; 695 pOutPlane[2].pac_data = M4OSA_NULL; 696 697 if ( M4OSA_FALSE == bReachedEOS ) { 698 M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth * 699 pEncoderContext->mCodecParams->FrameHeight; 700 M4OSA_UInt32 sizeU = sizeY >> 2; 701 M4OSA_UInt32 size = sizeY + 2*sizeU; 702 
M4OSA_UInt8* pData = M4OSA_NULL; 703 buffer = new MediaBuffer((size_t)size); 704 pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset(); 705 706 // Prepare the output image for pre-processing 707 pOutPlane[0].u_width = pEncoderContext->mCodecParams->FrameWidth; 708 pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight; 709 pOutPlane[0].u_topleft = 0; 710 pOutPlane[0].u_stride = pOutPlane[0].u_width; 711 pOutPlane[1].u_width = pOutPlane[0].u_width/2; 712 pOutPlane[1].u_height = pOutPlane[0].u_height/2; 713 pOutPlane[1].u_topleft = 0; 714 pOutPlane[1].u_stride = pOutPlane[0].u_stride/2; 715 pOutPlane[2].u_width = pOutPlane[1].u_width; 716 pOutPlane[2].u_height = pOutPlane[1].u_height; 717 pOutPlane[2].u_topleft = 0; 718 pOutPlane[2].u_stride = pOutPlane[1].u_stride; 719 720 switch( pEncoderContext->mEncoderColorFormat ) { 721 case OMX_COLOR_FormatYUV420Planar: 722 pOutPlane[0].pac_data = pData; 723 pOutPlane[1].pac_data = pData + sizeY; 724 pOutPlane[2].pac_data = pData + sizeY + sizeU; 725 break; 726 case OMX_COLOR_FormatYUV420SemiPlanar: 727 pOutPlane[0].pac_data = pData; 728 SAFE_MALLOC(pOutPlane[1].pac_data, M4VIFI_UInt8, 729 pOutPlane[1].u_height*pOutPlane[1].u_stride,"OutputPlaneU"); 730 SAFE_MALLOC(pOutPlane[2].pac_data, M4VIFI_UInt8, 731 pOutPlane[2].u_height*pOutPlane[2].u_stride,"OutputPlaneV"); 732 break; 733 default: 734 LOGV("VideoEditorVideoEncoder_processInputBuffer : unsupported " 735 "color format 0x%X", pEncoderContext->mEncoderColorFormat); 736 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); 737 break; 738 } 739 740 // Apply pre-processing 741 err = pEncoderContext->mPreProcFunction( 742 pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane); 743 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 744 745 // Convert to MediaBuffer format if necessary 746 if( OMX_COLOR_FormatYUV420SemiPlanar == \ 747 pEncoderContext->mEncoderColorFormat ) { 748 M4OSA_UInt8* pTmpData = M4OSA_NULL; 749 pTmpData = pData + sizeY; 750 // Highly unoptimized 
copy... 751 for( M4OSA_UInt32 i=0; i<sizeU; i++ ) { 752 *pTmpData = pOutPlane[2].pac_data[i]; pTmpData++; 753 *pTmpData = pOutPlane[1].pac_data[i]; pTmpData++; 754 } 755 } 756 757 // Set the metadata 758 buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); 759 } 760 761 // Push the buffer to the source, a NULL buffer, notifies the source of EOS 762 nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); 763 if ( VIDEOEDITOR_MIN_BUFFER_NB > nbBuffer ) { 764 LOGV("VideoEncoder_processInputBuffer not enough source buffer" 765 "%d", nbBuffer); 766 err = M4WAR_SF_LOW_BUFFER; 767 } 768 769cleanUp: 770 if ( OMX_COLOR_FormatYUV420SemiPlanar == \ 771 pEncoderContext->mEncoderColorFormat ) { 772 // Y plane has not been allocated 773 if ( pOutPlane[1].pac_data ) { 774 SAFE_FREE(pOutPlane[1].pac_data); 775 } 776 if ( pOutPlane[2].pac_data ) { 777 SAFE_FREE(pOutPlane[2].pac_data); 778 } 779 } 780 if ( (M4NO_ERROR == err) || (M4WAR_SF_LOW_BUFFER == err) ) { 781 LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err); 782 } else { 783 if( NULL != buffer ) { 784 buffer->release(); 785 } 786 LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err); 787 } 788 LOGV("VideoEditorVideoEncoder_processInputBuffer end"); 789 return err; 790} 791 792M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( 793 M4ENCODER_Context pContext, MediaBuffer* buffer) { 794 M4OSA_ERR err = M4NO_ERROR; 795 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 796 M4OSA_UInt32 Cts = 0; 797 int32_t i32Tmp = 0; 798 int64_t i64Tmp = 0; 799 status_t result = OK; 800 801 LOGV("VideoEditorVideoEncoder_processOutputBuffer begin"); 802 // Input parameters check 803 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 804 VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); 805 806 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 807 808 // Process the returned AU 809 if ( 0 == buffer->range_length() ) { 810 // Encoder has no data yet, 
nothing unusual 811 LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty"); 812 goto cleanUp; 813 } 814 VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER); 815 VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER); 816 if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){ 817 { // Display the DSI 818 LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d", 819 buffer->range_length()); 820 uint8_t* tmp = (uint8_t*)(buffer->data()); 821 for( uint32_t i=0; i<buffer->range_length(); i++ ) { 822 LOGV("DSI [%d] %.2X", i, tmp[i]); 823 } 824 } 825 826#ifndef VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION 827 VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, 828 M4ERR_STATE); 829 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 830 result = buildAVCCodecSpecificData( 831 (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), 832 (size_t*)(&(pEncoderContext->mHeader.Size)), 833 (const uint8_t *)buffer->data() + buffer->range_offset(), 834 buffer->range_length(), 835 pEncoderContext->mEncoder->getFormat().get()); 836 } else { 837 pEncoderContext->mHeader.Size = 838 (M4OSA_UInt32)buffer->range_length(); 839 SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, 840 pEncoderContext->mHeader.Size, "Encoder header"); 841 memcpy((void *)pEncoderContext->mHeader.pBuf, 842 (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()), 843 pEncoderContext->mHeader.Size); 844 } 845#endif /* VIDEOEDITOR_ENCODER_GET_DSI_AT_CREATION */ 846 } else { 847 // Check the CTS 848 VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), 849 M4ERR_STATE); 850 851 pEncoderContext->mNbOutputFrames++; 852 if ( 0 > pEncoderContext->mFirstOutputCts ) { 853 pEncoderContext->mFirstOutputCts = i64Tmp; 854 } 855 pEncoderContext->mLastOutputCts = i64Tmp; 856 857 Cts = (M4OSA_Int32)(i64Tmp/1000); 858 LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", 859 pEncoderContext->mNbOutputFrames, i64Tmp, Cts, 860 
pEncoderContext->mLastCTS); 861 if ( Cts < pEncoderContext->mLastCTS ) { 862 LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " 863 "backwards %d < %d", Cts, pEncoderContext->mLastCTS); 864 goto cleanUp; 865 } 866 LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", 867 Cts, pEncoderContext->mLastCTS); 868 869 // Retrieve the AU container 870 err = pEncoderContext->mWriterDataInterface->pStartAU( 871 pEncoderContext->mWriterDataInterface->pWriterContext, 872 pEncoderContext->mAccessUnit->stream->streamID, 873 pEncoderContext->mAccessUnit); 874 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 875 876 // Format the AU 877 VIDEOEDITOR_CHECK( 878 buffer->range_length() <= pEncoderContext->mAccessUnit->size, 879 M4ERR_PARAMETER); 880 // Remove H264 AU start code 881 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 882 if (!memcmp((const uint8_t *)buffer->data() + \ 883 buffer->range_offset(), "\x00\x00\x00\x01", 4) ) { 884 buffer->set_range(buffer->range_offset() + 4, 885 buffer->range_length() - 4); 886 } 887 } 888 889 if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) && 890 (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) { 891 // H264 trimming case, NALU post processing is needed 892 M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size; 893 err = pEncoderContext->mH264NALUPostProcessFct( 894 pEncoderContext->mH264NALUPostProcessCtx, 895 (M4OSA_UInt8*)buffer->data()+buffer->range_offset(), 896 buffer->range_length(), 897 (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress, 898 &outputSize); 899 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 900 pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize; 901 } else { 902 // The AU can just be copied 903 memcpy((void *)pEncoderContext->mAccessUnit->\ 904 dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\ 905 range_offset()), buffer->range_length()); 906 pEncoderContext->mAccessUnit->size = 907 (M4OSA_UInt32)buffer->range_length(); 908 } 909 910 if ( 
buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){ 911 pEncoderContext->mAccessUnit->attribute = AU_RAP; 912 } else { 913 pEncoderContext->mAccessUnit->attribute = AU_P_Frame; 914 } 915 pEncoderContext->mLastCTS = Cts; 916 pEncoderContext->mAccessUnit->CTS = Cts; 917 pEncoderContext->mAccessUnit->DTS = Cts; 918 919 LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d", 920 pEncoderContext->mAccessUnit->dataAddress, 921 *pEncoderContext->mAccessUnit->dataAddress, 922 pEncoderContext->mAccessUnit->size, 923 pEncoderContext->mAccessUnit->CTS); 924 925 // Write the AU 926 err = pEncoderContext->mWriterDataInterface->pProcessAU( 927 pEncoderContext->mWriterDataInterface->pWriterContext, 928 pEncoderContext->mAccessUnit->stream->streamID, 929 pEncoderContext->mAccessUnit); 930 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 931 } 932 933cleanUp: 934 buffer->release(); 935 if( M4NO_ERROR == err ) { 936 LOGV("VideoEditorVideoEncoder_processOutputBuffer no error"); 937 } else { 938 SAFE_FREE(pEncoderContext->mHeader.pBuf); 939 pEncoderContext->mHeader.Size = 0; 940 LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err); 941 } 942 LOGV("VideoEditorVideoEncoder_processOutputBuffer end"); 943 return err; 944} 945 946M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, 947 M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts, 948 M4ENCODER_FrameMode FrameMode) { 949 M4OSA_ERR err = M4NO_ERROR; 950 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 951 status_t result = OK; 952 MediaBuffer* outputBuffer = NULL; 953 954 LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode); 955 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 956 957 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 958 if ( STARTED == pEncoderContext->mState ) { 959 pEncoderContext->mState = BUFFERING; 960 } 961 VIDEOEDITOR_CHECK( 962 (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE); 963 964 
pEncoderContext->mNbInputFrames++; 965 if ( 0 > pEncoderContext->mFirstInputCts ) { 966 pEncoderContext->mFirstInputCts = Cts; 967 } 968 pEncoderContext->mLastInputCts = Cts; 969 970 LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode, 971 Cts, pEncoderContext->mLastCTS); 972 973 // Push the input buffer to the encoder source 974 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts, 975 M4OSA_FALSE); 976 VIDEOEDITOR_CHECK((M4NO_ERROR == err) || (M4WAR_SF_LOW_BUFFER == err), err); 977 978 // Notify the source in case of EOS 979 if ( M4ENCODER_kLastFrame == FrameMode ) { 980 err = VideoEditorVideoEncoder_processInputBuffer( 981 pEncoderContext, 0, M4OSA_TRUE); 982 VIDEOEDITOR_CHECK((M4NO_ERROR == err) || (M4WAR_SF_LOW_BUFFER == err), 983 err); 984 } 985 986 if ( BUFFERING == pEncoderContext->mState ) { 987 if ( M4WAR_SF_LOW_BUFFER == err ) { 988 // Insufficient prefetch, do not encode 989 err = M4NO_ERROR; 990 goto cleanUp; 991 } else { 992 // Prefetch is complete, start reading 993 pEncoderContext->mState = READING; 994 } 995 } 996 // Read 997 result = pEncoderContext->mEncoder->read(&outputBuffer, NULL); 998 if( OK != result ) { 999 LOGV("VideoEditorVideoEncoder_encode: encoder returns 0x%X", result); 1000 } 1001 1002 if( ERROR_END_OF_STREAM == result ) { 1003 if( outputBuffer != NULL ) { 1004 LOGV("VideoEditorVideoEncoder_encode : EOS w/ buffer"); 1005 } 1006 VIDEOEDITOR_CHECK(0 == VIDEOEDITOR_MIN_BUFFER_NB, M4ERR_STATE); 1007 // No output provided here, just exit 1008 goto cleanUp; 1009 } 1010 VIDEOEDITOR_CHECK((OK == result) || (ERROR_END_OF_STREAM == result), 1011 M4ERR_STATE); 1012 1013 // Provide the encoded AU to the writer 1014 err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, 1015 outputBuffer); 1016 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1017 1018cleanUp: 1019 if( M4NO_ERROR == err ) { 1020 LOGV("VideoEditorVideoEncoder_encode no error"); 1021 } else { 1022 LOGV("VideoEditorVideoEncoder_encode 
ERROR 0x%X", err); 1023 } 1024 LOGV("VideoEditorVideoEncoder_encode end"); 1025 return err; 1026} 1027 1028M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) { 1029 M4OSA_ERR err = M4NO_ERROR; 1030 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1031 status_t result = OK; 1032 1033 LOGV("VideoEditorVideoEncoder_start begin"); 1034 // Input parameters check 1035 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1036 1037 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1038 VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); 1039 1040 pEncoderContext->mNbInputFrames = 0; 1041 pEncoderContext->mFirstInputCts = -1.0; 1042 pEncoderContext->mLastInputCts = -1.0; 1043 pEncoderContext->mNbOutputFrames = 0; 1044 pEncoderContext->mFirstOutputCts = -1; 1045 pEncoderContext->mLastOutputCts = -1; 1046 1047 result = pEncoderContext->mEncoder->start(); 1048 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 1049 1050 // Set the new state 1051 pEncoderContext->mState = STARTED; 1052 1053cleanUp: 1054 if ( M4NO_ERROR == err ) { 1055 LOGV("VideoEditorVideoEncoder_start no error"); 1056 } else { 1057 LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err); 1058 } 1059 LOGV("VideoEditorVideoEncoder_start end"); 1060 return err; 1061} 1062 1063M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { 1064 M4OSA_ERR err = M4NO_ERROR; 1065 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1066 MediaBuffer* outputBuffer = NULL; 1067 status_t result = OK; 1068 1069 LOGV("VideoEditorVideoEncoder_stop begin"); 1070 // Input parameters check 1071 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1072 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1073 1074 // Process the remaining buffers if necessary 1075 if ( (BUFFERING | READING) & pEncoderContext->mState ) { 1076 // Send EOS again just in case 1077 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0, 1078 
M4OSA_TRUE); 1079 VIDEOEDITOR_CHECK((M4NO_ERROR == err) || (M4WAR_SF_LOW_BUFFER == err), 1080 err); 1081 while( OK == result ) { 1082 result = pEncoderContext->mEncoder->read(&outputBuffer, NULL); 1083 if ( OK == result ) { 1084 err = VideoEditorVideoEncoder_processOutputBuffer( 1085 pEncoderContext, outputBuffer); 1086 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1087 } 1088 } 1089 pEncoderContext->mState = STARTED; 1090 } 1091 1092 // Stop the graph module if necessary 1093 if ( STARTED == pEncoderContext->mState ) { 1094 pEncoderContext->mEncoder->stop(); 1095 pEncoderContext->mState = OPENED; 1096 } 1097 1098 if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) { 1099 LOGW("Some frames were not encoded: input(%d) != output(%d)", 1100 pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames); 1101 } 1102 1103cleanUp: 1104 if ( M4NO_ERROR == err ) { 1105 LOGV("VideoEditorVideoEncoder_stop no error"); 1106 } else { 1107 LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err); 1108 } 1109 LOGV("VideoEditorVideoEncoder_stop end"); 1110 return err; 1111} 1112 1113M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) { 1114 LOGW("regulBitRate is not implemented"); 1115 return M4NO_ERROR; 1116} 1117 1118M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext, 1119 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1120 M4OSA_ERR err = M4NO_ERROR; 1121 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1122 1123 LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID); 1124 // Input parameters check 1125 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1126 1127 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1128 1129 switch( optionID ) { 1130 case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr: 1131 pEncoderContext->mH264NALUPostProcessFct = 1132 (H264MCS_ProcessEncodedNALU_fct*)optionValue; 1133 break; 1134 case 
M4ENCODER_kOptionID_H264ProcessNALUContext: 1135 pEncoderContext->mH264NALUPostProcessCtx = 1136 (M4OSA_Context)optionValue; 1137 break; 1138 default: 1139 LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X", 1140 optionID); 1141 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1142 break; 1143 } 1144 1145cleanUp: 1146 if ( M4NO_ERROR == err ) { 1147 LOGV("VideoEditorVideoEncoder_setOption no error"); 1148 } else { 1149 LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err); 1150 } 1151 LOGV("VideoEditorVideoEncoder_setOption end"); 1152 return err; 1153} 1154 1155M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext, 1156 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1157 M4OSA_ERR err = M4NO_ERROR; 1158 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1159 1160 LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID); 1161 // Input parameters check 1162 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1163 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1164 1165 switch( optionID ) { 1166 case M4ENCODER_kOptionID_EncoderHeader: 1167 VIDEOEDITOR_CHECK( 1168 M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE); 1169 *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader); 1170 break; 1171 default: 1172 LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X", 1173 optionID); 1174 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1175 break; 1176 } 1177 1178cleanUp: 1179 if ( M4NO_ERROR == err ) { 1180 LOGV("VideoEditorVideoEncoder_getOption no error"); 1181 } else { 1182 LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); 1183 } 1184 return err; 1185} 1186 1187M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, 1188 M4ENCODER_Format* pFormat, 1189 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1190 M4OSA_ERR err = M4NO_ERROR; 1191 1192 // Input parameters check 1193 
VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); 1194 VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); 1195 1196 LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, 1197 pEncoderInterface, mode); 1198 1199 SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, 1200 "VideoEditorVideoEncoder"); 1201 1202 *pFormat = format; 1203 1204 switch( format ) { 1205 case M4ENCODER_kH263: 1206 { 1207 (*pEncoderInterface)->pFctInit = 1208 VideoEditorVideoEncoder_init_H263; 1209 break; 1210 } 1211 case M4ENCODER_kMPEG4: 1212 { 1213 (*pEncoderInterface)->pFctInit = 1214 VideoEditorVideoEncoder_init_MPEG4; 1215 break; 1216 } 1217 case M4ENCODER_kH264: 1218 { 1219 (*pEncoderInterface)->pFctInit = 1220 VideoEditorVideoEncoder_init_H264; 1221 break; 1222 } 1223 default: 1224 LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", 1225 format); 1226 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); 1227 break; 1228 } 1229 (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; 1230 (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; 1231 (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; 1232 (*pEncoderInterface)->pFctPause = M4OSA_NULL; 1233 (*pEncoderInterface)->pFctResume = M4OSA_NULL; 1234 (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; 1235 (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; 1236 (*pEncoderInterface)->pFctRegulBitRate = 1237 VideoEditorVideoEncoder_regulBitRate; 1238 (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; 1239 (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; 1240 (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; 1241 1242cleanUp: 1243 if( M4NO_ERROR == err ) { 1244 LOGV("VideoEditorVideoEncoder_getInterface no error"); 1245 } else { 1246 *pEncoderInterface = M4OSA_NULL; 1247 LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); 1248 } 1249 return 
err; 1250} 1251 1252extern "C" { 1253 1254M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat, 1255 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1256 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat, 1257 pEncoderInterface, mode); 1258} 1259 1260M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat, 1261 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1262 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat, 1263 pEncoderInterface, mode); 1264} 1265 1266M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat, 1267 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1268 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat, 1269 pEncoderInterface, mode); 1270 1271} 1272 1273} // extern "C" 1274 1275} // namespace android 1276