// ACodec.cpp revision 1d15ab58bf8239069ef343de6cb21aabf3ef7d78
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <media/stagefright/ACodec.h> 28 29#include <binder/MemoryDealer.h> 30 31#include <media/stagefright/foundation/hexdump.h> 32#include <media/stagefright/foundation/ABuffer.h> 33#include <media/stagefright/foundation/ADebug.h> 34#include <media/stagefright/foundation/AMessage.h> 35#include <media/stagefright/foundation/AUtils.h> 36 37#include <media/stagefright/BufferProducerWrapper.h> 38#include <media/stagefright/MediaCodecList.h> 39#include <media/stagefright/MediaDefs.h> 40#include <media/stagefright/NativeWindowWrapper.h> 41#include <media/stagefright/OMXClient.h> 42#include <media/stagefright/OMXCodec.h> 43 44#include <media/hardware/HardwareAPI.h> 45 46#include <OMX_AudioExt.h> 47#include <OMX_VideoExt.h> 48#include <OMX_Component.h> 49#include <OMX_IndexExt.h> 50 51#include "include/avc_utils.h" 52 53namespace android { 54 55// OMX errors are directly mapped into status_t range if 56// there is no corresponding MediaError status code. 57// Use the statusFromOMXError(int32_t omxError) function. 58// 59// Currently this is a direct map. 
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Fills in the common OMX parameter-struct header (size and IL version 1.0.0.0)
// that every OMX_*PARAM/CONFIG struct must carry before a get/setParameter call.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nStep = 0;
    params->nVersion.s.nRevision = 0;
}

// Binder-side observer registered with the OMX node: translates incoming
// omx_message callbacks into AMessages and posts them to ACodec's looper,
// so all OMX events are handled serially on the codec's own thread.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    // Template message that gets dup()'ed for every forwarded OMX callback.
    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();

        msg->setInt32("type", omx_msg.type);
        msg->setInt32("node", omx_msg.node);

        switch (omx_msg.type) {
            case omx_message::EVENT:
            {
                msg->setInt32("event", omx_msg.u.event_data.event);
                msg->setInt32("data1", omx_msg.u.event_data.data1);
                msg->setInt32("data2", omx_msg.u.event_data.data2);
                break;
            }

            case omx_message::EMPTY_BUFFER_DONE:
            {
                msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                break;
            }

            case omx_message::FILL_BUFFER_DONE:
            {
                msg->setInt32(
                        "buffer", omx_msg.u.extended_buffer_data.buffer);
                msg->setInt32(
                        "range_offset",
                        omx_msg.u.extended_buffer_data.range_offset);
                msg->setInt32(
                        "range_length",
                        omx_msg.u.extended_buffer_data.range_length);
                msg->setInt32(
                        "flags",
                        omx_msg.u.extended_buffer_data.flags);
                msg->setInt64(
                        "timestamp",
                        omx_msg.u.extended_buffer_data.timestamp);
                break;
            }

            default:
                TRESPASS();
                break;
        }

        msg->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all of ACodec's hierarchical states: default routing of
// looper messages, OMX events, and buffer-done callbacks.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returned by the component should be treated in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    bool onOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts mNotify when the remote binder (the OMX service) dies, so ACodec can
// report the failure instead of hanging.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// Initial state: no OMX component allocated yet.
struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated (OMX "Loaded"), awaiting configure/start or shutdown.
struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: buffers being allocated while moving Loaded -> Idle.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component's Idle -> Executing transition.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: component is executing, buffers circulate between client,
// codec and (optionally) the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Output port disabled/re-enabled while output format changes are applied.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Shutdown path: waiting for all buffers to return while moving to Idle.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Shutdown path: waiting for the component's Idle -> Loaded transition.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Flush in progress on both ports; returns to Executing when both complete.
struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // One completion flag per port (input, output).
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Constructs the codec with all state objects pre-allocated and enters
// UninitializedState. All time-valued members default to -1 ("unset").
ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mSentFormat(false),
      mIsEncoder(false),
      mUseMetadataOnEncoderOutput(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mStoreMetaDataInOutputBuffers(false),
      mMetaDataBuffersToSubmit(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

// Message that all client notifications are dup()'ed from.
void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below are the asynchronous public API:
// each retargets or creates an AMessage and posts it to the codec's looper.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL
// BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetaDataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, this))->post();
    }
}

// Allocates all buffers for one port and notifies the client
// (kWhatBuffersAllocated) with a descriptor of every buffer. Output buffers
// backed by a native window take the dedicated paths below; otherwise buffers
// come from a MemoryDealer (or OMX-side allocation for secure/metadata cases).
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    // Must not already have buffers on this port.
    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (mStoreMetaDataInOutputBuffers) {
            err = allocateOutputMetaDataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            ALOGV("[%s] Allocating %u buffers of size %u on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, def.nBufferSize,
                    portIndex == kPortIndexInput ? "input" : "output");

            // One shared-memory dealer sized for the whole port.
            size_t totalSize = def.nBufferCountActual * def.nBufferSize;
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
                CHECK(mem.get() != NULL);

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? OMXCodec::kRequiresAllocateBufferOnInputPorts
                        : OMXCodec::kRequiresAllocateBufferOnOutputPorts;

                if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
                        || mUseMetadataOnEncoderOutput) {
                    // Secure input / metadata output: the component allocates;
                    // drop our shared-memory backing.
                    mem.clear();

                    void *ptr;
                    err = mOMX->allocateBuffer(
                            mNode, portIndex, def.nBufferSize, &info.mBufferID,
                            &ptr);

                    // Metadata output carries only a type tag plus a
                    // buffer_handle_t, not full frame data.
                    int32_t bufSize = mUseMetadataOnEncoderOutput ?
                            (4 + sizeof(buffer_handle_t)) : def.nBufferSize;

                    info.mData = new ABuffer(ptr, bufSize);
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID);
                }

                if (mem != NULL) {
                    info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Tell the client about every buffer on this port.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];

        desc->addBuffer(info.mBufferID, info.mData);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window (geometry, transform, usage, buffer count) to
// match the output port definition and returns the negotiated buffer count,
// buffer size and the window's minimum-undequeued requirement via out-params.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    err = native_window_set_buffers_geometry(
            mNativeWindow.get(),
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat);

    if (err != 0) {
        ALOGE("native_window_set_buffers_geometry failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    if (mRotationDegrees != 0) {
        uint32_t transform = 0;
        switch (mRotationDegrees) {
            case 0: transform = 0; break;
            case 90: transform = HAL_TRANSFORM_ROT_90; break;
            case 180: transform = HAL_TRANSFORM_ROT_180; break;
            case 270: transform = HAL_TRANSFORM_ROT_270; break;
            default: transform = 0; break;
        }

        if (transform > 0) {
            err = native_window_set_buffers_transform(
                    mNativeWindow.get(), transform);
            if (err != 0) {
                ALOGE("native_window_set_buffers_transform failed: %s (%d)",
                        strerror(-err), -err);
                return err;
            }
        }
    }

    // Set up the native window.
    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    // Make sure to check whether either Stagefright or the video decoder
    // requested protected buffers.
    if (usage & GRALLOC_USAGE_PROTECTED) {
        // Verify that the ANativeWindow sends images directly to
        // SurfaceFlinger.
        int queuesToNativeWindow = 0;
        err = mNativeWindow->query(
                mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
                &queuesToNativeWindow);
        if (err != 0) {
            ALOGE("error authenticating native window: %d", err);
            return err;
        }
        if (queuesToNativeWindow != 1) {
            ALOGE("native window could not be authenticated");
            return PERMISSION_DENIED;
        }
    }

    int consumerUsage = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS,
            &consumerUsage);
    if (err != 0) {
        ALOGW("failed to get consumer usage bits. ignoring");
        err = 0;
    }

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec) + %#x(Consumer) = %#x",
            omxUsage, usage, consumerUsage, usage | consumerUsage);
    usage |= consumerUsage;
    err = native_window_set_usage(
            mNativeWindow.get(),
            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);

    if (err != 0) {
        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
    //
    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Retry with progressively fewer extra buffers if the component rejects
    // the count; give up only when even extraBuffers == 0 fails.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues every output buffer from the native window, registers each with the
// OMX component, then cancels the window's required minimum back to it. On any
// failure, all dequeued buffers are cancelled back before returning the error.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mGraphicBuffer = graphicBuffer;
        // Pushed before registration so the cleanup loop below can cancel it
        // even if useGraphicBuffer fails.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        status_t error = cancelBufferToNativeWindow(info);
        // Preserve the first error encountered.
        if (err == 0) {
            err = error;
        }
    }

    return err;
}

// Metadata mode: allocates small VideoDecoderOutputMetaData records instead of
// full frames; the actual graphic buffers are dequeued lazily from the native
// window as the component asks for them.
status_t ACodec::allocateOutputMetaDataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // 8 bytes per record rounds up sizeof(VideoDecoderOutputMetaData)
    // for the dealer's total budget.
    size_t totalSize = bufferCount * 8;
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        // No graphic buffer attached yet; nominally owned by the window.
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(
                sizeof(struct VideoDecoderOutputMetaData));
        CHECK(mem.get() != NULL);
        info.mData = new ABuffer(mem->pointer(), mem->size());

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID);

        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    // Hold back the window's required minimum; the rest get submitted.
    mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and hands it to the component
// (metadata mode only). Returns ERROR_IO if no buffer could be dequeued.
status_t ACodec::submitOutputMetaDataBuffer() {
    CHECK(mStoreMetaDataInOutputBuffers);
    if (mMetaDataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL)
        return ERROR_IO;

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetaDataBuffersToSubmit;
    CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID),
             (status_t)OK);

    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    return OK;
}

// Returns a buffer we own back to the native window; ownership becomes
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer reports an error (logged only).
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), -1);

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);

    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Dequeues a buffer from the native window and returns the matching
// BufferInfo (now OWNED_BY_US). In metadata mode, an unrecognized buffer
// replaces the least-recently-dequeued window-owned entry.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
                " video playback mode mode!");
        return NULL;
    }

    if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) {
        ALOGE("dequeueBuffer failed.");
        return NULL;
    }

    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        if (info->mGraphicBuffer != NULL &&
            info->mGraphicBuffer->handle == buf->handle) {
            CHECK_EQ((int)info->mStatus,
                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);

            info->mStatus = BufferInfo::OWNED_BY_US;

            return info;
        }

        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    if (oldest) {
        CHECK(mStoreMetaDataInOutputBuffers);

        // discard buffer in LRU info and replace with new buffer
        oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
        oldest->mStatus = BufferInfo::OWNED_BY_US;

        mOMX->updateGraphicBufferInMeta(
                mNode, kPortIndexOutput, oldest->mGraphicBuffer,
                oldest->mBufferID);

        VideoDecoderOutputMetaData *metaData =
            reinterpret_cast<VideoDecoderOutputMetaData *>(
                    oldest->mData->base());
        CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);

        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                oldest - &mBuffers[kPortIndexOutput][0],
                mDequeueCounter - oldest->mDequeuedAt,
                metaData->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());

        return oldest;
    }

    // The dequeued buffer matched nothing we know about -- programming error.
    TRESPASS();

    return NULL;
}

// Frees every buffer on the given port (iterating backwards since freeBuffer
// removes entries) and releases the port's memory dealer.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
    }

    mDealer[portIndex].clear();

    return OK;
}

// Frees only the output buffers we can safely reclaim right now.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
        }
    }

    return OK;
}

// Frees one buffer: returns it to the native window first if we hold it,
// releases it from the OMX component, and drops our bookkeeping entry.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

    CHECK(info->mStatus == BufferInfo::OWNED_BY_US
            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);

    if (portIndex == kPortIndexOutput && mNativeWindow != NULL
            && info->mStatus == BufferInfo::OWNED_BY_US) {
        cancelBufferToNativeWindow(info);
    }

    CHECK_EQ(mOMX->freeBuffer(
                mNode, portIndex, info->mBufferID),
             (status_t)OK);

    mBuffers[portIndex].removeAt(i);

    return OK;
}

// Linear search for a buffer by its OMX ID; optionally reports its index.
// An unknown ID is a programming error (TRESPASS).
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID,
        ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    TRESPASS();

    return NULL;
}

// Maps a MIME type to the standard OMX component role string
// ("audio_decoder.mp3" etc.) and applies it to the component.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
"audio_decoder.mp3", "audio_encoder.mp3" }, 1082 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1083 "audio_decoder.mp1", "audio_encoder.mp1" }, 1084 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1085 "audio_decoder.mp2", "audio_encoder.mp2" }, 1086 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1087 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1088 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1089 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1090 { MEDIA_MIMETYPE_AUDIO_AAC, 1091 "audio_decoder.aac", "audio_encoder.aac" }, 1092 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1093 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1094 { MEDIA_MIMETYPE_AUDIO_OPUS, 1095 "audio_decoder.opus", "audio_encoder.opus" }, 1096 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1097 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1098 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1099 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1100 { MEDIA_MIMETYPE_VIDEO_AVC, 1101 "video_decoder.avc", "video_encoder.avc" }, 1102 { MEDIA_MIMETYPE_VIDEO_HEVC, 1103 "video_decoder.hevc", "video_encoder.hevc" }, 1104 { MEDIA_MIMETYPE_VIDEO_MPEG4, 1105 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1106 { MEDIA_MIMETYPE_VIDEO_H263, 1107 "video_decoder.h263", "video_encoder.h263" }, 1108 { MEDIA_MIMETYPE_VIDEO_VP8, 1109 "video_decoder.vp8", "video_encoder.vp8" }, 1110 { MEDIA_MIMETYPE_VIDEO_VP9, 1111 "video_decoder.vp9", "video_encoder.vp9" }, 1112 { MEDIA_MIMETYPE_AUDIO_RAW, 1113 "audio_decoder.raw", "audio_encoder.raw" }, 1114 { MEDIA_MIMETYPE_AUDIO_FLAC, 1115 "audio_decoder.flac", "audio_encoder.flac" }, 1116 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1117 "audio_decoder.gsm", "audio_encoder.gsm" }, 1118 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1119 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1120 { MEDIA_MIMETYPE_AUDIO_AC3, 1121 "audio_decoder.ac3", "audio_encoder.ac3" }, 1122 { MEDIA_MIMETYPE_AUDIO_EAC3, 1123 "audio_decoder.eac3", "audio_encoder.eac3" }, 1124 }; 1125 1126 static const size_t kNumMimeToRole = 1127 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1128 1129 size_t i; 1130 
for (i = 0; i < kNumMimeToRole; ++i) { 1131 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1132 break; 1133 } 1134 } 1135 1136 if (i == kNumMimeToRole) { 1137 return ERROR_UNSUPPORTED; 1138 } 1139 1140 const char *role = 1141 isEncoder ? kMimeToRole[i].encoderRole 1142 : kMimeToRole[i].decoderRole; 1143 1144 if (role != NULL) { 1145 OMX_PARAM_COMPONENTROLETYPE roleParams; 1146 InitOMXParams(&roleParams); 1147 1148 strncpy((char *)roleParams.cRole, 1149 role, OMX_MAX_STRINGNAME_SIZE - 1); 1150 1151 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1152 1153 status_t err = mOMX->setParameter( 1154 mNode, OMX_IndexParamStandardComponentRole, 1155 &roleParams, sizeof(roleParams)); 1156 1157 if (err != OK) { 1158 ALOGW("[%s] Failed to set standard component role '%s'.", 1159 mComponentName.c_str(), role); 1160 1161 return err; 1162 } 1163 } 1164 1165 return OK; 1166} 1167 1168status_t ACodec::configureCodec( 1169 const char *mime, const sp<AMessage> &msg) { 1170 int32_t encoder; 1171 if (!msg->findInt32("encoder", &encoder)) { 1172 encoder = false; 1173 } 1174 1175 sp<AMessage> inputFormat = new AMessage(); 1176 sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged 1177 1178 mIsEncoder = encoder; 1179 1180 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1181 1182 if (err != OK) { 1183 return err; 1184 } 1185 1186 int32_t bitRate = 0; 1187 // FLAC encoder doesn't need a bitrate, other encoders do 1188 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1189 && !msg->findInt32("bitrate", &bitRate)) { 1190 return INVALID_OPERATION; 1191 } 1192 1193 int32_t storeMeta; 1194 if (encoder 1195 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1196 && storeMeta != 0) { 1197 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE); 1198 1199 if (err != OK) { 1200 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1201 mComponentName.c_str(), err); 1202 1203 return err; 1204 } 1205 } 1206 
1207 int32_t prependSPSPPS = 0; 1208 if (encoder 1209 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1210 && prependSPSPPS != 0) { 1211 OMX_INDEXTYPE index; 1212 err = mOMX->getExtensionIndex( 1213 mNode, 1214 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1215 &index); 1216 1217 if (err == OK) { 1218 PrependSPSPPSToIDRFramesParams params; 1219 InitOMXParams(¶ms); 1220 params.bEnable = OMX_TRUE; 1221 1222 err = mOMX->setParameter( 1223 mNode, index, ¶ms, sizeof(params)); 1224 } 1225 1226 if (err != OK) { 1227 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1228 "IDR frames. (err %d)", err); 1229 1230 return err; 1231 } 1232 } 1233 1234 // Only enable metadata mode on encoder output if encoder can prepend 1235 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1236 // opaque handle, to which we don't have access. 1237 int32_t video = !strncasecmp(mime, "video/", 6); 1238 if (encoder && video) { 1239 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1240 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1241 && storeMeta != 0); 1242 1243 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable); 1244 1245 if (err != OK) { 1246 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1247 mComponentName.c_str(), err); 1248 mUseMetadataOnEncoderOutput = 0; 1249 } else { 1250 mUseMetadataOnEncoderOutput = enable; 1251 } 1252 1253 if (!msg->findInt64( 1254 "repeat-previous-frame-after", 1255 &mRepeatFrameDelayUs)) { 1256 mRepeatFrameDelayUs = -1ll; 1257 } 1258 1259 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1260 mMaxPtsGapUs = -1ll; 1261 } 1262 1263 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1264 mMaxFps = -1; 1265 } 1266 1267 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1268 mTimePerCaptureUs = -1ll; 1269 } 1270 1271 if (!msg->findInt32( 1272 "create-input-buffers-suspended", 1273 (int32_t*)&mCreateInputBuffersSuspended)) { 1274 
mCreateInputBuffersSuspended = false; 1275 } 1276 } 1277 1278 // NOTE: we only use native window for video decoders 1279 sp<RefBase> obj; 1280 bool haveNativeWindow = msg->findObject("native-window", &obj) 1281 && obj != NULL && video && !encoder; 1282 mStoreMetaDataInOutputBuffers = false; 1283 if (video && !encoder) { 1284 inputFormat->setInt32("adaptive-playback", false); 1285 1286 int32_t usageProtected; 1287 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1288 if (!haveNativeWindow) { 1289 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1290 return PERMISSION_DENIED; 1291 } 1292 mFlags |= kFlagIsGrallocUsageProtected; 1293 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1294 } 1295 } 1296 if (haveNativeWindow) { 1297 sp<NativeWindowWrapper> windowWrapper( 1298 static_cast<NativeWindowWrapper *>(obj.get())); 1299 sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow(); 1300 1301 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1302 int32_t autoFrc; 1303 if (msg->findInt32("auto-frc", &autoFrc)) { 1304 bool enabled = autoFrc; 1305 OMX_CONFIG_BOOLEANTYPE config; 1306 InitOMXParams(&config); 1307 config.bEnabled = (OMX_BOOL)enabled; 1308 status_t temp = mOMX->setConfig( 1309 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1310 &config, sizeof(config)); 1311 if (temp == OK) { 1312 outputFormat->setInt32("auto-frc", enabled); 1313 } else if (enabled) { 1314 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1315 } 1316 } 1317 // END of temporary support for automatic FRC 1318 1319 int32_t tunneled; 1320 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1321 tunneled != 0) { 1322 ALOGI("Configuring TUNNELED video playback."); 1323 mTunneled = true; 1324 1325 int32_t audioHwSync = 0; 1326 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1327 ALOGW("No Audio HW Sync provided for video tunnel"); 1328 } 1329 err = 
configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1330 if (err != OK) { 1331 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1332 audioHwSync, nativeWindow.get()); 1333 return err; 1334 } 1335 1336 int32_t maxWidth = 0, maxHeight = 0; 1337 if (msg->findInt32("max-width", &maxWidth) && 1338 msg->findInt32("max-height", &maxHeight)) { 1339 1340 err = mOMX->prepareForAdaptivePlayback( 1341 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1342 if (err != OK) { 1343 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1344 mComponentName.c_str(), err); 1345 // allow failure 1346 err = OK; 1347 } else { 1348 inputFormat->setInt32("max-width", maxWidth); 1349 inputFormat->setInt32("max-height", maxHeight); 1350 inputFormat->setInt32("adaptive-playback", true); 1351 } 1352 } 1353 } else { 1354 ALOGV("Configuring CPU controlled video playback."); 1355 mTunneled = false; 1356 1357 // Explicity reset the sideband handle of the window for 1358 // non-tunneled video in case the window was previously used 1359 // for a tunneled video playback. 1360 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1361 if (err != OK) { 1362 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1363 return err; 1364 } 1365 1366 // Always try to enable dynamic output buffers on native surface 1367 err = mOMX->storeMetaDataInBuffers( 1368 mNode, kPortIndexOutput, OMX_TRUE); 1369 if (err != OK) { 1370 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1371 mComponentName.c_str(), err); 1372 1373 // if adaptive playback has been requested, try JB fallback 1374 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1375 // LARGE MEMORY REQUIREMENT 1376 1377 // we will not do adaptive playback on software accessed 1378 // surfaces as they never had to respond to changes in the 1379 // crop window, and we don't trust that they will be able to. 
1380 int usageBits = 0; 1381 bool canDoAdaptivePlayback; 1382 1383 if (nativeWindow->query( 1384 nativeWindow.get(), 1385 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1386 &usageBits) != OK) { 1387 canDoAdaptivePlayback = false; 1388 } else { 1389 canDoAdaptivePlayback = 1390 (usageBits & 1391 (GRALLOC_USAGE_SW_READ_MASK | 1392 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1393 } 1394 1395 int32_t maxWidth = 0, maxHeight = 0; 1396 if (canDoAdaptivePlayback && 1397 msg->findInt32("max-width", &maxWidth) && 1398 msg->findInt32("max-height", &maxHeight)) { 1399 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1400 mComponentName.c_str(), maxWidth, maxHeight); 1401 1402 err = mOMX->prepareForAdaptivePlayback( 1403 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1404 maxHeight); 1405 ALOGW_IF(err != OK, 1406 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1407 mComponentName.c_str(), err); 1408 1409 if (err == OK) { 1410 inputFormat->setInt32("max-width", maxWidth); 1411 inputFormat->setInt32("max-height", maxHeight); 1412 inputFormat->setInt32("adaptive-playback", true); 1413 } 1414 } 1415 // allow failure 1416 err = OK; 1417 } else { 1418 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1419 mComponentName.c_str()); 1420 mStoreMetaDataInOutputBuffers = true; 1421 inputFormat->setInt32("adaptive-playback", true); 1422 } 1423 1424 int32_t push; 1425 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1426 && push != 0) { 1427 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1428 } 1429 } 1430 1431 int32_t rotationDegrees; 1432 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1433 mRotationDegrees = rotationDegrees; 1434 } else { 1435 mRotationDegrees = 0; 1436 } 1437 } 1438 1439 if (video) { 1440 // determine need for software renderer 1441 bool usingSwRenderer = false; 1442 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1443 usingSwRenderer = true; 1444 haveNativeWindow = false; 1445 } 1446 1447 if (encoder) { 1448 err = 
setupVideoEncoder(mime, msg); 1449 } else { 1450 err = setupVideoDecoder(mime, msg, haveNativeWindow); 1451 } 1452 1453 if (err != OK) { 1454 return err; 1455 } 1456 1457 if (haveNativeWindow) { 1458 sp<NativeWindowWrapper> nativeWindow( 1459 static_cast<NativeWindowWrapper *>(obj.get())); 1460 CHECK(nativeWindow != NULL); 1461 mNativeWindow = nativeWindow->getNativeWindow(); 1462 1463 native_window_set_scaling_mode( 1464 mNativeWindow.get(), NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 1465 } 1466 1467 // initialize native window now to get actual output format 1468 // TODO: this is needed for some encoders even though they don't use native window 1469 CHECK_EQ((status_t)OK, initNativeWindow()); 1470 1471 // fallback for devices that do not handle flex-YUV for native buffers 1472 if (haveNativeWindow) { 1473 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1474 if (msg->findInt32("color-format", &requestedColorFormat) && 1475 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1476 CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); 1477 int32_t colorFormat = OMX_COLOR_FormatUnused; 1478 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1479 CHECK(outputFormat->findInt32("color-format", &colorFormat)); 1480 ALOGD("[%s] Requested output format %#x and got %#x.", 1481 mComponentName.c_str(), requestedColorFormat, colorFormat); 1482 if (!isFlexibleColorFormat( 1483 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1484 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1485 // device did not handle flex-YUV request for native window, fall back 1486 // to SW renderer 1487 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 1488 mNativeWindow.clear(); 1489 haveNativeWindow = false; 1490 usingSwRenderer = true; 1491 if (mStoreMetaDataInOutputBuffers) { 1492 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE); 1493 mStoreMetaDataInOutputBuffers = false; 1494 // TODO: implement 
adaptive-playback support for bytebuffer mode. 1495 // This is done by SW codecs, but most HW codecs don't support it. 1496 inputFormat->setInt32("adaptive-playback", false); 1497 } 1498 if (err == OK) { 1499 err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); 1500 } 1501 if (mFlags & kFlagIsGrallocUsageProtected) { 1502 // fallback is not supported for protected playback 1503 err = PERMISSION_DENIED; 1504 } else if (err == OK) { 1505 err = setupVideoDecoder(mime, msg, false); 1506 } 1507 } 1508 } 1509 } 1510 1511 if (usingSwRenderer) { 1512 outputFormat->setInt32("using-sw-renderer", 1); 1513 } 1514 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 1515 int32_t numChannels, sampleRate; 1516 if (!msg->findInt32("channel-count", &numChannels) 1517 || !msg->findInt32("sample-rate", &sampleRate)) { 1518 // Since we did not always check for these, leave them optional 1519 // and have the decoder figure it all out. 1520 err = OK; 1521 } else { 1522 err = setupRawAudioFormat( 1523 encoder ? 
kPortIndexInput : kPortIndexOutput, 1524 sampleRate, 1525 numChannels); 1526 } 1527 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 1528 int32_t numChannels, sampleRate; 1529 if (!msg->findInt32("channel-count", &numChannels) 1530 || !msg->findInt32("sample-rate", &sampleRate)) { 1531 err = INVALID_OPERATION; 1532 } else { 1533 int32_t isADTS, aacProfile; 1534 int32_t sbrMode; 1535 int32_t maxOutputChannelCount; 1536 int32_t pcmLimiterEnable; 1537 drcParams_t drc; 1538 if (!msg->findInt32("is-adts", &isADTS)) { 1539 isADTS = 0; 1540 } 1541 if (!msg->findInt32("aac-profile", &aacProfile)) { 1542 aacProfile = OMX_AUDIO_AACObjectNull; 1543 } 1544 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 1545 sbrMode = -1; 1546 } 1547 1548 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 1549 maxOutputChannelCount = -1; 1550 } 1551 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 1552 // value is unknown 1553 pcmLimiterEnable = -1; 1554 } 1555 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 1556 // value is unknown 1557 drc.encodedTargetLevel = -1; 1558 } 1559 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 1560 // value is unknown 1561 drc.drcCut = -1; 1562 } 1563 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 1564 // value is unknown 1565 drc.drcBoost = -1; 1566 } 1567 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 1568 // value is unknown 1569 drc.heavyCompression = -1; 1570 } 1571 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 1572 // value is unknown 1573 drc.targetRefLevel = -1; 1574 } 1575 1576 err = setupAACCodec( 1577 encoder, numChannels, sampleRate, bitRate, aacProfile, 1578 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 1579 pcmLimiterEnable); 1580 } 1581 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 1582 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 1583 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 1584 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 1585 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 1586 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 1587 // These are PCM-like formats with a fixed sample rate but 1588 // a variable number of channels. 1589 1590 int32_t numChannels; 1591 if (!msg->findInt32("channel-count", &numChannels)) { 1592 err = INVALID_OPERATION; 1593 } else { 1594 err = setupG711Codec(encoder, numChannels); 1595 } 1596 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 1597 int32_t numChannels, sampleRate, compressionLevel = -1; 1598 if (encoder && 1599 (!msg->findInt32("channel-count", &numChannels) 1600 || !msg->findInt32("sample-rate", &sampleRate))) { 1601 ALOGE("missing channel count or sample rate for FLAC encoder"); 1602 err = INVALID_OPERATION; 1603 } else { 1604 if (encoder) { 1605 if (!msg->findInt32( 1606 "complexity", &compressionLevel) && 1607 !msg->findInt32( 1608 "flac-compression-level", &compressionLevel)) { 1609 compressionLevel = 5; // default FLAC compression level 1610 } else if (compressionLevel < 0) { 1611 ALOGW("compression level %d outside [0..8] range, " 1612 "using 0", 1613 compressionLevel); 1614 compressionLevel = 0; 1615 } else if (compressionLevel > 8) { 1616 ALOGW("compression level %d outside [0..8] range, " 1617 "using 8", 1618 compressionLevel); 1619 compressionLevel = 8; 1620 } 1621 } 1622 err = setupFlacCodec( 1623 encoder, numChannels, sampleRate, compressionLevel); 1624 } 1625 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 1626 int32_t numChannels, sampleRate; 1627 if (encoder 1628 || !msg->findInt32("channel-count", &numChannels) 1629 || !msg->findInt32("sample-rate", &sampleRate)) { 1630 err = INVALID_OPERATION; 1631 } else { 1632 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels); 1633 } 1634 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 1635 int32_t numChannels; 
        // ---- ACodec::configureCodec() continued: AC3/EAC3 branches + tail ----
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupAC3Codec(encoder, numChannels, sampleRate);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupEAC3Codec(encoder, numChannels, sampleRate);
        }
    }

    if (err != OK) {
        return err;
    }

    // Optional gapless-playback trims; default to no trimming.
    if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
        mEncoderDelay = 0;
    }

    if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
        mEncoderPadding = 0;
    }

    if (msg->findInt32("channel-mask", &mChannelMask)) {
        mChannelMaskPresent = true;
    } else {
        mChannelMaskPresent = false;
    }

    int32_t maxInputSize;
    if (msg->findInt32("max-input-size", &maxInputSize)) {
        err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
    } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
        // Vendor-specific workaround: this component needs larger input buffers.
        err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
    }

    mBaseOutputFormat = outputFormat;

    // Snapshot the negotiated port formats into the cached input/output formats.
    CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
    CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
    mInputFormat = inputFormat;
    mOutputFormat = outputFormat;

    return err;
}

// Raises the port's buffer size to at least `size`. No-op when the current
// nBufferSize already satisfies the request; otherwise sets the new size and
// re-queries to verify the component actually honored it (CHECK aborts if not).
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Read back: components may clamp the value; refuse silent shrinkage.
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    CHECK(def.nBufferSize >= size);

    return OK;
}

// Enumerates the port's supported audio formats (by increasing nIndex) until
// the desired encoding is found, then selects it. The component ends the
// enumeration by returning an error from getParameter, which we propagate.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0;; ++index) {
        format.nIndex = index;

        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures AAC encode or decode. ADTS is decode-only. Continues in the
// next block.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // PCM lives on the encoder's input / the decoder's output port.
    status_t err = setupRawAudioFormat(
            encoder ?
            // ---- ACodec::setupAACCodec() continued ----
                kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        // Encoder: select AAC on the output port, enable error concealment,
        // then fill in the AAC profile (channels, rate, bitrate, SBR tools).
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder: set channels/rate and the stream format (ADTS vs raw MP4FF).
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // NOTE(review): unlike every other OMX param struct in this file,
    // `presentation` is not passed through InitOMXParams() — presumably the
    // Android AAC-presentation extension struct carries no nSize/nVersion
    // header fields; confirm against OMX_AudioExt.h before changing.
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

// Configures AC3 decode (encoding unsupported). Continues in the next block.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
            // ---- ACodec::setupAC3Codec() continued ----
            kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures E-AC3 decode (encoding unsupported); mirrors setupAC3Codec().
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate to the nearest AMR band mode at or above it
// (capped at the highest mode: 23850 bps for WB, 12200 bps for NB).
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR-NB/WB encode or decode: sets frame format and band mode on
// the compressed port, then PCM on the other port. Continues in the next block.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    // Compressed side: encoder output / decoder input.
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // AMR is mono, 8 kHz (NB) or 16 kHz (WB) by definition.
    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
                // ---- ACodec::setupAMRCodec() continued ----
                16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 decode (encode unimplemented); fixed 8 kHz sample rate.
status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
    CHECK(!encoder); // XXX TODO

    return setupRawAudioFormat(
            kPortIndexInput, 8000 /* sampleRate */, numChannels);
}

// Configures FLAC: for the encoder, sets the compression level [0..8] on the
// output port; both paths then set raw PCM on the opposite port.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures 16-bit signed interleaved linear PCM on the given port and
// applies the Android channel mapping for numChannels.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    pcmParams.eNumData = OMX_NumericalDataSigned;
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nBitPerSample = 16;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // No mapping for this channel count: best-effort success without
        // updating the PCM params (OMX_ErrorNone == 0 == OK numerically).
        return OMX_ErrorNone;
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}

// Enables tunneled video on the output port (tying it to the given audio HW
// sync id) and attaches the component's sideband handle to the window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the port's video formats and selects the first entry matching
// the requested compression/color format pair; flexible YUV requests are
// substituted with the component's concrete equivalent. Includes a quirk
// path for the TI encoder, which reports unreliable values for the field
// not relevant to the port's direction.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    OMX_U32 index = 0;
    for (;;) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // End of enumeration (or failure) without a match.
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        ++index;
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
2237status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 2238 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 2239 InitOMXParams(&format); 2240 format.nPortIndex = kPortIndexOutput; 2241 2242 InitOMXParams(&legacyFormat); 2243 // this field will change when we find a suitable legacy format 2244 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 2245 2246 for (OMX_U32 index = 0; ; ++index) { 2247 format.nIndex = index; 2248 status_t err = mOMX->getParameter( 2249 mNode, OMX_IndexParamVideoPortFormat, 2250 &format, sizeof(format)); 2251 if (err != OK) { 2252 // no more formats, pick legacy format if found 2253 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 2254 memcpy(&format, &legacyFormat, sizeof(format)); 2255 break; 2256 } 2257 return err; 2258 } 2259 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 2260 return OMX_ErrorBadParameter; 2261 } 2262 if (!getLegacyFlexibleFormat) { 2263 break; 2264 } 2265 // standard formats that were exposed to users before 2266 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 2267 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 2268 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 2269 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 2270 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 2271 break; 2272 } 2273 // find best legacy non-standard format 2274 OMX_U32 flexibleEquivalent; 2275 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 2276 && isFlexibleColorFormat( 2277 mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */, 2278 &flexibleEquivalent) 2279 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 2280 memcpy(&legacyFormat, &format, sizeof(format)); 2281 } 2282 } 2283 return mOMX->setParameter( 2284 mNode, OMX_IndexParamVideoPortFormat, 2285 &format, sizeof(format)); 2286} 2287 2288static const struct VideoCodingMapEntry { 2289 const char *mMime; 2290 OMX_VIDEO_CODINGTYPE 
mVideoCodingType; 2291} kVideoCodingMapEntry[] = { 2292 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 2293 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 2294 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 2295 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 2296 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 2297 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 2298 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 2299}; 2300 2301static status_t GetVideoCodingTypeFromMime( 2302 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 2303 for (size_t i = 0; 2304 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 2305 ++i) { 2306 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 2307 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 2308 return OK; 2309 } 2310 } 2311 2312 *codingType = OMX_VIDEO_CodingUnused; 2313 2314 return ERROR_UNSUPPORTED; 2315} 2316 2317static status_t GetMimeTypeForVideoCoding( 2318 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 2319 for (size_t i = 0; 2320 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 2321 ++i) { 2322 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 2323 *mime = kVideoCodingMapEntry[i].mMime; 2324 return OK; 2325 } 2326 } 2327 2328 mime->clear(); 2329 2330 return ERROR_UNSUPPORTED; 2331} 2332 2333status_t ACodec::setupVideoDecoder( 2334 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow) { 2335 int32_t width, height; 2336 if (!msg->findInt32("width", &width) 2337 || !msg->findInt32("height", &height)) { 2338 return INVALID_OPERATION; 2339 } 2340 2341 OMX_VIDEO_CODINGTYPE compressionFormat; 2342 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 2343 2344 if (err != OK) { 2345 return err; 2346 } 2347 2348 err = setVideoPortFormatType( 2349 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 2350 2351 if (err != OK) { 2352 return err; 2353 } 2354 2355 int32_t tmp; 2356 if 
(msg->findInt32("color-format", &tmp)) { 2357 OMX_COLOR_FORMATTYPE colorFormat = 2358 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 2359 err = setVideoPortFormatType( 2360 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 2361 if (err != OK) { 2362 ALOGW("[%s] does not support color format %d", 2363 mComponentName.c_str(), colorFormat); 2364 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 2365 } 2366 } else { 2367 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 2368 } 2369 2370 if (err != OK) { 2371 return err; 2372 } 2373 2374 int32_t frameRateInt; 2375 float frameRateFloat; 2376 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 2377 if (!msg->findInt32("frame-rate", &frameRateInt)) { 2378 frameRateInt = -1; 2379 } 2380 frameRateFloat = (float)frameRateInt; 2381 } 2382 2383 err = setVideoFormatOnPort( 2384 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 2385 2386 if (err != OK) { 2387 return err; 2388 } 2389 2390 err = setVideoFormatOnPort( 2391 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 2392 2393 if (err != OK) { 2394 return err; 2395 } 2396 2397 return OK; 2398} 2399 2400status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) { 2401 int32_t tmp; 2402 if (!msg->findInt32("color-format", &tmp)) { 2403 return INVALID_OPERATION; 2404 } 2405 2406 OMX_COLOR_FORMATTYPE colorFormat = 2407 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 2408 2409 status_t err = setVideoPortFormatType( 2410 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 2411 2412 if (err != OK) { 2413 ALOGE("[%s] does not support color format %d", 2414 mComponentName.c_str(), colorFormat); 2415 2416 return err; 2417 } 2418 2419 /* Input port configuration */ 2420 2421 OMX_PARAM_PORTDEFINITIONTYPE def; 2422 InitOMXParams(&def); 2423 2424 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2425 2426 def.nPortIndex = kPortIndexInput; 2427 2428 err = 
mOMX->getParameter( 2429 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2430 2431 if (err != OK) { 2432 return err; 2433 } 2434 2435 int32_t width, height, bitrate; 2436 if (!msg->findInt32("width", &width) 2437 || !msg->findInt32("height", &height) 2438 || !msg->findInt32("bitrate", &bitrate)) { 2439 return INVALID_OPERATION; 2440 } 2441 2442 video_def->nFrameWidth = width; 2443 video_def->nFrameHeight = height; 2444 2445 int32_t stride; 2446 if (!msg->findInt32("stride", &stride)) { 2447 stride = width; 2448 } 2449 2450 video_def->nStride = stride; 2451 2452 int32_t sliceHeight; 2453 if (!msg->findInt32("slice-height", &sliceHeight)) { 2454 sliceHeight = height; 2455 } 2456 2457 video_def->nSliceHeight = sliceHeight; 2458 2459 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 2460 2461 float frameRate; 2462 if (!msg->findFloat("frame-rate", &frameRate)) { 2463 int32_t tmp; 2464 if (!msg->findInt32("frame-rate", &tmp)) { 2465 return INVALID_OPERATION; 2466 } 2467 frameRate = (float)tmp; 2468 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 2469 } 2470 2471 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 2472 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 2473 // this is redundant as it was already set up in setVideoPortFormatType 2474 // FIXME for now skip this only for flexible YUV formats 2475 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 2476 video_def->eColorFormat = colorFormat; 2477 } 2478 2479 err = mOMX->setParameter( 2480 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2481 2482 if (err != OK) { 2483 ALOGE("[%s] failed to set input port definition parameters.", 2484 mComponentName.c_str()); 2485 2486 return err; 2487 } 2488 2489 /* Output port configuration */ 2490 2491 OMX_VIDEO_CODINGTYPE compressionFormat; 2492 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 2493 2494 if (err != OK) { 2495 return err; 2496 } 2497 2498 err = setVideoPortFormatType( 2499 
kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 2500 2501 if (err != OK) { 2502 ALOGE("[%s] does not support compression format %d", 2503 mComponentName.c_str(), compressionFormat); 2504 2505 return err; 2506 } 2507 2508 def.nPortIndex = kPortIndexOutput; 2509 2510 err = mOMX->getParameter( 2511 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2512 2513 if (err != OK) { 2514 return err; 2515 } 2516 2517 video_def->nFrameWidth = width; 2518 video_def->nFrameHeight = height; 2519 video_def->xFramerate = 0; 2520 video_def->nBitrate = bitrate; 2521 video_def->eCompressionFormat = compressionFormat; 2522 video_def->eColorFormat = OMX_COLOR_FormatUnused; 2523 2524 err = mOMX->setParameter( 2525 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2526 2527 if (err != OK) { 2528 ALOGE("[%s] failed to set output port definition parameters.", 2529 mComponentName.c_str()); 2530 2531 return err; 2532 } 2533 2534 switch (compressionFormat) { 2535 case OMX_VIDEO_CodingMPEG4: 2536 err = setupMPEG4EncoderParameters(msg); 2537 break; 2538 2539 case OMX_VIDEO_CodingH263: 2540 err = setupH263EncoderParameters(msg); 2541 break; 2542 2543 case OMX_VIDEO_CodingAVC: 2544 err = setupAVCEncoderParameters(msg); 2545 break; 2546 2547 case OMX_VIDEO_CodingHEVC: 2548 err = setupHEVCEncoderParameters(msg); 2549 break; 2550 2551 case OMX_VIDEO_CodingVP8: 2552 case OMX_VIDEO_CodingVP9: 2553 err = setupVPXEncoderParameters(msg); 2554 break; 2555 2556 default: 2557 break; 2558 } 2559 2560 ALOGI("setupVideoEncoder succeeded"); 2561 2562 return err; 2563} 2564 2565status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 2566 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 2567 InitOMXParams(¶ms); 2568 params.nPortIndex = kPortIndexOutput; 2569 2570 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 2571 2572 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 2573 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 2574 
int32_t mbs; 2575 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 2576 return INVALID_OPERATION; 2577 } 2578 params.nCirMBs = mbs; 2579 } 2580 2581 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 2582 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 2583 int32_t mbs; 2584 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 2585 return INVALID_OPERATION; 2586 } 2587 params.nAirMBs = mbs; 2588 2589 int32_t ref; 2590 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 2591 return INVALID_OPERATION; 2592 } 2593 params.nAirRef = ref; 2594 } 2595 2596 status_t err = mOMX->setParameter( 2597 mNode, OMX_IndexParamVideoIntraRefresh, 2598 ¶ms, sizeof(params)); 2599 return err; 2600} 2601 2602static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 2603 if (iFramesInterval < 0) { 2604 return 0xFFFFFFFF; 2605 } else if (iFramesInterval == 0) { 2606 return 0; 2607 } 2608 OMX_U32 ret = frameRate * iFramesInterval; 2609 return ret; 2610} 2611 2612static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 2613 int32_t tmp; 2614 if (!msg->findInt32("bitrate-mode", &tmp)) { 2615 return OMX_Video_ControlRateVariable; 2616 } 2617 2618 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 2619} 2620 2621status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 2622 int32_t bitrate, iFrameInterval; 2623 if (!msg->findInt32("bitrate", &bitrate) 2624 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2625 return INVALID_OPERATION; 2626 } 2627 2628 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2629 2630 float frameRate; 2631 if (!msg->findFloat("frame-rate", &frameRate)) { 2632 int32_t tmp; 2633 if (!msg->findInt32("frame-rate", &tmp)) { 2634 return INVALID_OPERATION; 2635 } 2636 frameRate = (float)tmp; 2637 } 2638 2639 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 2640 InitOMXParams(&mpeg4type); 2641 mpeg4type.nPortIndex = kPortIndexOutput; 2642 2643 status_t err = mOMX->getParameter( 2644 mNode, 
OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 2645 2646 if (err != OK) { 2647 return err; 2648 } 2649 2650 mpeg4type.nSliceHeaderSpacing = 0; 2651 mpeg4type.bSVH = OMX_FALSE; 2652 mpeg4type.bGov = OMX_FALSE; 2653 2654 mpeg4type.nAllowedPictureTypes = 2655 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2656 2657 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2658 if (mpeg4type.nPFrames == 0) { 2659 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2660 } 2661 mpeg4type.nBFrames = 0; 2662 mpeg4type.nIDCVLCThreshold = 0; 2663 mpeg4type.bACPred = OMX_TRUE; 2664 mpeg4type.nMaxPacketSize = 256; 2665 mpeg4type.nTimeIncRes = 1000; 2666 mpeg4type.nHeaderExtension = 0; 2667 mpeg4type.bReversibleVLC = OMX_FALSE; 2668 2669 int32_t profile; 2670 if (msg->findInt32("profile", &profile)) { 2671 int32_t level; 2672 if (!msg->findInt32("level", &level)) { 2673 return INVALID_OPERATION; 2674 } 2675 2676 err = verifySupportForProfileAndLevel(profile, level); 2677 2678 if (err != OK) { 2679 return err; 2680 } 2681 2682 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 2683 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 2684 } 2685 2686 err = mOMX->setParameter( 2687 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 2688 2689 if (err != OK) { 2690 return err; 2691 } 2692 2693 err = configureBitrate(bitrate, bitrateMode); 2694 2695 if (err != OK) { 2696 return err; 2697 } 2698 2699 return setupErrorCorrectionParameters(); 2700} 2701 2702status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 2703 int32_t bitrate, iFrameInterval; 2704 if (!msg->findInt32("bitrate", &bitrate) 2705 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2706 return INVALID_OPERATION; 2707 } 2708 2709 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2710 2711 float frameRate; 2712 if (!msg->findFloat("frame-rate", &frameRate)) { 2713 int32_t tmp; 2714 if (!msg->findInt32("frame-rate", 
&tmp)) { 2715 return INVALID_OPERATION; 2716 } 2717 frameRate = (float)tmp; 2718 } 2719 2720 OMX_VIDEO_PARAM_H263TYPE h263type; 2721 InitOMXParams(&h263type); 2722 h263type.nPortIndex = kPortIndexOutput; 2723 2724 status_t err = mOMX->getParameter( 2725 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 2726 2727 if (err != OK) { 2728 return err; 2729 } 2730 2731 h263type.nAllowedPictureTypes = 2732 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2733 2734 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2735 if (h263type.nPFrames == 0) { 2736 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2737 } 2738 h263type.nBFrames = 0; 2739 2740 int32_t profile; 2741 if (msg->findInt32("profile", &profile)) { 2742 int32_t level; 2743 if (!msg->findInt32("level", &level)) { 2744 return INVALID_OPERATION; 2745 } 2746 2747 err = verifySupportForProfileAndLevel(profile, level); 2748 2749 if (err != OK) { 2750 return err; 2751 } 2752 2753 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 2754 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 2755 } 2756 2757 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 2758 h263type.bForceRoundingTypeToZero = OMX_FALSE; 2759 h263type.nPictureHeaderRepetition = 0; 2760 h263type.nGOBHeaderInterval = 0; 2761 2762 err = mOMX->setParameter( 2763 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 2764 2765 if (err != OK) { 2766 return err; 2767 } 2768 2769 err = configureBitrate(bitrate, bitrateMode); 2770 2771 if (err != OK) { 2772 return err; 2773 } 2774 2775 return setupErrorCorrectionParameters(); 2776} 2777 2778// static 2779int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 2780 int width, int height, int rate, int bitrate, 2781 OMX_VIDEO_AVCPROFILETYPE profile) { 2782 // convert bitrate to main/baseline profile kbps equivalent 2783 switch (profile) { 2784 case OMX_VIDEO_AVCProfileHigh10: 2785 bitrate = divUp(bitrate, 3000); break; 2786 case OMX_VIDEO_AVCProfileHigh: 
2787 bitrate = divUp(bitrate, 1250); break; 2788 default: 2789 bitrate = divUp(bitrate, 1000); break; 2790 } 2791 2792 // convert size and rate to MBs 2793 width = divUp(width, 16); 2794 height = divUp(height, 16); 2795 int mbs = width * height; 2796 rate *= mbs; 2797 int maxDimension = max(width, height); 2798 2799 static const int limits[][5] = { 2800 /* MBps MB dim bitrate level */ 2801 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 2802 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 2803 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 2804 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 2805 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 2806 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 2807 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 2808 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 2809 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 2810 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 2811 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 2812 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 2813 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 2814 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 2815 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 2816 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 2817 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 2818 }; 2819 2820 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 2821 const int (&limit)[5] = limits[i]; 2822 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 2823 && bitrate <= limit[3]) { 2824 return limit[4]; 2825 } 2826 } 2827 return 0; 2828} 2829 2830status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 2831 int32_t bitrate, iFrameInterval; 2832 if (!msg->findInt32("bitrate", &bitrate) 2833 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2834 return INVALID_OPERATION; 2835 } 2836 2837 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2838 2839 float frameRate; 2840 if (!msg->findFloat("frame-rate", &frameRate)) 
{ 2841 int32_t tmp; 2842 if (!msg->findInt32("frame-rate", &tmp)) { 2843 return INVALID_OPERATION; 2844 } 2845 frameRate = (float)tmp; 2846 } 2847 2848 status_t err = OK; 2849 int32_t intraRefreshMode = 0; 2850 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 2851 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 2852 if (err != OK) { 2853 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 2854 err, intraRefreshMode); 2855 return err; 2856 } 2857 } 2858 2859 OMX_VIDEO_PARAM_AVCTYPE h264type; 2860 InitOMXParams(&h264type); 2861 h264type.nPortIndex = kPortIndexOutput; 2862 2863 err = mOMX->getParameter( 2864 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2865 2866 if (err != OK) { 2867 return err; 2868 } 2869 2870 h264type.nAllowedPictureTypes = 2871 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2872 2873 int32_t profile; 2874 if (msg->findInt32("profile", &profile)) { 2875 int32_t level; 2876 if (!msg->findInt32("level", &level)) { 2877 return INVALID_OPERATION; 2878 } 2879 2880 err = verifySupportForProfileAndLevel(profile, level); 2881 2882 if (err != OK) { 2883 return err; 2884 } 2885 2886 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 2887 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 2888 } 2889 2890 // XXX 2891 if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) { 2892 ALOGW("Use baseline profile instead of %d for AVC recording", 2893 h264type.eProfile); 2894 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 2895 } 2896 2897 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 2898 h264type.nSliceHeaderSpacing = 0; 2899 h264type.bUseHadamard = OMX_TRUE; 2900 h264type.nRefFrames = 1; 2901 h264type.nBFrames = 0; 2902 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2903 if (h264type.nPFrames == 0) { 2904 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2905 } 2906 h264type.nRefIdx10ActiveMinus1 = 0; 2907 h264type.nRefIdx11ActiveMinus1 = 0; 
2908 h264type.bEntropyCodingCABAC = OMX_FALSE; 2909 h264type.bWeightedPPrediction = OMX_FALSE; 2910 h264type.bconstIpred = OMX_FALSE; 2911 h264type.bDirect8x8Inference = OMX_FALSE; 2912 h264type.bDirectSpatialTemporal = OMX_FALSE; 2913 h264type.nCabacInitIdc = 0; 2914 } 2915 2916 if (h264type.nBFrames != 0) { 2917 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 2918 } 2919 2920 h264type.bEnableUEP = OMX_FALSE; 2921 h264type.bEnableFMO = OMX_FALSE; 2922 h264type.bEnableASO = OMX_FALSE; 2923 h264type.bEnableRS = OMX_FALSE; 2924 h264type.bFrameMBsOnly = OMX_TRUE; 2925 h264type.bMBAFF = OMX_FALSE; 2926 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 2927 2928 err = mOMX->setParameter( 2929 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2930 2931 if (err != OK) { 2932 return err; 2933 } 2934 2935 return configureBitrate(bitrate, bitrateMode); 2936} 2937 2938status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 2939 int32_t bitrate, iFrameInterval; 2940 if (!msg->findInt32("bitrate", &bitrate) 2941 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2942 return INVALID_OPERATION; 2943 } 2944 2945 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2946 2947 float frameRate; 2948 if (!msg->findFloat("frame-rate", &frameRate)) { 2949 int32_t tmp; 2950 if (!msg->findInt32("frame-rate", &tmp)) { 2951 return INVALID_OPERATION; 2952 } 2953 frameRate = (float)tmp; 2954 } 2955 2956 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 2957 InitOMXParams(&hevcType); 2958 hevcType.nPortIndex = kPortIndexOutput; 2959 2960 status_t err = OK; 2961 err = mOMX->getParameter( 2962 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2963 if (err != OK) { 2964 return err; 2965 } 2966 2967 int32_t profile; 2968 if (msg->findInt32("profile", &profile)) { 2969 int32_t level; 2970 if (!msg->findInt32("level", &level)) { 2971 return INVALID_OPERATION; 2972 } 2973 2974 err = verifySupportForProfileAndLevel(profile, level); 
2975 if (err != OK) { 2976 return err; 2977 } 2978 2979 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 2980 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 2981 } 2982 2983 // TODO: Need OMX structure definition for setting iFrameInterval 2984 2985 err = mOMX->setParameter( 2986 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2987 if (err != OK) { 2988 return err; 2989 } 2990 2991 return configureBitrate(bitrate, bitrateMode); 2992} 2993 2994status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 2995 int32_t bitrate; 2996 int32_t iFrameInterval = 0; 2997 size_t tsLayers = 0; 2998 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 2999 OMX_VIDEO_VPXTemporalLayerPatternNone; 3000 static const uint32_t kVp8LayerRateAlloction 3001 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 3002 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 3003 {100, 100, 100}, // 1 layer 3004 { 60, 100, 100}, // 2 layers {60%, 40%} 3005 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 3006 }; 3007 if (!msg->findInt32("bitrate", &bitrate)) { 3008 return INVALID_OPERATION; 3009 } 3010 msg->findInt32("i-frame-interval", &iFrameInterval); 3011 3012 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3013 3014 float frameRate; 3015 if (!msg->findFloat("frame-rate", &frameRate)) { 3016 int32_t tmp; 3017 if (!msg->findInt32("frame-rate", &tmp)) { 3018 return INVALID_OPERATION; 3019 } 3020 frameRate = (float)tmp; 3021 } 3022 3023 AString tsSchema; 3024 if (msg->findString("ts-schema", &tsSchema)) { 3025 if (tsSchema == "webrtc.vp8.1-layer") { 3026 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3027 tsLayers = 1; 3028 } else if (tsSchema == "webrtc.vp8.2-layer") { 3029 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3030 tsLayers = 2; 3031 } else if (tsSchema == "webrtc.vp8.3-layer") { 3032 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3033 tsLayers = 3; 3034 } else { 3035 ALOGW("Unsupported ts-schema [%s]", 
tsSchema.c_str()); 3036 } 3037 } 3038 3039 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 3040 InitOMXParams(&vp8type); 3041 vp8type.nPortIndex = kPortIndexOutput; 3042 status_t err = mOMX->getParameter( 3043 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3044 &vp8type, sizeof(vp8type)); 3045 3046 if (err == OK) { 3047 if (iFrameInterval > 0) { 3048 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 3049 } 3050 vp8type.eTemporalPattern = pattern; 3051 vp8type.nTemporalLayerCount = tsLayers; 3052 if (tsLayers > 0) { 3053 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 3054 vp8type.nTemporalLayerBitrateRatio[i] = 3055 kVp8LayerRateAlloction[tsLayers - 1][i]; 3056 } 3057 } 3058 if (bitrateMode == OMX_Video_ControlRateConstant) { 3059 vp8type.nMinQuantizer = 2; 3060 vp8type.nMaxQuantizer = 63; 3061 } 3062 3063 err = mOMX->setParameter( 3064 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3065 &vp8type, sizeof(vp8type)); 3066 if (err != OK) { 3067 ALOGW("Extended VP8 parameters set failed: %d", err); 3068 } 3069 } 3070 3071 return configureBitrate(bitrate, bitrateMode); 3072} 3073 3074status_t ACodec::verifySupportForProfileAndLevel( 3075 int32_t profile, int32_t level) { 3076 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3077 InitOMXParams(¶ms); 3078 params.nPortIndex = kPortIndexOutput; 3079 3080 for (params.nProfileIndex = 0;; ++params.nProfileIndex) { 3081 status_t err = mOMX->getParameter( 3082 mNode, 3083 OMX_IndexParamVideoProfileLevelQuerySupported, 3084 ¶ms, 3085 sizeof(params)); 3086 3087 if (err != OK) { 3088 return err; 3089 } 3090 3091 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 3092 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 3093 3094 if (profile == supportedProfile && level <= supportedLevel) { 3095 return OK; 3096 } 3097 } 3098} 3099 3100status_t ACodec::configureBitrate( 3101 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 3102 
OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 3103 InitOMXParams(&bitrateType); 3104 bitrateType.nPortIndex = kPortIndexOutput; 3105 3106 status_t err = mOMX->getParameter( 3107 mNode, OMX_IndexParamVideoBitrate, 3108 &bitrateType, sizeof(bitrateType)); 3109 3110 if (err != OK) { 3111 return err; 3112 } 3113 3114 bitrateType.eControlRate = bitrateMode; 3115 bitrateType.nTargetBitrate = bitrate; 3116 3117 return mOMX->setParameter( 3118 mNode, OMX_IndexParamVideoBitrate, 3119 &bitrateType, sizeof(bitrateType)); 3120} 3121 3122status_t ACodec::setupErrorCorrectionParameters() { 3123 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 3124 InitOMXParams(&errorCorrectionType); 3125 errorCorrectionType.nPortIndex = kPortIndexOutput; 3126 3127 status_t err = mOMX->getParameter( 3128 mNode, OMX_IndexParamVideoErrorCorrection, 3129 &errorCorrectionType, sizeof(errorCorrectionType)); 3130 3131 if (err != OK) { 3132 return OK; // Optional feature. Ignore this failure 3133 } 3134 3135 errorCorrectionType.bEnableHEC = OMX_FALSE; 3136 errorCorrectionType.bEnableResync = OMX_TRUE; 3137 errorCorrectionType.nResynchMarkerSpacing = 256; 3138 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 3139 errorCorrectionType.bEnableRVLC = OMX_FALSE; 3140 3141 return mOMX->setParameter( 3142 mNode, OMX_IndexParamVideoErrorCorrection, 3143 &errorCorrectionType, sizeof(errorCorrectionType)); 3144} 3145 3146status_t ACodec::setVideoFormatOnPort( 3147 OMX_U32 portIndex, 3148 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 3149 float frameRate) { 3150 OMX_PARAM_PORTDEFINITIONTYPE def; 3151 InitOMXParams(&def); 3152 def.nPortIndex = portIndex; 3153 3154 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3155 3156 status_t err = mOMX->getParameter( 3157 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3158 3159 CHECK_EQ(err, (status_t)OK); 3160 3161 if (portIndex == kPortIndexInput) { 3162 // XXX Need a (much) better heuristic to compute input buffer 
sizes. 3163 const size_t X = 64 * 1024; 3164 if (def.nBufferSize < X) { 3165 def.nBufferSize = X; 3166 } 3167 } 3168 3169 CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); 3170 3171 video_def->nFrameWidth = width; 3172 video_def->nFrameHeight = height; 3173 3174 if (portIndex == kPortIndexInput) { 3175 video_def->eCompressionFormat = compressionFormat; 3176 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3177 if (frameRate >= 0) { 3178 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3179 } 3180 } 3181 3182 err = mOMX->setParameter( 3183 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3184 3185 return err; 3186} 3187 3188status_t ACodec::initNativeWindow() { 3189 if (mNativeWindow != NULL) { 3190 return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE); 3191 } 3192 3193 mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); 3194 return OK; 3195} 3196 3197size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 3198 size_t n = 0; 3199 3200 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 3201 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 3202 3203 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 3204 ++n; 3205 } 3206 } 3207 3208 return n; 3209} 3210 3211size_t ACodec::countBuffersOwnedByNativeWindow() const { 3212 size_t n = 0; 3213 3214 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 3215 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 3216 3217 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 3218 ++n; 3219 } 3220 } 3221 3222 return n; 3223} 3224 3225void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 3226 if (mNativeWindow == NULL) { 3227 return; 3228 } 3229 3230 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 3231 && dequeueBufferFromNativeWindow() != NULL) { 3232 // these buffers will be submitted as regular buffers; account for this 3233 if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) 
{ 3234 --mMetaDataBuffersToSubmit; 3235 } 3236 } 3237} 3238 3239bool ACodec::allYourBuffersAreBelongToUs( 3240 OMX_U32 portIndex) { 3241 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 3242 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 3243 3244 if (info->mStatus != BufferInfo::OWNED_BY_US 3245 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 3246 ALOGV("[%s] Buffer %u on port %u still has status %d", 3247 mComponentName.c_str(), 3248 info->mBufferID, portIndex, info->mStatus); 3249 return false; 3250 } 3251 } 3252 3253 return true; 3254} 3255 3256bool ACodec::allYourBuffersAreBelongToUs() { 3257 return allYourBuffersAreBelongToUs(kPortIndexInput) 3258 && allYourBuffersAreBelongToUs(kPortIndexOutput); 3259} 3260 3261void ACodec::deferMessage(const sp<AMessage> &msg) { 3262 mDeferredQueue.push_back(msg); 3263} 3264 3265void ACodec::processDeferredMessages() { 3266 List<sp<AMessage> > queue = mDeferredQueue; 3267 mDeferredQueue.clear(); 3268 3269 List<sp<AMessage> >::iterator it = queue.begin(); 3270 while (it != queue.end()) { 3271 onMessageReceived(*it++); 3272 } 3273} 3274 3275// static 3276bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams ¶ms) { 3277 MediaImage &image = params.sMediaImage; 3278 memset(&image, 0, sizeof(image)); 3279 3280 image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN; 3281 image.mNumPlanes = 0; 3282 3283 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 3284 image.mWidth = params.nFrameWidth; 3285 image.mHeight = params.nFrameHeight; 3286 3287 // only supporting YUV420 3288 if (fmt != OMX_COLOR_FormatYUV420Planar && 3289 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 3290 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 3291 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 3292 fmt != HAL_PIXEL_FORMAT_YV12) { 3293 ALOGW("do not know color format 0x%x = %d", fmt, fmt); 3294 return false; 3295 } 3296 3297 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 3298 if (params.nStride != 0 && 
params.nSliceHeight == 0) { 3299 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 3300 params.nFrameHeight); 3301 params.nSliceHeight = params.nFrameHeight; 3302 } 3303 3304 // we need stride and slice-height to be non-zero 3305 if (params.nStride == 0 || params.nSliceHeight == 0) { 3306 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 3307 fmt, fmt, params.nStride, params.nSliceHeight); 3308 return false; 3309 } 3310 3311 // set-up YUV format 3312 image.mType = MediaImage::MEDIA_IMAGE_TYPE_YUV; 3313 image.mNumPlanes = 3; 3314 image.mBitDepth = 8; 3315 image.mPlane[image.Y].mOffset = 0; 3316 image.mPlane[image.Y].mColInc = 1; 3317 image.mPlane[image.Y].mRowInc = params.nStride; 3318 image.mPlane[image.Y].mHorizSubsampling = 1; 3319 image.mPlane[image.Y].mVertSubsampling = 1; 3320 3321 switch ((int)fmt) { 3322 case HAL_PIXEL_FORMAT_YV12: 3323 if (params.bUsingNativeBuffers) { 3324 size_t ystride = align(params.nStride, 16); 3325 size_t cstride = align(params.nStride / 2, 16); 3326 image.mPlane[image.Y].mRowInc = ystride; 3327 3328 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 3329 image.mPlane[image.V].mColInc = 1; 3330 image.mPlane[image.V].mRowInc = cstride; 3331 image.mPlane[image.V].mHorizSubsampling = 2; 3332 image.mPlane[image.V].mVertSubsampling = 2; 3333 3334 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 3335 + (cstride * params.nSliceHeight / 2); 3336 image.mPlane[image.U].mColInc = 1; 3337 image.mPlane[image.U].mRowInc = cstride; 3338 image.mPlane[image.U].mHorizSubsampling = 2; 3339 image.mPlane[image.U].mVertSubsampling = 2; 3340 break; 3341 } else { 3342 // fall through as YV12 is used for YUV420Planar by some codecs 3343 } 3344 3345 case OMX_COLOR_FormatYUV420Planar: 3346 case OMX_COLOR_FormatYUV420PackedPlanar: 3347 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 3348 image.mPlane[image.U].mColInc = 1; 3349 image.mPlane[image.U].mRowInc = 
params.nStride / 2; 3350 image.mPlane[image.U].mHorizSubsampling = 2; 3351 image.mPlane[image.U].mVertSubsampling = 2; 3352 3353 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 3354 + (params.nStride * params.nSliceHeight / 4); 3355 image.mPlane[image.V].mColInc = 1; 3356 image.mPlane[image.V].mRowInc = params.nStride / 2; 3357 image.mPlane[image.V].mHorizSubsampling = 2; 3358 image.mPlane[image.V].mVertSubsampling = 2; 3359 break; 3360 3361 case OMX_COLOR_FormatYUV420SemiPlanar: 3362 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 3363 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 3364 // NV12 3365 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 3366 image.mPlane[image.U].mColInc = 2; 3367 image.mPlane[image.U].mRowInc = params.nStride; 3368 image.mPlane[image.U].mHorizSubsampling = 2; 3369 image.mPlane[image.U].mVertSubsampling = 2; 3370 3371 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 3372 image.mPlane[image.V].mColInc = 2; 3373 image.mPlane[image.V].mRowInc = params.nStride; 3374 image.mPlane[image.V].mHorizSubsampling = 2; 3375 image.mPlane[image.V].mVertSubsampling = 2; 3376 break; 3377 3378 default: 3379 TRESPASS(); 3380 } 3381 return true; 3382} 3383 3384// static 3385bool ACodec::describeColorFormat( 3386 const sp<IOMX> &omx, IOMX::node_id node, 3387 DescribeColorFormatParams &describeParams) 3388{ 3389 OMX_INDEXTYPE describeColorFormatIndex; 3390 if (omx->getExtensionIndex( 3391 node, "OMX.google.android.index.describeColorFormat", 3392 &describeColorFormatIndex) != OK || 3393 omx->getParameter( 3394 node, describeColorFormatIndex, 3395 &describeParams, sizeof(describeParams)) != OK) { 3396 return describeDefaultColorFormat(describeParams); 3397 } 3398 return describeParams.sMediaImage.mType != 3399 MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN; 3400} 3401 3402// static 3403bool ACodec::isFlexibleColorFormat( 3404 const sp<IOMX> &omx, IOMX::node_id node, 3405 uint32_t colorFormat, bool 
usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 3406 DescribeColorFormatParams describeParams; 3407 InitOMXParams(&describeParams); 3408 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 3409 // reasonable dummy values 3410 describeParams.nFrameWidth = 128; 3411 describeParams.nFrameHeight = 128; 3412 describeParams.nStride = 128; 3413 describeParams.nSliceHeight = 128; 3414 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 3415 3416 CHECK(flexibleEquivalent != NULL); 3417 3418 if (!describeColorFormat(omx, node, describeParams)) { 3419 return false; 3420 } 3421 3422 const MediaImage &img = describeParams.sMediaImage; 3423 if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) { 3424 if (img.mNumPlanes != 3 || 3425 img.mPlane[img.Y].mHorizSubsampling != 1 || 3426 img.mPlane[img.Y].mVertSubsampling != 1) { 3427 return false; 3428 } 3429 3430 // YUV 420 3431 if (img.mPlane[img.U].mHorizSubsampling == 2 3432 && img.mPlane[img.U].mVertSubsampling == 2 3433 && img.mPlane[img.V].mHorizSubsampling == 2 3434 && img.mPlane[img.V].mVertSubsampling == 2) { 3435 // possible flexible YUV420 format 3436 if (img.mBitDepth <= 8) { 3437 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 3438 return true; 3439 } 3440 } 3441 } 3442 return false; 3443} 3444 3445status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 3446 // TODO: catch errors an return them instead of using CHECK 3447 OMX_PARAM_PORTDEFINITIONTYPE def; 3448 InitOMXParams(&def); 3449 def.nPortIndex = portIndex; 3450 3451 CHECK_EQ(mOMX->getParameter( 3452 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)), 3453 (status_t)OK); 3454 3455 CHECK_EQ((int)def.eDir, 3456 (int)(portIndex == kPortIndexOutput ? 
OMX_DirOutput : OMX_DirInput)); 3457 3458 switch (def.eDomain) { 3459 case OMX_PortDomainVideo: 3460 { 3461 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 3462 switch ((int)videoDef->eCompressionFormat) { 3463 case OMX_VIDEO_CodingUnused: 3464 { 3465 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 3466 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 3467 3468 notify->setInt32("stride", videoDef->nStride); 3469 notify->setInt32("slice-height", videoDef->nSliceHeight); 3470 notify->setInt32("color-format", videoDef->eColorFormat); 3471 3472 if (mNativeWindow == NULL) { 3473 DescribeColorFormatParams describeParams; 3474 InitOMXParams(&describeParams); 3475 describeParams.eColorFormat = videoDef->eColorFormat; 3476 describeParams.nFrameWidth = videoDef->nFrameWidth; 3477 describeParams.nFrameHeight = videoDef->nFrameHeight; 3478 describeParams.nStride = videoDef->nStride; 3479 describeParams.nSliceHeight = videoDef->nSliceHeight; 3480 describeParams.bUsingNativeBuffers = OMX_FALSE; 3481 3482 if (describeColorFormat(mOMX, mNode, describeParams)) { 3483 notify->setBuffer( 3484 "image-data", 3485 ABuffer::CreateAsCopy( 3486 &describeParams.sMediaImage, 3487 sizeof(describeParams.sMediaImage))); 3488 3489 MediaImage *img = &describeParams.sMediaImage; 3490 ALOGV("[%s] MediaImage { F(%zux%zu) @%zu+%zu+%zu @%zu+%zu+%zu @%zu+%zu+%zu }", 3491 mComponentName.c_str(), img->mWidth, img->mHeight, 3492 img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc, 3493 img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc, 3494 img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc); 3495 } 3496 } 3497 3498 if (portIndex != kPortIndexOutput) { 3499 // TODO: also get input crop 3500 break; 3501 } 3502 3503 OMX_CONFIG_RECTTYPE rect; 3504 InitOMXParams(&rect); 3505 rect.nPortIndex = portIndex; 3506 3507 if (mOMX->getConfig( 3508 mNode, 3509 (portIndex == kPortIndexOutput ? 
3510 OMX_IndexConfigCommonOutputCrop : 3511 OMX_IndexConfigCommonInputCrop), 3512 &rect, sizeof(rect)) != OK) { 3513 rect.nLeft = 0; 3514 rect.nTop = 0; 3515 rect.nWidth = videoDef->nFrameWidth; 3516 rect.nHeight = videoDef->nFrameHeight; 3517 } 3518 3519 CHECK_GE(rect.nLeft, 0); 3520 CHECK_GE(rect.nTop, 0); 3521 CHECK_GE(rect.nWidth, 0u); 3522 CHECK_GE(rect.nHeight, 0u); 3523 CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); 3524 CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); 3525 3526 notify->setRect( 3527 "crop", 3528 rect.nLeft, 3529 rect.nTop, 3530 rect.nLeft + rect.nWidth - 1, 3531 rect.nTop + rect.nHeight - 1); 3532 3533 break; 3534 } 3535 3536 case OMX_VIDEO_CodingVP8: 3537 case OMX_VIDEO_CodingVP9: 3538 { 3539 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 3540 InitOMXParams(&vp8type); 3541 vp8type.nPortIndex = kPortIndexOutput; 3542 status_t err = mOMX->getParameter( 3543 mNode, 3544 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3545 &vp8type, 3546 sizeof(vp8type)); 3547 3548 if (err == OK) { 3549 AString tsSchema = "none"; 3550 if (vp8type.eTemporalPattern 3551 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 3552 switch (vp8type.nTemporalLayerCount) { 3553 case 1: 3554 { 3555 tsSchema = "webrtc.vp8.1-layer"; 3556 break; 3557 } 3558 case 2: 3559 { 3560 tsSchema = "webrtc.vp8.2-layer"; 3561 break; 3562 } 3563 case 3: 3564 { 3565 tsSchema = "webrtc.vp8.3-layer"; 3566 break; 3567 } 3568 default: 3569 { 3570 break; 3571 } 3572 } 3573 } 3574 notify->setString("ts-schema", tsSchema); 3575 } 3576 // Fall through to set up mime. 
3577 } 3578 3579 default: 3580 { 3581 CHECK(mIsEncoder ^ (portIndex == kPortIndexInput)); 3582 AString mime; 3583 if (GetMimeTypeForVideoCoding( 3584 videoDef->eCompressionFormat, &mime) != OK) { 3585 notify->setString("mime", "application/octet-stream"); 3586 } else { 3587 notify->setString("mime", mime.c_str()); 3588 } 3589 break; 3590 } 3591 } 3592 notify->setInt32("width", videoDef->nFrameWidth); 3593 notify->setInt32("height", videoDef->nFrameHeight); 3594 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 3595 portIndex == kPortIndexInput ? "input" : "output", 3596 notify->debugString().c_str()); 3597 3598 break; 3599 } 3600 3601 case OMX_PortDomainAudio: 3602 { 3603 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 3604 3605 switch ((int)audioDef->eEncoding) { 3606 case OMX_AUDIO_CodingPCM: 3607 { 3608 OMX_AUDIO_PARAM_PCMMODETYPE params; 3609 InitOMXParams(¶ms); 3610 params.nPortIndex = portIndex; 3611 3612 CHECK_EQ(mOMX->getParameter( 3613 mNode, OMX_IndexParamAudioPcm, 3614 ¶ms, sizeof(params)), 3615 (status_t)OK); 3616 3617 CHECK_GT(params.nChannels, 0); 3618 CHECK(params.nChannels == 1 || params.bInterleaved); 3619 CHECK_EQ(params.nBitPerSample, 16u); 3620 3621 CHECK_EQ((int)params.eNumData, 3622 (int)OMX_NumericalDataSigned); 3623 3624 CHECK_EQ((int)params.ePCMMode, 3625 (int)OMX_AUDIO_PCMModeLinear); 3626 3627 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 3628 notify->setInt32("channel-count", params.nChannels); 3629 notify->setInt32("sample-rate", params.nSamplingRate); 3630 3631 if (mChannelMaskPresent) { 3632 notify->setInt32("channel-mask", mChannelMask); 3633 } 3634 break; 3635 } 3636 3637 case OMX_AUDIO_CodingAAC: 3638 { 3639 OMX_AUDIO_PARAM_AACPROFILETYPE params; 3640 InitOMXParams(¶ms); 3641 params.nPortIndex = portIndex; 3642 3643 CHECK_EQ(mOMX->getParameter( 3644 mNode, OMX_IndexParamAudioAac, 3645 ¶ms, sizeof(params)), 3646 (status_t)OK); 3647 3648 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 3649 
notify->setInt32("channel-count", params.nChannels); 3650 notify->setInt32("sample-rate", params.nSampleRate); 3651 break; 3652 } 3653 3654 case OMX_AUDIO_CodingAMR: 3655 { 3656 OMX_AUDIO_PARAM_AMRTYPE params; 3657 InitOMXParams(¶ms); 3658 params.nPortIndex = portIndex; 3659 3660 CHECK_EQ(mOMX->getParameter( 3661 mNode, OMX_IndexParamAudioAmr, 3662 ¶ms, sizeof(params)), 3663 (status_t)OK); 3664 3665 notify->setInt32("channel-count", 1); 3666 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 3667 notify->setString( 3668 "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 3669 3670 notify->setInt32("sample-rate", 16000); 3671 } else { 3672 notify->setString( 3673 "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 3674 3675 notify->setInt32("sample-rate", 8000); 3676 } 3677 break; 3678 } 3679 3680 case OMX_AUDIO_CodingFLAC: 3681 { 3682 OMX_AUDIO_PARAM_FLACTYPE params; 3683 InitOMXParams(¶ms); 3684 params.nPortIndex = portIndex; 3685 3686 CHECK_EQ(mOMX->getParameter( 3687 mNode, OMX_IndexParamAudioFlac, 3688 ¶ms, sizeof(params)), 3689 (status_t)OK); 3690 3691 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 3692 notify->setInt32("channel-count", params.nChannels); 3693 notify->setInt32("sample-rate", params.nSampleRate); 3694 break; 3695 } 3696 3697 case OMX_AUDIO_CodingMP3: 3698 { 3699 OMX_AUDIO_PARAM_MP3TYPE params; 3700 InitOMXParams(¶ms); 3701 params.nPortIndex = portIndex; 3702 3703 CHECK_EQ(mOMX->getParameter( 3704 mNode, OMX_IndexParamAudioMp3, 3705 ¶ms, sizeof(params)), 3706 (status_t)OK); 3707 3708 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 3709 notify->setInt32("channel-count", params.nChannels); 3710 notify->setInt32("sample-rate", params.nSampleRate); 3711 break; 3712 } 3713 3714 case OMX_AUDIO_CodingVORBIS: 3715 { 3716 OMX_AUDIO_PARAM_VORBISTYPE params; 3717 InitOMXParams(¶ms); 3718 params.nPortIndex = portIndex; 3719 3720 CHECK_EQ(mOMX->getParameter( 3721 mNode, OMX_IndexParamAudioVorbis, 3722 ¶ms, sizeof(params)), 3723 (status_t)OK); 3724 3725 
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 3726 notify->setInt32("channel-count", params.nChannels); 3727 notify->setInt32("sample-rate", params.nSampleRate); 3728 break; 3729 } 3730 3731 case OMX_AUDIO_CodingAndroidAC3: 3732 { 3733 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 3734 InitOMXParams(¶ms); 3735 params.nPortIndex = portIndex; 3736 3737 CHECK_EQ((status_t)OK, mOMX->getParameter( 3738 mNode, 3739 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 3740 ¶ms, 3741 sizeof(params))); 3742 3743 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3); 3744 notify->setInt32("channel-count", params.nChannels); 3745 notify->setInt32("sample-rate", params.nSampleRate); 3746 break; 3747 } 3748 3749 case OMX_AUDIO_CodingAndroidEAC3: 3750 { 3751 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 3752 InitOMXParams(¶ms); 3753 params.nPortIndex = portIndex; 3754 3755 CHECK_EQ((status_t)OK, mOMX->getParameter( 3756 mNode, 3757 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 3758 ¶ms, 3759 sizeof(params))); 3760 3761 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 3762 notify->setInt32("channel-count", params.nChannels); 3763 notify->setInt32("sample-rate", params.nSampleRate); 3764 break; 3765 } 3766 3767 case OMX_AUDIO_CodingAndroidOPUS: 3768 { 3769 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 3770 InitOMXParams(¶ms); 3771 params.nPortIndex = portIndex; 3772 3773 CHECK_EQ((status_t)OK, mOMX->getParameter( 3774 mNode, 3775 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 3776 ¶ms, 3777 sizeof(params))); 3778 3779 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 3780 notify->setInt32("channel-count", params.nChannels); 3781 notify->setInt32("sample-rate", params.nSampleRate); 3782 break; 3783 } 3784 3785 case OMX_AUDIO_CodingG711: 3786 { 3787 OMX_AUDIO_PARAM_PCMMODETYPE params; 3788 InitOMXParams(¶ms); 3789 params.nPortIndex = portIndex; 3790 3791 CHECK_EQ((status_t)OK, mOMX->getParameter( 3792 mNode, 3793 (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, 3794 ¶ms, 3795 sizeof(params))); 
3796 3797 const char *mime = NULL; 3798 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 3799 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 3800 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 3801 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 3802 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 3803 mime = MEDIA_MIMETYPE_AUDIO_RAW; 3804 } 3805 notify->setString("mime", mime); 3806 notify->setInt32("channel-count", params.nChannels); 3807 notify->setInt32("sample-rate", params.nSamplingRate); 3808 break; 3809 } 3810 3811 case OMX_AUDIO_CodingGSMFR: 3812 { 3813 OMX_AUDIO_PARAM_MP3TYPE params; 3814 InitOMXParams(¶ms); 3815 params.nPortIndex = portIndex; 3816 3817 CHECK_EQ(mOMX->getParameter( 3818 mNode, OMX_IndexParamAudioPcm, 3819 ¶ms, sizeof(params)), 3820 (status_t)OK); 3821 3822 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 3823 notify->setInt32("channel-count", params.nChannels); 3824 notify->setInt32("sample-rate", params.nSampleRate); 3825 break; 3826 } 3827 3828 default: 3829 ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding); 3830 TRESPASS(); 3831 } 3832 break; 3833 } 3834 3835 default: 3836 TRESPASS(); 3837 } 3838 3839 return OK; 3840} 3841 3842void ACodec::sendFormatChange(const sp<AMessage> &reply) { 3843 sp<AMessage> notify = mBaseOutputFormat->dup(); 3844 notify->setInt32("what", kWhatOutputFormatChanged); 3845 3846 CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK); 3847 3848 AString mime; 3849 CHECK(notify->findString("mime", &mime)); 3850 3851 int32_t left, top, right, bottom; 3852 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && 3853 mNativeWindow != NULL && 3854 notify->findRect("crop", &left, &top, &right, &bottom)) { 3855 // notify renderer of the crop change 3856 // NOTE: native window uses extended right-bottom coordinate 3857 reply->setRect("crop", left, top, right + 1, bottom + 1); 3858 } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW && 3859 (mEncoderDelay || mEncoderPadding)) { 3860 int32_t channelCount; 3861 
CHECK(notify->findInt32("channel-count", &channelCount)); 3862 size_t frameSize = channelCount * sizeof(int16_t); 3863 if (mSkipCutBuffer != NULL) { 3864 size_t prevbufsize = mSkipCutBuffer->size(); 3865 if (prevbufsize != 0) { 3866 ALOGW("Replacing SkipCutBuffer holding %d " 3867 "bytes", 3868 prevbufsize); 3869 } 3870 } 3871 mSkipCutBuffer = new SkipCutBuffer( 3872 mEncoderDelay * frameSize, 3873 mEncoderPadding * frameSize); 3874 } 3875 3876 notify->post(); 3877 3878 mSentFormat = true; 3879} 3880 3881void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 3882 sp<AMessage> notify = mNotify->dup(); 3883 notify->setInt32("what", CodecBase::kWhatError); 3884 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 3885 3886 if (internalError == UNKNOWN_ERROR) { // find better error code 3887 const status_t omxStatus = statusFromOMXError(error); 3888 if (omxStatus != 0) { 3889 internalError = omxStatus; 3890 } else { 3891 ALOGW("Invalid OMX error %#x", error); 3892 } 3893 } 3894 notify->setInt32("err", internalError); 3895 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 3896 notify->post(); 3897} 3898 3899status_t ACodec::pushBlankBuffersToNativeWindow() { 3900 status_t err = NO_ERROR; 3901 ANativeWindowBuffer* anb = NULL; 3902 int numBufs = 0; 3903 int minUndequeuedBufs = 0; 3904 3905 // We need to reconnect to the ANativeWindow as a CPU client to ensure that 3906 // no frames get dropped by SurfaceFlinger assuming that these are video 3907 // frames. 
3908 err = native_window_api_disconnect(mNativeWindow.get(), 3909 NATIVE_WINDOW_API_MEDIA); 3910 if (err != NO_ERROR) { 3911 ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", 3912 strerror(-err), -err); 3913 return err; 3914 } 3915 3916 err = native_window_api_connect(mNativeWindow.get(), 3917 NATIVE_WINDOW_API_CPU); 3918 if (err != NO_ERROR) { 3919 ALOGE("error pushing blank frames: api_connect failed: %s (%d)", 3920 strerror(-err), -err); 3921 return err; 3922 } 3923 3924 err = native_window_set_buffers_geometry(mNativeWindow.get(), 1, 1, 3925 HAL_PIXEL_FORMAT_RGBX_8888); 3926 if (err != NO_ERROR) { 3927 ALOGE("error pushing blank frames: set_buffers_geometry failed: %s (%d)", 3928 strerror(-err), -err); 3929 goto error; 3930 } 3931 3932 err = native_window_set_scaling_mode(mNativeWindow.get(), 3933 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 3934 if (err != NO_ERROR) { 3935 ALOGE("error pushing blank_frames: set_scaling_mode failed: %s (%d)", 3936 strerror(-err), -err); 3937 goto error; 3938 } 3939 3940 err = native_window_set_usage(mNativeWindow.get(), 3941 GRALLOC_USAGE_SW_WRITE_OFTEN); 3942 if (err != NO_ERROR) { 3943 ALOGE("error pushing blank frames: set_usage failed: %s (%d)", 3944 strerror(-err), -err); 3945 goto error; 3946 } 3947 3948 err = mNativeWindow->query(mNativeWindow.get(), 3949 NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs); 3950 if (err != NO_ERROR) { 3951 ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query " 3952 "failed: %s (%d)", strerror(-err), -err); 3953 goto error; 3954 } 3955 3956 numBufs = minUndequeuedBufs + 1; 3957 err = native_window_set_buffer_count(mNativeWindow.get(), numBufs); 3958 if (err != NO_ERROR) { 3959 ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", 3960 strerror(-err), -err); 3961 goto error; 3962 } 3963 3964 // We push numBufs + 1 buffers to ensure that we've drawn into the same 3965 // buffer twice. 
This should guarantee that the buffer has been displayed 3966 // on the screen and then been replaced, so an previous video frames are 3967 // guaranteed NOT to be currently displayed. 3968 for (int i = 0; i < numBufs + 1; i++) { 3969 err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb); 3970 if (err != NO_ERROR) { 3971 ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)", 3972 strerror(-err), -err); 3973 goto error; 3974 } 3975 3976 sp<GraphicBuffer> buf(new GraphicBuffer(anb, false)); 3977 3978 // Fill the buffer with the a 1x1 checkerboard pattern ;) 3979 uint32_t* img = NULL; 3980 err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img)); 3981 if (err != NO_ERROR) { 3982 ALOGE("error pushing blank frames: lock failed: %s (%d)", 3983 strerror(-err), -err); 3984 goto error; 3985 } 3986 3987 *img = 0; 3988 3989 err = buf->unlock(); 3990 if (err != NO_ERROR) { 3991 ALOGE("error pushing blank frames: unlock failed: %s (%d)", 3992 strerror(-err), -err); 3993 goto error; 3994 } 3995 3996 err = mNativeWindow->queueBuffer(mNativeWindow.get(), 3997 buf->getNativeBuffer(), -1); 3998 if (err != NO_ERROR) { 3999 ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", 4000 strerror(-err), -err); 4001 goto error; 4002 } 4003 4004 anb = NULL; 4005 } 4006 4007error: 4008 4009 if (err != NO_ERROR) { 4010 // Clean up after an error. 4011 if (anb != NULL) { 4012 mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1); 4013 } 4014 4015 native_window_api_disconnect(mNativeWindow.get(), 4016 NATIVE_WINDOW_API_CPU); 4017 native_window_api_connect(mNativeWindow.get(), 4018 NATIVE_WINDOW_API_MEDIA); 4019 4020 return err; 4021 } else { 4022 // Clean up after success. 
4023 err = native_window_api_disconnect(mNativeWindow.get(), 4024 NATIVE_WINDOW_API_CPU); 4025 if (err != NO_ERROR) { 4026 ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", 4027 strerror(-err), -err); 4028 return err; 4029 } 4030 4031 err = native_window_api_connect(mNativeWindow.get(), 4032 NATIVE_WINDOW_API_MEDIA); 4033 if (err != NO_ERROR) { 4034 ALOGE("error pushing blank frames: api_connect failed: %s (%d)", 4035 strerror(-err), -err); 4036 return err; 4037 } 4038 4039 return NO_ERROR; 4040 } 4041} 4042 4043//////////////////////////////////////////////////////////////////////////////// 4044 4045ACodec::PortDescription::PortDescription() { 4046} 4047 4048status_t ACodec::requestIDRFrame() { 4049 if (!mIsEncoder) { 4050 return ERROR_UNSUPPORTED; 4051 } 4052 4053 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 4054 InitOMXParams(¶ms); 4055 4056 params.nPortIndex = kPortIndexOutput; 4057 params.IntraRefreshVOP = OMX_TRUE; 4058 4059 return mOMX->setConfig( 4060 mNode, 4061 OMX_IndexConfigVideoIntraVOPRefresh, 4062 ¶ms, 4063 sizeof(params)); 4064} 4065 4066void ACodec::PortDescription::addBuffer( 4067 IOMX::buffer_id id, const sp<ABuffer> &buffer) { 4068 mBufferIDs.push_back(id); 4069 mBuffers.push_back(buffer); 4070} 4071 4072size_t ACodec::PortDescription::countBuffers() { 4073 return mBufferIDs.size(); 4074} 4075 4076IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 4077 return mBufferIDs.itemAt(index); 4078} 4079 4080sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 4081 return mBuffers.itemAt(index); 4082} 4083 4084//////////////////////////////////////////////////////////////////////////////// 4085 4086ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 4087 : AState(parentState), 4088 mCodec(codec) { 4089} 4090 4091ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 4092 OMX_U32 /* portIndex */) { 4093 return KEEP_BUFFERS; 4094} 4095 4096bool 
ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 4097 switch (msg->what()) { 4098 case kWhatInputBufferFilled: 4099 { 4100 onInputBufferFilled(msg); 4101 break; 4102 } 4103 4104 case kWhatOutputBufferDrained: 4105 { 4106 onOutputBufferDrained(msg); 4107 break; 4108 } 4109 4110 case ACodec::kWhatOMXMessage: 4111 { 4112 return onOMXMessage(msg); 4113 } 4114 4115 case ACodec::kWhatCreateInputSurface: 4116 case ACodec::kWhatSignalEndOfInputStream: 4117 { 4118 // This may result in an app illegal state exception. 4119 ALOGE("Message 0x%x was not handled", msg->what()); 4120 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 4121 return true; 4122 } 4123 4124 case ACodec::kWhatOMXDied: 4125 { 4126 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 4127 ALOGE("OMX/mediaserver died, signalling error!"); 4128 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 4129 break; 4130 } 4131 4132 case ACodec::kWhatReleaseCodecInstance: 4133 { 4134 ALOGI("[%s] forcing the release of codec", 4135 mCodec->mComponentName.c_str()); 4136 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 4137 ALOGE_IF("[%s] failed to release codec instance: err=%d", 4138 mCodec->mComponentName.c_str(), err); 4139 sp<AMessage> notify = mCodec->mNotify->dup(); 4140 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4141 notify->post(); 4142 break; 4143 } 4144 4145 default: 4146 return false; 4147 } 4148 4149 return true; 4150} 4151 4152bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 4153 int32_t type; 4154 CHECK(msg->findInt32("type", &type)); 4155 4156 // there is a possibility that this is an outstanding message for a 4157 // codec that we have already destroyed 4158 if (mCodec->mNode == NULL) { 4159 ALOGI("ignoring message as already freed component: %s", 4160 msg->debugString().c_str()); 4161 return true; 4162 } 4163 4164 IOMX::node_id nodeID; 4165 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 4166 CHECK_EQ(nodeID, 
mCodec->mNode); 4167 4168 switch (type) { 4169 case omx_message::EVENT: 4170 { 4171 int32_t event, data1, data2; 4172 CHECK(msg->findInt32("event", &event)); 4173 CHECK(msg->findInt32("data1", &data1)); 4174 CHECK(msg->findInt32("data2", &data2)); 4175 4176 if (event == OMX_EventCmdComplete 4177 && data1 == OMX_CommandFlush 4178 && data2 == (int32_t)OMX_ALL) { 4179 // Use of this notification is not consistent across 4180 // implementations. We'll drop this notification and rely 4181 // on flush-complete notifications on the individual port 4182 // indices instead. 4183 4184 return true; 4185 } 4186 4187 return onOMXEvent( 4188 static_cast<OMX_EVENTTYPE>(event), 4189 static_cast<OMX_U32>(data1), 4190 static_cast<OMX_U32>(data2)); 4191 } 4192 4193 case omx_message::EMPTY_BUFFER_DONE: 4194 { 4195 IOMX::buffer_id bufferID; 4196 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 4197 4198 return onOMXEmptyBufferDone(bufferID); 4199 } 4200 4201 case omx_message::FILL_BUFFER_DONE: 4202 { 4203 IOMX::buffer_id bufferID; 4204 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 4205 4206 int32_t rangeOffset, rangeLength, flags; 4207 int64_t timeUs; 4208 4209 CHECK(msg->findInt32("range_offset", &rangeOffset)); 4210 CHECK(msg->findInt32("range_length", &rangeLength)); 4211 CHECK(msg->findInt32("flags", &flags)); 4212 CHECK(msg->findInt64("timestamp", &timeUs)); 4213 4214 return onOMXFillBufferDone( 4215 bufferID, 4216 (size_t)rangeOffset, (size_t)rangeLength, 4217 (OMX_U32)flags, 4218 timeUs); 4219 } 4220 4221 default: 4222 TRESPASS(); 4223 break; 4224 } 4225} 4226 4227bool ACodec::BaseState::onOMXEvent( 4228 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 4229 if (event != OMX_EventError) { 4230 ALOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)", 4231 mCodec->mComponentName.c_str(), event, data1, data2); 4232 4233 return false; 4234 } 4235 4236 ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1); 4237 4238 // verify OMX component sends back an error we 
expect. 4239 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 4240 if (!isOMXError(omxError)) { 4241 ALOGW("Invalid OMX error %#x", omxError); 4242 omxError = OMX_ErrorUndefined; 4243 } 4244 mCodec->signalError(omxError); 4245 4246 return true; 4247} 4248 4249bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) { 4250 ALOGV("[%s] onOMXEmptyBufferDone %p", 4251 mCodec->mComponentName.c_str(), bufferID); 4252 4253 BufferInfo *info = 4254 mCodec->findBufferByID(kPortIndexInput, bufferID); 4255 4256 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); 4257 info->mStatus = BufferInfo::OWNED_BY_US; 4258 4259 // We're in "store-metadata-in-buffers" mode, the underlying 4260 // OMX component had access to data that's implicitly refcounted 4261 // by this "MediaBuffer" object. Now that the OMX component has 4262 // told us that it's done with the input buffer, we can decrement 4263 // the mediaBuffer's reference count. 4264 info->mData->setMediaBufferBase(NULL); 4265 4266 PortMode mode = getPortMode(kPortIndexInput); 4267 4268 switch (mode) { 4269 case KEEP_BUFFERS: 4270 break; 4271 4272 case RESUBMIT_BUFFERS: 4273 postFillThisBuffer(info); 4274 break; 4275 4276 default: 4277 { 4278 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4279 TRESPASS(); // Not currently used 4280 break; 4281 } 4282 } 4283 4284 return true; 4285} 4286 4287void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 4288 if (mCodec->mPortEOS[kPortIndexInput]) { 4289 return; 4290 } 4291 4292 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 4293 4294 sp<AMessage> notify = mCodec->mNotify->dup(); 4295 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 4296 notify->setInt32("buffer-id", info->mBufferID); 4297 4298 info->mData->meta()->clear(); 4299 notify->setBuffer("buffer", info->mData); 4300 4301 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 4302 reply->setInt32("buffer-id", info->mBufferID); 4303 4304 notify->setMessage("reply", reply); 
4305 4306 notify->post(); 4307 4308 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 4309} 4310 4311void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 4312 IOMX::buffer_id bufferID; 4313 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 4314 sp<ABuffer> buffer; 4315 int32_t err = OK; 4316 bool eos = false; 4317 PortMode mode = getPortMode(kPortIndexInput); 4318 4319 if (!msg->findBuffer("buffer", &buffer)) { 4320 /* these are unfilled buffers returned by client */ 4321 CHECK(msg->findInt32("err", &err)); 4322 4323 if (err == OK) { 4324 /* buffers with no errors are returned on MediaCodec.flush */ 4325 mode = KEEP_BUFFERS; 4326 } else { 4327 ALOGV("[%s] saw error %d instead of an input buffer", 4328 mCodec->mComponentName.c_str(), err); 4329 eos = true; 4330 } 4331 4332 buffer.clear(); 4333 } 4334 4335 int32_t tmp; 4336 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 4337 eos = true; 4338 err = ERROR_END_OF_STREAM; 4339 } 4340 4341 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 4342 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM); 4343 4344 info->mStatus = BufferInfo::OWNED_BY_US; 4345 4346 switch (mode) { 4347 case KEEP_BUFFERS: 4348 { 4349 if (eos) { 4350 if (!mCodec->mPortEOS[kPortIndexInput]) { 4351 mCodec->mPortEOS[kPortIndexInput] = true; 4352 mCodec->mInputEOSResult = err; 4353 } 4354 } 4355 break; 4356 } 4357 4358 case RESUBMIT_BUFFERS: 4359 { 4360 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 4361 int64_t timeUs; 4362 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 4363 4364 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 4365 4366 int32_t isCSD; 4367 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 4368 flags |= OMX_BUFFERFLAG_CODECCONFIG; 4369 } 4370 4371 if (eos) { 4372 flags |= OMX_BUFFERFLAG_EOS; 4373 } 4374 4375 if (buffer != info->mData) { 4376 ALOGV("[%s] Needs to copy input data for buffer %p. 
(%p != %p)", 4377 mCodec->mComponentName.c_str(), 4378 bufferID, 4379 buffer.get(), info->mData.get()); 4380 4381 CHECK_LE(buffer->size(), info->mData->capacity()); 4382 memcpy(info->mData->data(), buffer->data(), buffer->size()); 4383 } 4384 4385 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 4386 ALOGV("[%s] calling emptyBuffer %p w/ codec specific data", 4387 mCodec->mComponentName.c_str(), bufferID); 4388 } else if (flags & OMX_BUFFERFLAG_EOS) { 4389 ALOGV("[%s] calling emptyBuffer %p w/ EOS", 4390 mCodec->mComponentName.c_str(), bufferID); 4391 } else { 4392#if TRACK_BUFFER_TIMING 4393 ALOGI("[%s] calling emptyBuffer %p w/ time %lld us", 4394 mCodec->mComponentName.c_str(), bufferID, timeUs); 4395#else 4396 ALOGV("[%s] calling emptyBuffer %p w/ time %lld us", 4397 mCodec->mComponentName.c_str(), bufferID, timeUs); 4398#endif 4399 } 4400 4401#if TRACK_BUFFER_TIMING 4402 ACodec::BufferStats stats; 4403 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 4404 stats.mFillBufferDoneTimeUs = -1ll; 4405 mCodec->mBufferStats.add(timeUs, stats); 4406#endif 4407 4408 if (mCodec->mStoreMetaDataInOutputBuffers) { 4409 // try to submit an output buffer for each input buffer 4410 PortMode outputMode = getPortMode(kPortIndexOutput); 4411 4412 ALOGV("MetaDataBuffersToSubmit=%u portMode=%s", 4413 mCodec->mMetaDataBuffersToSubmit, 4414 (outputMode == FREE_BUFFERS ? "FREE" : 4415 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 4416 if (outputMode == RESUBMIT_BUFFERS) { 4417 mCodec->submitOutputMetaDataBuffer(); 4418 } 4419 } 4420 4421 CHECK_EQ(mCodec->mOMX->emptyBuffer( 4422 mCodec->mNode, 4423 bufferID, 4424 0, 4425 buffer->size(), 4426 flags, 4427 timeUs), 4428 (status_t)OK); 4429 4430 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4431 4432 if (!eos) { 4433 getMoreInputDataIfPossible(); 4434 } else { 4435 ALOGV("[%s] Signalled EOS on the input port", 4436 mCodec->mComponentName.c_str()); 4437 4438 mCodec->mPortEOS[kPortIndexInput] = true; 4439 mCodec->mInputEOSResult = err; 4440 } 4441 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 4442 if (err != ERROR_END_OF_STREAM) { 4443 ALOGV("[%s] Signalling EOS on the input port " 4444 "due to error %d", 4445 mCodec->mComponentName.c_str(), err); 4446 } else { 4447 ALOGV("[%s] Signalling EOS on the input port", 4448 mCodec->mComponentName.c_str()); 4449 } 4450 4451 ALOGV("[%s] calling emptyBuffer %p signalling EOS", 4452 mCodec->mComponentName.c_str(), bufferID); 4453 4454 CHECK_EQ(mCodec->mOMX->emptyBuffer( 4455 mCodec->mNode, 4456 bufferID, 4457 0, 4458 0, 4459 OMX_BUFFERFLAG_EOS, 4460 0), 4461 (status_t)OK); 4462 4463 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4464 4465 mCodec->mPortEOS[kPortIndexInput] = true; 4466 mCodec->mInputEOSResult = err; 4467 } 4468 break; 4469 } 4470 4471 default: 4472 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4473 break; 4474 } 4475} 4476 4477void ACodec::BaseState::getMoreInputDataIfPossible() { 4478 if (mCodec->mPortEOS[kPortIndexInput]) { 4479 return; 4480 } 4481 4482 BufferInfo *eligible = NULL; 4483 4484 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 4485 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 4486 4487#if 0 4488 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 4489 // There's already a "read" pending. 
4490 return; 4491 } 4492#endif 4493 4494 if (info->mStatus == BufferInfo::OWNED_BY_US) { 4495 eligible = info; 4496 } 4497 } 4498 4499 if (eligible == NULL) { 4500 return; 4501 } 4502 4503 postFillThisBuffer(eligible); 4504} 4505 4506bool ACodec::BaseState::onOMXFillBufferDone( 4507 IOMX::buffer_id bufferID, 4508 size_t rangeOffset, size_t rangeLength, 4509 OMX_U32 flags, 4510 int64_t timeUs) { 4511 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 4512 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 4513 4514 ssize_t index; 4515 4516#if TRACK_BUFFER_TIMING 4517 index = mCodec->mBufferStats.indexOfKey(timeUs); 4518 if (index >= 0) { 4519 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 4520 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 4521 4522 ALOGI("frame PTS %lld: %lld", 4523 timeUs, 4524 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 4525 4526 mCodec->mBufferStats.removeItemsAt(index); 4527 stats = NULL; 4528 } 4529#endif 4530 4531 BufferInfo *info = 4532 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 4533 4534 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); 4535 4536 info->mDequeuedAt = ++mCodec->mDequeueCounter; 4537 info->mStatus = BufferInfo::OWNED_BY_US; 4538 4539 PortMode mode = getPortMode(kPortIndexOutput); 4540 4541 switch (mode) { 4542 case KEEP_BUFFERS: 4543 break; 4544 4545 case RESUBMIT_BUFFERS: 4546 { 4547 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 4548 || mCodec->mPortEOS[kPortIndexOutput])) { 4549 ALOGV("[%s] calling fillBuffer %u", 4550 mCodec->mComponentName.c_str(), info->mBufferID); 4551 4552 CHECK_EQ(mCodec->mOMX->fillBuffer( 4553 mCodec->mNode, info->mBufferID), 4554 (status_t)OK); 4555 4556 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4557 break; 4558 } 4559 4560 sp<AMessage> reply = 4561 new AMessage(kWhatOutputBufferDrained, mCodec); 4562 4563 if (!mCodec->mSentFormat && rangeLength > 0) { 4564 
mCodec->sendFormatChange(reply); 4565 } 4566 4567 if (mCodec->mUseMetadataOnEncoderOutput) { 4568 native_handle_t* handle = 4569 *(native_handle_t**)(info->mData->data() + 4); 4570 info->mData->meta()->setPointer("handle", handle); 4571 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 4572 info->mData->meta()->setInt32("rangeLength", rangeLength); 4573 } else { 4574 info->mData->setRange(rangeOffset, rangeLength); 4575 } 4576#if 0 4577 if (mCodec->mNativeWindow == NULL) { 4578 if (IsIDR(info->mData)) { 4579 ALOGI("IDR frame"); 4580 } 4581 } 4582#endif 4583 4584 if (mCodec->mSkipCutBuffer != NULL) { 4585 mCodec->mSkipCutBuffer->submit(info->mData); 4586 } 4587 info->mData->meta()->setInt64("timeUs", timeUs); 4588 4589 sp<AMessage> notify = mCodec->mNotify->dup(); 4590 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 4591 notify->setInt32("buffer-id", info->mBufferID); 4592 notify->setBuffer("buffer", info->mData); 4593 notify->setInt32("flags", flags); 4594 4595 reply->setInt32("buffer-id", info->mBufferID); 4596 4597 notify->setMessage("reply", reply); 4598 4599 notify->post(); 4600 4601 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 4602 4603 if (flags & OMX_BUFFERFLAG_EOS) { 4604 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 4605 4606 sp<AMessage> notify = mCodec->mNotify->dup(); 4607 notify->setInt32("what", CodecBase::kWhatEOS); 4608 notify->setInt32("err", mCodec->mInputEOSResult); 4609 notify->post(); 4610 4611 mCodec->mPortEOS[kPortIndexOutput] = true; 4612 } 4613 break; 4614 } 4615 4616 default: 4617 { 4618 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4619 4620 CHECK_EQ((status_t)OK, 4621 mCodec->freeBuffer(kPortIndexOutput, index)); 4622 break; 4623 } 4624 } 4625 4626 return true; 4627} 4628 4629void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 4630 IOMX::buffer_id bufferID; 4631 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 4632 ssize_t index; 4633 BufferInfo *info = 4634 
mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 4635 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); 4636 4637 android_native_rect_t crop; 4638 if (msg->findRect("crop", 4639 &crop.left, &crop.top, &crop.right, &crop.bottom)) { 4640 CHECK_EQ(0, native_window_set_crop( 4641 mCodec->mNativeWindow.get(), &crop)); 4642 } 4643 4644 int32_t render; 4645 if (mCodec->mNativeWindow != NULL 4646 && msg->findInt32("render", &render) && render != 0 4647 && info->mData != NULL && info->mData->size() != 0) { 4648 ATRACE_NAME("render"); 4649 // The client wants this buffer to be rendered. 4650 4651 int64_t timestampNs = 0; 4652 if (!msg->findInt64("timestampNs", ×tampNs)) { 4653 // TODO: it seems like we should use the timestamp 4654 // in the (media)buffer as it potentially came from 4655 // an input surface, but we did not propagate it prior to 4656 // API 20. Perhaps check for target SDK version. 4657#if 0 4658 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 4659 ALOGV("using buffer PTS of %" PRId64, timestampNs); 4660 timestampNs *= 1000; 4661 } 4662#endif 4663 } 4664 4665 status_t err; 4666 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 4667 if (err != OK) { 4668 ALOGW("failed to set buffer timestamp: %d", err); 4669 } 4670 4671 if ((err = mCodec->mNativeWindow->queueBuffer( 4672 mCodec->mNativeWindow.get(), 4673 info->mGraphicBuffer.get(), -1)) == OK) { 4674 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 4675 } else { 4676 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 4677 info->mStatus = BufferInfo::OWNED_BY_US; 4678 } 4679 } else { 4680 if (mCodec->mNativeWindow != NULL && 4681 (info->mData == NULL || info->mData->size() != 0)) { 4682 ATRACE_NAME("frame-drop"); 4683 } 4684 info->mStatus = BufferInfo::OWNED_BY_US; 4685 } 4686 4687 PortMode mode = getPortMode(kPortIndexOutput); 4688 4689 switch (mode) { 4690 case KEEP_BUFFERS: 4691 { 4692 // XXX fishy, revisit!!! 
What about the FREE_BUFFERS case below? 4693 4694 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4695 // We cannot resubmit the buffer we just rendered, dequeue 4696 // the spare instead. 4697 4698 info = mCodec->dequeueBufferFromNativeWindow(); 4699 } 4700 break; 4701 } 4702 4703 case RESUBMIT_BUFFERS: 4704 { 4705 if (!mCodec->mPortEOS[kPortIndexOutput]) { 4706 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4707 // We cannot resubmit the buffer we just rendered, dequeue 4708 // the spare instead. 4709 4710 info = mCodec->dequeueBufferFromNativeWindow(); 4711 } 4712 4713 if (info != NULL) { 4714 ALOGV("[%s] calling fillBuffer %u", 4715 mCodec->mComponentName.c_str(), info->mBufferID); 4716 4717 CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), 4718 (status_t)OK); 4719 4720 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4721 } 4722 } 4723 break; 4724 } 4725 4726 default: 4727 { 4728 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4729 4730 CHECK_EQ((status_t)OK, 4731 mCodec->freeBuffer(kPortIndexOutput, index)); 4732 break; 4733 } 4734 } 4735} 4736 4737//////////////////////////////////////////////////////////////////////////////// 4738 4739ACodec::UninitializedState::UninitializedState(ACodec *codec) 4740 : BaseState(codec) { 4741} 4742 4743void ACodec::UninitializedState::stateEntered() { 4744 ALOGV("Now uninitialized"); 4745 4746 if (mDeathNotifier != NULL) { 4747 IInterface::asBinder(mCodec->mOMX)->unlinkToDeath(mDeathNotifier); 4748 mDeathNotifier.clear(); 4749 } 4750 4751 mCodec->mNativeWindow.clear(); 4752 mCodec->mNode = NULL; 4753 mCodec->mOMX.clear(); 4754 mCodec->mQuirks = 0; 4755 mCodec->mFlags = 0; 4756 mCodec->mUseMetadataOnEncoderOutput = 0; 4757 mCodec->mComponentName.clear(); 4758} 4759 4760bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 4761 bool handled = false; 4762 4763 switch (msg->what()) { 4764 case ACodec::kWhatSetup: 4765 { 4766 onSetup(msg); 4767 4768 handled = true; 4769 
break; 4770 } 4771 4772 case ACodec::kWhatAllocateComponent: 4773 { 4774 onAllocateComponent(msg); 4775 handled = true; 4776 break; 4777 } 4778 4779 case ACodec::kWhatShutdown: 4780 { 4781 int32_t keepComponentAllocated; 4782 CHECK(msg->findInt32( 4783 "keepComponentAllocated", &keepComponentAllocated)); 4784 ALOGW_IF(keepComponentAllocated, 4785 "cannot keep component allocated on shutdown in Uninitialized state"); 4786 4787 sp<AMessage> notify = mCodec->mNotify->dup(); 4788 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4789 notify->post(); 4790 4791 handled = true; 4792 break; 4793 } 4794 4795 case ACodec::kWhatFlush: 4796 { 4797 sp<AMessage> notify = mCodec->mNotify->dup(); 4798 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 4799 notify->post(); 4800 4801 handled = true; 4802 break; 4803 } 4804 4805 case ACodec::kWhatReleaseCodecInstance: 4806 { 4807 // nothing to do, as we have already signaled shutdown 4808 handled = true; 4809 break; 4810 } 4811 4812 default: 4813 return BaseState::onMessageReceived(msg); 4814 } 4815 4816 return handled; 4817} 4818 4819void ACodec::UninitializedState::onSetup( 4820 const sp<AMessage> &msg) { 4821 if (onAllocateComponent(msg) 4822 && mCodec->mLoadedState->onConfigureComponent(msg)) { 4823 mCodec->mLoadedState->onStart(); 4824 } 4825} 4826 4827bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 4828 ALOGV("onAllocateComponent"); 4829 4830 CHECK(mCodec->mNode == NULL); 4831 4832 OMXClient client; 4833 CHECK_EQ(client.connect(), (status_t)OK); 4834 4835 sp<IOMX> omx = client.interface(); 4836 4837 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 4838 4839 mDeathNotifier = new DeathNotifier(notify); 4840 if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) { 4841 // This was a local binder, if it dies so do we, we won't care 4842 // about any notifications in the afterlife. 
4843 mDeathNotifier.clear(); 4844 } 4845 4846 Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs; 4847 4848 AString mime; 4849 4850 AString componentName; 4851 uint32_t quirks = 0; 4852 int32_t encoder = false; 4853 if (msg->findString("componentName", &componentName)) { 4854 ssize_t index = matchingCodecs.add(); 4855 OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index); 4856 entry->mName = String8(componentName.c_str()); 4857 4858 if (!OMXCodec::findCodecQuirks( 4859 componentName.c_str(), &entry->mQuirks)) { 4860 entry->mQuirks = 0; 4861 } 4862 } else { 4863 CHECK(msg->findString("mime", &mime)); 4864 4865 if (!msg->findInt32("encoder", &encoder)) { 4866 encoder = false; 4867 } 4868 4869 OMXCodec::findMatchingCodecs( 4870 mime.c_str(), 4871 encoder, // createEncoder 4872 NULL, // matchComponentName 4873 0, // flags 4874 &matchingCodecs); 4875 } 4876 4877 sp<CodecObserver> observer = new CodecObserver; 4878 IOMX::node_id node = NULL; 4879 4880 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 4881 ++matchIndex) { 4882 componentName = matchingCodecs.itemAt(matchIndex).mName.string(); 4883 quirks = matchingCodecs.itemAt(matchIndex).mQuirks; 4884 4885 pid_t tid = gettid(); 4886 int prevPriority = androidGetThreadPriority(tid); 4887 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 4888 status_t err = omx->allocateNode(componentName.c_str(), observer, &node); 4889 androidSetThreadPriority(tid, prevPriority); 4890 4891 if (err == OK) { 4892 break; 4893 } else { 4894 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 4895 } 4896 4897 node = NULL; 4898 } 4899 4900 if (node == NULL) { 4901 if (!mime.empty()) { 4902 ALOGE("Unable to instantiate a %scoder for type '%s'.", 4903 encoder ? 
"en" : "de", mime.c_str()); 4904 } else { 4905 ALOGE("Unable to instantiate codec '%s'.", componentName.c_str()); 4906 } 4907 4908 mCodec->signalError(OMX_ErrorComponentNotFound); 4909 return false; 4910 } 4911 4912 notify = new AMessage(kWhatOMXMessage, mCodec); 4913 observer->setNotificationMessage(notify); 4914 4915 mCodec->mComponentName = componentName; 4916 mCodec->mFlags = 0; 4917 4918 if (componentName.endsWith(".secure")) { 4919 mCodec->mFlags |= kFlagIsSecure; 4920 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 4921 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 4922 } 4923 4924 mCodec->mQuirks = quirks; 4925 mCodec->mOMX = omx; 4926 mCodec->mNode = node; 4927 4928 { 4929 sp<AMessage> notify = mCodec->mNotify->dup(); 4930 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 4931 notify->setString("componentName", mCodec->mComponentName.c_str()); 4932 notify->post(); 4933 } 4934 4935 mCodec->changeState(mCodec->mLoadedState); 4936 4937 return true; 4938} 4939 4940//////////////////////////////////////////////////////////////////////////////// 4941 4942ACodec::LoadedState::LoadedState(ACodec *codec) 4943 : BaseState(codec) { 4944} 4945 4946void ACodec::LoadedState::stateEntered() { 4947 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 4948 4949 mCodec->mPortEOS[kPortIndexInput] = 4950 mCodec->mPortEOS[kPortIndexOutput] = false; 4951 4952 mCodec->mInputEOSResult = OK; 4953 4954 mCodec->mDequeueCounter = 0; 4955 mCodec->mMetaDataBuffersToSubmit = 0; 4956 mCodec->mRepeatFrameDelayUs = -1ll; 4957 mCodec->mInputFormat.clear(); 4958 mCodec->mOutputFormat.clear(); 4959 mCodec->mBaseOutputFormat.clear(); 4960 4961 if (mCodec->mShutdownInProgress) { 4962 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 4963 4964 mCodec->mShutdownInProgress = false; 4965 mCodec->mKeepComponentAllocated = false; 4966 4967 onShutdown(keepComponentAllocated); 4968 } 4969 mCodec->mExplicitShutdown = false; 4970 4971 
mCodec->processDeferredMessages(); 4972} 4973 4974void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 4975 if (!keepComponentAllocated) { 4976 CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); 4977 4978 mCodec->changeState(mCodec->mUninitializedState); 4979 } 4980 4981 if (mCodec->mExplicitShutdown) { 4982 sp<AMessage> notify = mCodec->mNotify->dup(); 4983 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4984 notify->post(); 4985 mCodec->mExplicitShutdown = false; 4986 } 4987} 4988 4989bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 4990 bool handled = false; 4991 4992 switch (msg->what()) { 4993 case ACodec::kWhatConfigureComponent: 4994 { 4995 onConfigureComponent(msg); 4996 handled = true; 4997 break; 4998 } 4999 5000 case ACodec::kWhatCreateInputSurface: 5001 { 5002 onCreateInputSurface(msg); 5003 handled = true; 5004 break; 5005 } 5006 5007 case ACodec::kWhatStart: 5008 { 5009 onStart(); 5010 handled = true; 5011 break; 5012 } 5013 5014 case ACodec::kWhatShutdown: 5015 { 5016 int32_t keepComponentAllocated; 5017 CHECK(msg->findInt32( 5018 "keepComponentAllocated", &keepComponentAllocated)); 5019 5020 mCodec->mExplicitShutdown = true; 5021 onShutdown(keepComponentAllocated); 5022 5023 handled = true; 5024 break; 5025 } 5026 5027 case ACodec::kWhatFlush: 5028 { 5029 sp<AMessage> notify = mCodec->mNotify->dup(); 5030 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5031 notify->post(); 5032 5033 handled = true; 5034 break; 5035 } 5036 5037 default: 5038 return BaseState::onMessageReceived(msg); 5039 } 5040 5041 return handled; 5042} 5043 5044bool ACodec::LoadedState::onConfigureComponent( 5045 const sp<AMessage> &msg) { 5046 ALOGV("onConfigureComponent"); 5047 5048 CHECK(mCodec->mNode != NULL); 5049 5050 AString mime; 5051 CHECK(msg->findString("mime", &mime)); 5052 5053 status_t err = mCodec->configureCodec(mime.c_str(), msg); 5054 5055 if (err != OK) { 5056 ALOGE("[%s] configureCodec 
returning error %d", 5057 mCodec->mComponentName.c_str(), err); 5058 5059 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5060 return false; 5061 } 5062 5063 { 5064 sp<AMessage> notify = mCodec->mNotify->dup(); 5065 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 5066 notify->setMessage("input-format", mCodec->mInputFormat); 5067 notify->setMessage("output-format", mCodec->mOutputFormat); 5068 notify->post(); 5069 } 5070 5071 return true; 5072} 5073 5074void ACodec::LoadedState::onCreateInputSurface( 5075 const sp<AMessage> & /* msg */) { 5076 ALOGV("onCreateInputSurface"); 5077 5078 sp<AMessage> notify = mCodec->mNotify->dup(); 5079 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 5080 5081 sp<IGraphicBufferProducer> bufferProducer; 5082 status_t err; 5083 5084 err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, 5085 &bufferProducer); 5086 5087 if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) { 5088 err = mCodec->mOMX->setInternalOption( 5089 mCodec->mNode, 5090 kPortIndexInput, 5091 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 5092 &mCodec->mRepeatFrameDelayUs, 5093 sizeof(mCodec->mRepeatFrameDelayUs)); 5094 5095 if (err != OK) { 5096 ALOGE("[%s] Unable to configure option to repeat previous " 5097 "frames (err %d)", 5098 mCodec->mComponentName.c_str(), 5099 err); 5100 } 5101 } 5102 5103 if (err == OK && mCodec->mMaxPtsGapUs > 0ll) { 5104 err = mCodec->mOMX->setInternalOption( 5105 mCodec->mNode, 5106 kPortIndexInput, 5107 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 5108 &mCodec->mMaxPtsGapUs, 5109 sizeof(mCodec->mMaxPtsGapUs)); 5110 5111 if (err != OK) { 5112 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 5113 mCodec->mComponentName.c_str(), 5114 err); 5115 } 5116 } 5117 5118 if (err == OK && mCodec->mMaxFps > 0) { 5119 err = mCodec->mOMX->setInternalOption( 5120 mCodec->mNode, 5121 kPortIndexInput, 5122 IOMX::INTERNAL_OPTION_MAX_FPS, 5123 &mCodec->mMaxFps, 5124 
sizeof(mCodec->mMaxFps)); 5125 5126 if (err != OK) { 5127 ALOGE("[%s] Unable to configure max fps (err %d)", 5128 mCodec->mComponentName.c_str(), 5129 err); 5130 } 5131 } 5132 5133 if (err == OK && mCodec->mTimePerCaptureUs > 0ll 5134 && mCodec->mTimePerFrameUs > 0ll) { 5135 int64_t timeLapse[2]; 5136 timeLapse[0] = mCodec->mTimePerFrameUs; 5137 timeLapse[1] = mCodec->mTimePerCaptureUs; 5138 err = mCodec->mOMX->setInternalOption( 5139 mCodec->mNode, 5140 kPortIndexInput, 5141 IOMX::INTERNAL_OPTION_TIME_LAPSE, 5142 &timeLapse[0], 5143 sizeof(timeLapse)); 5144 5145 if (err != OK) { 5146 ALOGE("[%s] Unable to configure time lapse (err %d)", 5147 mCodec->mComponentName.c_str(), 5148 err); 5149 } 5150 } 5151 5152 if (err == OK && mCodec->mCreateInputBuffersSuspended) { 5153 bool suspend = true; 5154 err = mCodec->mOMX->setInternalOption( 5155 mCodec->mNode, 5156 kPortIndexInput, 5157 IOMX::INTERNAL_OPTION_SUSPEND, 5158 &suspend, 5159 sizeof(suspend)); 5160 5161 if (err != OK) { 5162 ALOGE("[%s] Unable to configure option to suspend (err %d)", 5163 mCodec->mComponentName.c_str(), 5164 err); 5165 } 5166 } 5167 5168 if (err == OK) { 5169 notify->setObject("input-surface", 5170 new BufferProducerWrapper(bufferProducer)); 5171 } else { 5172 // Can't use mCodec->signalError() here -- MediaCodec won't forward 5173 // the error through because it's in the "configured" state. We 5174 // send a kWhatInputSurfaceCreated with an error value instead. 
5175 ALOGE("[%s] onCreateInputSurface returning error %d", 5176 mCodec->mComponentName.c_str(), err); 5177 notify->setInt32("err", err); 5178 } 5179 notify->post(); 5180} 5181 5182void ACodec::LoadedState::onStart() { 5183 ALOGV("onStart"); 5184 5185 CHECK_EQ(mCodec->mOMX->sendCommand( 5186 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), 5187 (status_t)OK); 5188 5189 mCodec->changeState(mCodec->mLoadedToIdleState); 5190} 5191 5192//////////////////////////////////////////////////////////////////////////////// 5193 5194ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 5195 : BaseState(codec) { 5196} 5197 5198void ACodec::LoadedToIdleState::stateEntered() { 5199 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 5200 5201 status_t err; 5202 if ((err = allocateBuffers()) != OK) { 5203 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 5204 "(error 0x%08x)", 5205 err); 5206 5207 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5208 5209 mCodec->changeState(mCodec->mLoadedState); 5210 } 5211} 5212 5213status_t ACodec::LoadedToIdleState::allocateBuffers() { 5214 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 5215 5216 if (err != OK) { 5217 return err; 5218 } 5219 5220 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 5221} 5222 5223bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 5224 switch (msg->what()) { 5225 case kWhatSetParameters: 5226 case kWhatShutdown: 5227 { 5228 mCodec->deferMessage(msg); 5229 return true; 5230 } 5231 5232 case kWhatSignalEndOfInputStream: 5233 { 5234 mCodec->onSignalEndOfInputStream(); 5235 return true; 5236 } 5237 5238 case kWhatResume: 5239 { 5240 // We'll be active soon enough. 5241 return true; 5242 } 5243 5244 case kWhatFlush: 5245 { 5246 // We haven't even started yet, so we're flushed alright... 
5247 sp<AMessage> notify = mCodec->mNotify->dup(); 5248 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5249 notify->post(); 5250 return true; 5251 } 5252 5253 default: 5254 return BaseState::onMessageReceived(msg); 5255 } 5256} 5257 5258bool ACodec::LoadedToIdleState::onOMXEvent( 5259 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5260 switch (event) { 5261 case OMX_EventCmdComplete: 5262 { 5263 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5264 CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); 5265 5266 CHECK_EQ(mCodec->mOMX->sendCommand( 5267 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting), 5268 (status_t)OK); 5269 5270 mCodec->changeState(mCodec->mIdleToExecutingState); 5271 5272 return true; 5273 } 5274 5275 default: 5276 return BaseState::onOMXEvent(event, data1, data2); 5277 } 5278} 5279 5280//////////////////////////////////////////////////////////////////////////////// 5281 5282ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 5283 : BaseState(codec) { 5284} 5285 5286void ACodec::IdleToExecutingState::stateEntered() { 5287 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 5288} 5289 5290bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 5291 switch (msg->what()) { 5292 case kWhatSetParameters: 5293 case kWhatShutdown: 5294 { 5295 mCodec->deferMessage(msg); 5296 return true; 5297 } 5298 5299 case kWhatResume: 5300 { 5301 // We'll be active soon enough. 5302 return true; 5303 } 5304 5305 case kWhatFlush: 5306 { 5307 // We haven't even started yet, so we're flushed alright... 
5308 sp<AMessage> notify = mCodec->mNotify->dup(); 5309 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5310 notify->post(); 5311 5312 return true; 5313 } 5314 5315 case kWhatSignalEndOfInputStream: 5316 { 5317 mCodec->onSignalEndOfInputStream(); 5318 return true; 5319 } 5320 5321 default: 5322 return BaseState::onMessageReceived(msg); 5323 } 5324} 5325 5326bool ACodec::IdleToExecutingState::onOMXEvent( 5327 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5328 switch (event) { 5329 case OMX_EventCmdComplete: 5330 { 5331 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5332 CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting); 5333 5334 mCodec->mExecutingState->resume(); 5335 mCodec->changeState(mCodec->mExecutingState); 5336 5337 return true; 5338 } 5339 5340 default: 5341 return BaseState::onOMXEvent(event, data1, data2); 5342 } 5343} 5344 5345//////////////////////////////////////////////////////////////////////////////// 5346 5347ACodec::ExecutingState::ExecutingState(ACodec *codec) 5348 : BaseState(codec), 5349 mActive(false) { 5350} 5351 5352ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 5353 OMX_U32 /* portIndex */) { 5354 return RESUBMIT_BUFFERS; 5355} 5356 5357void ACodec::ExecutingState::submitOutputMetaBuffers() { 5358 // submit as many buffers as there are input buffers with the codec 5359 // in case we are in port reconfiguring 5360 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5361 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5362 5363 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 5364 if (mCodec->submitOutputMetaDataBuffer() != OK) 5365 break; 5366 } 5367 } 5368 5369 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 5370 mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); 5371} 5372 5373void ACodec::ExecutingState::submitRegularOutputBuffers() { 5374 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 5375 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 5376 5377 if (mCodec->mNativeWindow != NULL) { 5378 CHECK(info->mStatus == BufferInfo::OWNED_BY_US 5379 || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW); 5380 5381 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 5382 continue; 5383 } 5384 } else { 5385 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5386 } 5387 5388 ALOGV("[%s] calling fillBuffer %p", 5389 mCodec->mComponentName.c_str(), info->mBufferID); 5390 5391 CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), 5392 (status_t)OK); 5393 5394 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5395 } 5396} 5397 5398void ACodec::ExecutingState::submitOutputBuffers() { 5399 submitRegularOutputBuffers(); 5400 if (mCodec->mStoreMetaDataInOutputBuffers) { 5401 submitOutputMetaBuffers(); 5402 } 5403} 5404 5405void ACodec::ExecutingState::resume() { 5406 if (mActive) { 5407 ALOGV("[%s] We're already active, no need to resume.", 5408 mCodec->mComponentName.c_str()); 5409 5410 return; 5411 } 5412 5413 submitOutputBuffers(); 5414 5415 // Post all available input buffers 5416 CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u); 5417 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 5418 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5419 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5420 postFillThisBuffer(info); 5421 } 5422 } 5423 5424 mActive = true; 5425} 5426 5427void ACodec::ExecutingState::stateEntered() { 5428 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 5429 5430 mCodec->processDeferredMessages(); 5431} 5432 5433bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 5434 bool handled = false; 5435 5436 switch (msg->what()) { 5437 case kWhatShutdown: 5438 { 5439 int32_t keepComponentAllocated; 5440 CHECK(msg->findInt32( 5441 "keepComponentAllocated", &keepComponentAllocated)); 5442 5443 mCodec->mShutdownInProgress = true; 5444 
mCodec->mExplicitShutdown = true; 5445 mCodec->mKeepComponentAllocated = keepComponentAllocated; 5446 5447 mActive = false; 5448 5449 CHECK_EQ(mCodec->mOMX->sendCommand( 5450 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), 5451 (status_t)OK); 5452 5453 mCodec->changeState(mCodec->mExecutingToIdleState); 5454 5455 handled = true; 5456 break; 5457 } 5458 5459 case kWhatFlush: 5460 { 5461 ALOGV("[%s] ExecutingState flushing now " 5462 "(codec owns %d/%d input, %d/%d output).", 5463 mCodec->mComponentName.c_str(), 5464 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 5465 mCodec->mBuffers[kPortIndexInput].size(), 5466 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 5467 mCodec->mBuffers[kPortIndexOutput].size()); 5468 5469 mActive = false; 5470 5471 CHECK_EQ(mCodec->mOMX->sendCommand( 5472 mCodec->mNode, OMX_CommandFlush, OMX_ALL), 5473 (status_t)OK); 5474 5475 mCodec->changeState(mCodec->mFlushingState); 5476 handled = true; 5477 break; 5478 } 5479 5480 case kWhatResume: 5481 { 5482 resume(); 5483 5484 handled = true; 5485 break; 5486 } 5487 5488 case kWhatRequestIDRFrame: 5489 { 5490 status_t err = mCodec->requestIDRFrame(); 5491 if (err != OK) { 5492 ALOGW("Requesting an IDR frame failed."); 5493 } 5494 5495 handled = true; 5496 break; 5497 } 5498 5499 case kWhatSetParameters: 5500 { 5501 sp<AMessage> params; 5502 CHECK(msg->findMessage("params", ¶ms)); 5503 5504 status_t err = mCodec->setParameters(params); 5505 5506 sp<AMessage> reply; 5507 if (msg->findMessage("reply", &reply)) { 5508 reply->setInt32("err", err); 5509 reply->post(); 5510 } 5511 5512 handled = true; 5513 break; 5514 } 5515 5516 case ACodec::kWhatSignalEndOfInputStream: 5517 { 5518 mCodec->onSignalEndOfInputStream(); 5519 handled = true; 5520 break; 5521 } 5522 5523 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 5524 case kWhatSubmitOutputMetaDataBufferIfEOS: 5525 { 5526 if (mCodec->mPortEOS[kPortIndexInput] && 5527 !mCodec->mPortEOS[kPortIndexOutput]) { 5528 status_t 
err = mCodec->submitOutputMetaDataBuffer(); 5529 if (err == OK) { 5530 mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); 5531 } 5532 } 5533 return true; 5534 } 5535 5536 default: 5537 handled = BaseState::onMessageReceived(msg); 5538 break; 5539 } 5540 5541 return handled; 5542} 5543 5544status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 5545 int32_t videoBitrate; 5546 if (params->findInt32("video-bitrate", &videoBitrate)) { 5547 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 5548 InitOMXParams(&configParams); 5549 configParams.nPortIndex = kPortIndexOutput; 5550 configParams.nEncodeBitrate = videoBitrate; 5551 5552 status_t err = mOMX->setConfig( 5553 mNode, 5554 OMX_IndexConfigVideoBitrate, 5555 &configParams, 5556 sizeof(configParams)); 5557 5558 if (err != OK) { 5559 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 5560 videoBitrate, err); 5561 5562 return err; 5563 } 5564 } 5565 5566 int64_t skipFramesBeforeUs; 5567 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 5568 status_t err = 5569 mOMX->setInternalOption( 5570 mNode, 5571 kPortIndexInput, 5572 IOMX::INTERNAL_OPTION_START_TIME, 5573 &skipFramesBeforeUs, 5574 sizeof(skipFramesBeforeUs)); 5575 5576 if (err != OK) { 5577 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 5578 return err; 5579 } 5580 } 5581 5582 int32_t dropInputFrames; 5583 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 5584 bool suspend = dropInputFrames != 0; 5585 5586 status_t err = 5587 mOMX->setInternalOption( 5588 mNode, 5589 kPortIndexInput, 5590 IOMX::INTERNAL_OPTION_SUSPEND, 5591 &suspend, 5592 sizeof(suspend)); 5593 5594 if (err != OK) { 5595 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 5596 return err; 5597 } 5598 } 5599 5600 int32_t dummy; 5601 if (params->findInt32("request-sync", &dummy)) { 5602 status_t err = requestIDRFrame(); 5603 5604 if (err != OK) { 5605 ALOGE("Requesting a sync frame failed w/ err %d", err); 
5606 return err; 5607 } 5608 } 5609 5610 return OK; 5611} 5612 5613void ACodec::onSignalEndOfInputStream() { 5614 sp<AMessage> notify = mNotify->dup(); 5615 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 5616 5617 status_t err = mOMX->signalEndOfInputStream(mNode); 5618 if (err != OK) { 5619 notify->setInt32("err", err); 5620 } 5621 notify->post(); 5622} 5623 5624bool ACodec::ExecutingState::onOMXEvent( 5625 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5626 switch (event) { 5627 case OMX_EventPortSettingsChanged: 5628 { 5629 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 5630 5631 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 5632 mCodec->mMetaDataBuffersToSubmit = 0; 5633 CHECK_EQ(mCodec->mOMX->sendCommand( 5634 mCodec->mNode, 5635 OMX_CommandPortDisable, kPortIndexOutput), 5636 (status_t)OK); 5637 5638 mCodec->freeOutputBuffersNotOwnedByComponent(); 5639 5640 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 5641 } else if (data2 == OMX_IndexConfigCommonOutputCrop) { 5642 mCodec->mSentFormat = false; 5643 } else { 5644 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx", 5645 mCodec->mComponentName.c_str(), data2); 5646 } 5647 5648 return true; 5649 } 5650 5651 case OMX_EventBufferFlag: 5652 { 5653 return true; 5654 } 5655 5656 default: 5657 return BaseState::onOMXEvent(event, data1, data2); 5658 } 5659} 5660 5661//////////////////////////////////////////////////////////////////////////////// 5662 5663ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 5664 ACodec *codec) 5665 : BaseState(codec) { 5666} 5667 5668ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 5669 OMX_U32 portIndex) { 5670 if (portIndex == kPortIndexOutput) { 5671 return FREE_BUFFERS; 5672 } 5673 5674 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 5675 5676 return RESUBMIT_BUFFERS; 5677} 5678 5679bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 5680 const sp<AMessage> &msg) { 5681 bool 
handled = false; 5682 5683 switch (msg->what()) { 5684 case kWhatFlush: 5685 case kWhatShutdown: 5686 case kWhatResume: 5687 case kWhatSetParameters: 5688 { 5689 if (msg->what() == kWhatResume) { 5690 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 5691 } 5692 5693 mCodec->deferMessage(msg); 5694 handled = true; 5695 break; 5696 } 5697 5698 default: 5699 handled = BaseState::onMessageReceived(msg); 5700 break; 5701 } 5702 5703 return handled; 5704} 5705 5706void ACodec::OutputPortSettingsChangedState::stateEntered() { 5707 ALOGV("[%s] Now handling output port settings change", 5708 mCodec->mComponentName.c_str()); 5709} 5710 5711bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 5712 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5713 switch (event) { 5714 case OMX_EventCmdComplete: 5715 { 5716 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 5717 CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); 5718 5719 ALOGV("[%s] Output port now disabled.", 5720 mCodec->mComponentName.c_str()); 5721 5722 CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty()); 5723 mCodec->mDealer[kPortIndexOutput].clear(); 5724 5725 CHECK_EQ(mCodec->mOMX->sendCommand( 5726 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput), 5727 (status_t)OK); 5728 5729 status_t err; 5730 if ((err = mCodec->allocateBuffersOnPort( 5731 kPortIndexOutput)) != OK) { 5732 ALOGE("Failed to allocate output port buffers after " 5733 "port reconfiguration (error 0x%08x)", 5734 err); 5735 5736 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5737 5738 // This is technically not correct, but appears to be 5739 // the only way to free the component instance. 5740 // Controlled transitioning from excecuting->idle 5741 // and idle->loaded seem impossible probably because 5742 // the output port never finishes re-enabling. 
5743 mCodec->mShutdownInProgress = true; 5744 mCodec->mKeepComponentAllocated = false; 5745 mCodec->changeState(mCodec->mLoadedState); 5746 } 5747 5748 return true; 5749 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 5750 CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); 5751 5752 mCodec->mSentFormat = false; 5753 5754 ALOGV("[%s] Output port now reenabled.", 5755 mCodec->mComponentName.c_str()); 5756 5757 if (mCodec->mExecutingState->active()) { 5758 mCodec->mExecutingState->submitOutputBuffers(); 5759 } 5760 5761 mCodec->changeState(mCodec->mExecutingState); 5762 5763 return true; 5764 } 5765 5766 return false; 5767 } 5768 5769 default: 5770 return false; 5771 } 5772} 5773 5774//////////////////////////////////////////////////////////////////////////////// 5775 5776ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 5777 : BaseState(codec), 5778 mComponentNowIdle(false) { 5779} 5780 5781bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 5782 bool handled = false; 5783 5784 switch (msg->what()) { 5785 case kWhatFlush: 5786 { 5787 // Don't send me a flush request if you previously wanted me 5788 // to shutdown. 5789 TRESPASS(); 5790 break; 5791 } 5792 5793 case kWhatShutdown: 5794 { 5795 // We're already doing that... 
5796 5797 handled = true; 5798 break; 5799 } 5800 5801 default: 5802 handled = BaseState::onMessageReceived(msg); 5803 break; 5804 } 5805 5806 return handled; 5807} 5808 5809void ACodec::ExecutingToIdleState::stateEntered() { 5810 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 5811 5812 mComponentNowIdle = false; 5813 mCodec->mSentFormat = false; 5814} 5815 5816bool ACodec::ExecutingToIdleState::onOMXEvent( 5817 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5818 switch (event) { 5819 case OMX_EventCmdComplete: 5820 { 5821 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5822 CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); 5823 5824 mComponentNowIdle = true; 5825 5826 changeStateIfWeOwnAllBuffers(); 5827 5828 return true; 5829 } 5830 5831 case OMX_EventPortSettingsChanged: 5832 case OMX_EventBufferFlag: 5833 { 5834 // We're shutting down and don't care about this anymore. 5835 return true; 5836 } 5837 5838 default: 5839 return BaseState::onOMXEvent(event, data1, data2); 5840 } 5841} 5842 5843void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 5844 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 5845 CHECK_EQ(mCodec->mOMX->sendCommand( 5846 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded), 5847 (status_t)OK); 5848 5849 CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK); 5850 CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK); 5851 5852 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 5853 && mCodec->mNativeWindow != NULL) { 5854 // We push enough 1x1 blank buffers to ensure that one of 5855 // them has made it to the display. This allows the OMX 5856 // component teardown to zero out any protected buffers 5857 // without the risk of scanning out one of those buffers. 
5858 mCodec->pushBlankBuffersToNativeWindow(); 5859 } 5860 5861 mCodec->changeState(mCodec->mIdleToLoadedState); 5862 } 5863} 5864 5865void ACodec::ExecutingToIdleState::onInputBufferFilled( 5866 const sp<AMessage> &msg) { 5867 BaseState::onInputBufferFilled(msg); 5868 5869 changeStateIfWeOwnAllBuffers(); 5870} 5871 5872void ACodec::ExecutingToIdleState::onOutputBufferDrained( 5873 const sp<AMessage> &msg) { 5874 BaseState::onOutputBufferDrained(msg); 5875 5876 changeStateIfWeOwnAllBuffers(); 5877} 5878 5879//////////////////////////////////////////////////////////////////////////////// 5880 5881ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 5882 : BaseState(codec) { 5883} 5884 5885bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 5886 bool handled = false; 5887 5888 switch (msg->what()) { 5889 case kWhatShutdown: 5890 { 5891 // We're already doing that... 5892 5893 handled = true; 5894 break; 5895 } 5896 5897 case kWhatFlush: 5898 { 5899 // Don't send me a flush request if you previously wanted me 5900 // to shutdown. 
5901 TRESPASS(); 5902 break; 5903 } 5904 5905 default: 5906 handled = BaseState::onMessageReceived(msg); 5907 break; 5908 } 5909 5910 return handled; 5911} 5912 5913void ACodec::IdleToLoadedState::stateEntered() { 5914 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 5915} 5916 5917bool ACodec::IdleToLoadedState::onOMXEvent( 5918 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5919 switch (event) { 5920 case OMX_EventCmdComplete: 5921 { 5922 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5923 CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded); 5924 5925 mCodec->changeState(mCodec->mLoadedState); 5926 5927 return true; 5928 } 5929 5930 default: 5931 return BaseState::onOMXEvent(event, data1, data2); 5932 } 5933} 5934 5935//////////////////////////////////////////////////////////////////////////////// 5936 5937ACodec::FlushingState::FlushingState(ACodec *codec) 5938 : BaseState(codec) { 5939} 5940 5941void ACodec::FlushingState::stateEntered() { 5942 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 5943 5944 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 5945} 5946 5947bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 5948 bool handled = false; 5949 5950 switch (msg->what()) { 5951 case kWhatShutdown: 5952 { 5953 mCodec->deferMessage(msg); 5954 break; 5955 } 5956 5957 case kWhatFlush: 5958 { 5959 // We're already doing this right now. 
5960 handled = true; 5961 break; 5962 } 5963 5964 default: 5965 handled = BaseState::onMessageReceived(msg); 5966 break; 5967 } 5968 5969 return handled; 5970} 5971 5972bool ACodec::FlushingState::onOMXEvent( 5973 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5974 ALOGV("[%s] FlushingState onOMXEvent(%d,%ld)", 5975 mCodec->mComponentName.c_str(), event, data1); 5976 5977 switch (event) { 5978 case OMX_EventCmdComplete: 5979 { 5980 CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush); 5981 5982 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 5983 CHECK(!mFlushComplete[data2]); 5984 mFlushComplete[data2] = true; 5985 5986 if (mFlushComplete[kPortIndexInput] 5987 && mFlushComplete[kPortIndexOutput]) { 5988 changeStateIfWeOwnAllBuffers(); 5989 } 5990 } else { 5991 CHECK_EQ(data2, OMX_ALL); 5992 CHECK(mFlushComplete[kPortIndexInput]); 5993 CHECK(mFlushComplete[kPortIndexOutput]); 5994 5995 changeStateIfWeOwnAllBuffers(); 5996 } 5997 5998 return true; 5999 } 6000 6001 case OMX_EventPortSettingsChanged: 6002 { 6003 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 6004 msg->setInt32("type", omx_message::EVENT); 6005 msg->setInt32("node", mCodec->mNode); 6006 msg->setInt32("event", event); 6007 msg->setInt32("data1", data1); 6008 msg->setInt32("data2", data2); 6009 6010 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 6011 mCodec->mComponentName.c_str()); 6012 6013 mCodec->deferMessage(msg); 6014 6015 return true; 6016 } 6017 6018 default: 6019 return BaseState::onOMXEvent(event, data1, data2); 6020 } 6021 6022 return true; 6023} 6024 6025void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 6026 BaseState::onOutputBufferDrained(msg); 6027 6028 changeStateIfWeOwnAllBuffers(); 6029} 6030 6031void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 6032 BaseState::onInputBufferFilled(msg); 6033 6034 changeStateIfWeOwnAllBuffers(); 6035} 6036 6037void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 6038 if 
(mFlushComplete[kPortIndexInput] 6039 && mFlushComplete[kPortIndexOutput] 6040 && mCodec->allYourBuffersAreBelongToUs()) { 6041 // We now own all buffers except possibly those still queued with 6042 // the native window for rendering. Let's get those back as well. 6043 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 6044 6045 sp<AMessage> notify = mCodec->mNotify->dup(); 6046 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6047 notify->post(); 6048 6049 mCodec->mPortEOS[kPortIndexInput] = 6050 mCodec->mPortEOS[kPortIndexOutput] = false; 6051 6052 mCodec->mInputEOSResult = OK; 6053 6054 if (mCodec->mSkipCutBuffer != NULL) { 6055 mCodec->mSkipCutBuffer->clear(); 6056 } 6057 6058 mCodec->changeState(mCodec->mExecutingState); 6059 } 6060} 6061 6062} // namespace android 6063