ACodec.cpp revision a23eea5d6160e2ebddb925f20becfc3c52639243
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <media/stagefright/ACodec.h> 28 29#include <binder/MemoryDealer.h> 30 31#include <media/stagefright/foundation/hexdump.h> 32#include <media/stagefright/foundation/ABuffer.h> 33#include <media/stagefright/foundation/ADebug.h> 34#include <media/stagefright/foundation/AMessage.h> 35#include <media/stagefright/foundation/AUtils.h> 36 37#include <media/stagefright/BufferProducerWrapper.h> 38#include <media/stagefright/MediaCodecList.h> 39#include <media/stagefright/MediaDefs.h> 40#include <media/stagefright/NativeWindowWrapper.h> 41#include <media/stagefright/OMXClient.h> 42#include <media/stagefright/OMXCodec.h> 43 44#include <media/hardware/HardwareAPI.h> 45 46#include <OMX_AudioExt.h> 47#include <OMX_VideoExt.h> 48#include <OMX_Component.h> 49#include <OMX_IndexExt.h> 50 51#include "include/avc_utils.h" 52 53namespace android { 54 55// OMX errors are directly mapped into status_t range if 56// there is no corresponding MediaError status code. 57// Use the statusFromOMXError(int32_t omxError) function. 58// 59// Currently this is a direct map. 
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            // can trigger illegal argument error for provided names.
            return NAME_NOT_FOUND;
        default:
            // pass a recognized OMX error through unchanged; otherwise 0 (OK,
            // i.e. no translation required)
            return isOMXError(omxError) ? omxError : 0;
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Fills in the size/version header that every OMX IL parameter struct expects.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

// Receives omx_message callbacks from the OMX component and forwards each one
// to the ACodec looper thread as a copy of mNotify with the message payload
// flattened into AMessage fields.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();

        msg->setInt32("type", omx_msg.type);
        msg->setInt32("node", omx_msg.node);

        switch (omx_msg.type) {
            case omx_message::EVENT:
            {
                msg->setInt32("event", omx_msg.u.event_data.event);
                msg->setInt32("data1", omx_msg.u.event_data.data1);
                msg->setInt32("data2", omx_msg.u.event_data.data2);
                break;
            }

            case omx_message::EMPTY_BUFFER_DONE:
            {
                msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                break;
            }

            case omx_message::FILL_BUFFER_DONE:
            {
                msg->setInt32(
                        "buffer", omx_msg.u.extended_buffer_data.buffer);
                msg->setInt32(
                        "range_offset",
                        omx_msg.u.extended_buffer_data.range_offset);
                msg->setInt32(
                        "range_length",
                        omx_msg.u.extended_buffer_data.range_length);
                msg->setInt32(
                        "flags",
                        omx_msg.u.extended_buffer_data.flags);
                msg->setInt64(
                        "timestamp",
                        omx_msg.u.extended_buffer_data.timestamp);
                break;
            }

            default:
                // unknown message type from the component is a programming
                // error, not a runtime condition.
                TRESPASS();
                break;
        }

        msg->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all ACodec states: shared OMX message dispatch and
// buffer-ownership bookkeeping.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const
sp<AState> &parentState = NULL);

protected:
    // What to do with buffers returned by the component while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    bool onOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the supplied notification message when the remote binder (the
// mediaserver-side OMX service) dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// No component allocated yet; handles setup and component allocation.
struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated (OMX "Loaded"); handles configure, input-surface
// creation, start and shutdown.
struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: buffers are being allocated on both ports while the
// component moves Loaded -> Idle.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to reach Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: buffers are flowing between the client, ACodec and the
// component.
struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// The output port is being reconfigured (e.g. resolution change); output
// buffers are being torn down and reallocated.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state on shutdown: waiting for the component to reach Idle
// and for all buffers to come back to us.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state on shutdown: buffers freed, waiting for Idle -> Loaded.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Both ports are being flushed; tracks per-port completion in mFlushComplete.
struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Constructs the full state machine and starts out in UninitializedState.
ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mSentFormat(false),
      mIsEncoder(false),
      mUseMetadataOnEncoderOutput(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mStoreMetaDataInOutputBuffers(false),
      mMetaDataBuffersToSubmit(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] =
mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below only post messages to the looper; all
// real work happens on the ACodec thread in the current state's handler.

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(id());
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(id());
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(id());
    msg->post();
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, id()))->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, id()))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, id()))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, id()))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, id()))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, id());
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, id()))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetaDataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post();
    }
}

// Allocates all buffers for the given port. Output ports backed by a native
// window go through the native-window paths; everything else gets
// shared-memory buffers from a MemoryDealer handed to the OMX component.
// On success the client is notified with kWhatBuffersAllocated and the
// per-buffer IDs/data in a PortDescription.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (mStoreMetaDataInOutputBuffers) {
            err = allocateOutputMetaDataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            ALOGV("[%s] Allocating %u buffers of size %u on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, def.nBufferSize,
                    portIndex == kPortIndexInput ? "input" : "output");

            // NOTE(review): nBufferCountActual and nBufferSize come from the
            // component; this multiply could overflow size_t on 32-bit —
            // consider a checked multiply.
            size_t totalSize = def.nBufferCountActual * def.nBufferSize;
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
                CHECK(mem.get() != NULL);

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ?
OMXCodec::kRequiresAllocateBufferOnInputPorts
                        : OMXCodec::kRequiresAllocateBufferOnOutputPorts;

                if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
                        || mUseMetadataOnEncoderOutput) {
                    // secure input / meta-output buffers must be allocated by
                    // the component itself, not backed by our shared memory.
                    mem.clear();

                    void *ptr;
                    err = mOMX->allocateBuffer(
                            mNode, portIndex, def.nBufferSize, &info.mBufferID,
                            &ptr);

                    // NOTE(review): err is not checked before ptr is wrapped
                    // below; a failed allocateBuffer would leave ptr
                    // uninitialized — verify upstream guarantees.
                    int32_t bufSize = mUseMetadataOnEncoderOutput ?
                            (4 + sizeof(buffer_handle_t)) : def.nBufferSize;

                    info.mData = new ABuffer(ptr, bufSize);
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID);
                }

                if (mem != NULL) {
                    info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Tell the client about the allocated buffers.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];

        desc->addBuffer(info.mBufferID, info.mData);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the output native window (geometry, transform, usage, buffer
// count) to match the component's output port definition, and returns the
// negotiated buffer count/size and the window's minimum-undequeued count.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    err = native_window_set_buffers_geometry(
            mNativeWindow.get(),
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat);

    if (err != 0) {
        ALOGE("native_window_set_buffers_geometry failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    if (mRotationDegrees != 0) {
        uint32_t transform = 0;
        switch (mRotationDegrees) {
            case 0: transform = 0; break;
            case 90: transform = HAL_TRANSFORM_ROT_90; break;
            case 180: transform = HAL_TRANSFORM_ROT_180; break;
            case 270: transform = HAL_TRANSFORM_ROT_270; break;
            default: transform = 0; break;
        }

        if (transform > 0) {
            err = native_window_set_buffers_transform(
                    mNativeWindow.get(), transform);
            if (err != 0) {
                ALOGE("native_window_set_buffers_transform failed: %s (%d)",
                        strerror(-err), -err);
                return err;
            }
        }
    }

    // Set up the native window.
    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    // Make sure to check whether either Stagefright or the video decoder
    // requested protected buffers.
    if (usage & GRALLOC_USAGE_PROTECTED) {
        // Verify that the ANativeWindow sends images directly to
        // SurfaceFlinger.
        int queuesToNativeWindow = 0;
        err = mNativeWindow->query(
                mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
                &queuesToNativeWindow);
        if (err != 0) {
            ALOGE("error authenticating native window: %d", err);
            return err;
        }
        if (queuesToNativeWindow != 1) {
            ALOGE("native window could not be authenticated");
            return PERMISSION_DENIED;
        }
    }

    int consumerUsage = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS,
            &consumerUsage);
    if (err != 0) {
        ALOGW("failed to get consumer usage bits. ignoring");
        err = 0;
    }

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec) + %#x(Consumer) = %#x",
            omxUsage, usage, consumerUsage, usage | consumerUsage);
    usage |= consumerUsage;
    err = native_window_set_usage(
            mNativeWindow.get(),
            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);

    if (err != 0) {
        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues graphic buffers from the native window, registers each one with
// the OMX component via useGraphicBuffer, then cancels back the window's
// required minimum of undequeued buffers (or everything, on error).
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus =
BufferInfo::OWNED_BY_US;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        status_t error = cancelBufferToNativeWindow(info);
        // keep the first error encountered, but cancel everything.
        if (err == 0) {
            err = error;
        }
    }

    return err;
}

// Meta-mode variant: instead of registering graphic buffers up front,
// allocate small shared-memory blobs (VideoDecoderOutputMetaData) that the
// component fills with gralloc handles; actual graphic buffers are dequeued
// lazily in dequeueBufferFromNativeWindow().
status_t ACodec::allocateOutputMetaDataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // NOTE(review): the dealer is sized at 8 bytes per buffer, yet each
    // allocation below is sizeof(struct VideoDecoderOutputMetaData) —
    // confirm the dealer capacity is sufficient (allocate() is CHECKed).
    size_t totalSize = bufferCount * 8;
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(
                sizeof(struct VideoDecoderOutputMetaData));
        CHECK(mem.get() != NULL);
        info.mData = new ABuffer(mem->pointer(), mem->size());

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID);

        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Hands one meta-mode output buffer to the component (fillBuffer), backed by
// a freshly dequeued native-window buffer. No-op once the initial quota of
// mMetaDataBuffersToSubmit has been used up.
status_t ACodec::submitOutputMetaDataBuffer() {
    CHECK(mStoreMetaDataInOutputBuffers);
    if (mMetaDataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL)
        return ERROR_IO;

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetaDataBuffersToSubmit;
    CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID),
             (status_t)OK);

    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    return OK;
}

// Returns a buffer we own back to the native window and marks it
// OWNED_BY_NATIVE_WINDOW (ownership is transferred even if cancelBuffer
// reports an error; the error is only logged and returned).
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), -1);

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);

    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Dequeues a buffer from the native window and returns the matching
// BufferInfo (now OWNED_BY_US). In meta mode an unknown handle replaces the
// least-recently-dequeued buffer instead. Returns NULL on dequeue failure.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
            " video playback mode mode!");
        return NULL;
    }

    if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) {
        ALOGE("dequeueBuffer failed.");
        return NULL;
    }

    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        if (info->mGraphicBuffer != NULL &&
            info->mGraphicBuffer->handle == buf->handle) {
            CHECK_EQ((int)info->mStatus,
                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);

            info->mStatus = BufferInfo::OWNED_BY_US;

            return info;
        }

        // Track the least-recently-dequeued window-owned buffer in case the
        // dequeued handle is not yet known to us (meta mode).
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    if (oldest) {
        CHECK(mStoreMetaDataInOutputBuffers);

        // discard buffer in LRU info and replace with new buffer
        oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
        oldest->mStatus = BufferInfo::OWNED_BY_US;

        mOMX->updateGraphicBufferInMeta(
                mNode, kPortIndexOutput, oldest->mGraphicBuffer,
                oldest->mBufferID);

        VideoDecoderOutputMetaData *metaData =
            reinterpret_cast<VideoDecoderOutputMetaData *>(
                    oldest->mData->base());
        CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);

        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                oldest - &mBuffers[kPortIndexOutput][0],
                mDequeueCounter - oldest->mDequeuedAt,
                metaData->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());

        return oldest;
    }

    // Dequeued a buffer we have no record of — programming error.
    TRESPASS();

    return NULL;
}

// Frees every buffer on the port (back-to-front so indices stay valid) and
// releases the port's MemoryDealer.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
    }

    mDealer[portIndex].clear();

    return OK;
}

// Frees only the output buffers we (or the native window) currently hold;
// buffers still with the component or downstream are left alone.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
        }
    }

    return OK;
}

// Frees the i-th buffer of a port: returns it to the native window first if
// we own it, then releases it in the OMX component and drops our record.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

    CHECK(info->mStatus == BufferInfo::OWNED_BY_US
            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);

    if (portIndex == kPortIndexOutput && mNativeWindow != NULL
            && info->mStatus == BufferInfo::OWNED_BY_US) {
        cancelBufferToNativeWindow(info);
    }

    CHECK_EQ(mOMX->freeBuffer(
                mNode, portIndex, info->mBufferID),
             (status_t)OK);

    mBuffers[portIndex].removeAt(i);

    return OK;
}

// Looks up a BufferInfo by its OMX buffer ID; optionally reports its index.
// An unknown ID is a programming error (TRESPASS).
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID,
        ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    TRESPASS();

    return NULL;
}

// Selects the OMX component role string ("audio_decoder.mp3", ...) matching
// the mime type and encoder/decoder direction.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
"audio_decoder.mp3", "audio_encoder.mp3" }, 1081 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1082 "audio_decoder.mp1", "audio_encoder.mp1" }, 1083 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1084 "audio_decoder.mp2", "audio_encoder.mp2" }, 1085 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1086 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1087 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1088 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1089 { MEDIA_MIMETYPE_AUDIO_AAC, 1090 "audio_decoder.aac", "audio_encoder.aac" }, 1091 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1092 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1093 { MEDIA_MIMETYPE_AUDIO_OPUS, 1094 "audio_decoder.opus", "audio_encoder.opus" }, 1095 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1096 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1097 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1098 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1099 { MEDIA_MIMETYPE_VIDEO_AVC, 1100 "video_decoder.avc", "video_encoder.avc" }, 1101 { MEDIA_MIMETYPE_VIDEO_HEVC, 1102 "video_decoder.hevc", "video_encoder.hevc" }, 1103 { MEDIA_MIMETYPE_VIDEO_MPEG4, 1104 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1105 { MEDIA_MIMETYPE_VIDEO_H263, 1106 "video_decoder.h263", "video_encoder.h263" }, 1107 { MEDIA_MIMETYPE_VIDEO_VP8, 1108 "video_decoder.vp8", "video_encoder.vp8" }, 1109 { MEDIA_MIMETYPE_VIDEO_VP9, 1110 "video_decoder.vp9", "video_encoder.vp9" }, 1111 { MEDIA_MIMETYPE_AUDIO_RAW, 1112 "audio_decoder.raw", "audio_encoder.raw" }, 1113 { MEDIA_MIMETYPE_AUDIO_FLAC, 1114 "audio_decoder.flac", "audio_encoder.flac" }, 1115 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1116 "audio_decoder.gsm", "audio_encoder.gsm" }, 1117 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1118 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1119 { MEDIA_MIMETYPE_AUDIO_AC3, 1120 "audio_decoder.ac3", "audio_encoder.ac3" }, 1121 { MEDIA_MIMETYPE_AUDIO_EAC3, 1122 "audio_decoder.eac3", "audio_encoder.eac3" }, 1123 }; 1124 1125 static const size_t kNumMimeToRole = 1126 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1127 1128 size_t i; 1129 
for (i = 0; i < kNumMimeToRole; ++i) { 1130 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1131 break; 1132 } 1133 } 1134 1135 if (i == kNumMimeToRole) { 1136 return ERROR_UNSUPPORTED; 1137 } 1138 1139 const char *role = 1140 isEncoder ? kMimeToRole[i].encoderRole 1141 : kMimeToRole[i].decoderRole; 1142 1143 if (role != NULL) { 1144 OMX_PARAM_COMPONENTROLETYPE roleParams; 1145 InitOMXParams(&roleParams); 1146 1147 strncpy((char *)roleParams.cRole, 1148 role, OMX_MAX_STRINGNAME_SIZE - 1); 1149 1150 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1151 1152 status_t err = mOMX->setParameter( 1153 mNode, OMX_IndexParamStandardComponentRole, 1154 &roleParams, sizeof(roleParams)); 1155 1156 if (err != OK) { 1157 ALOGW("[%s] Failed to set standard component role '%s'.", 1158 mComponentName.c_str(), role); 1159 1160 return err; 1161 } 1162 } 1163 1164 return OK; 1165} 1166 1167status_t ACodec::configureCodec( 1168 const char *mime, const sp<AMessage> &msg) { 1169 int32_t encoder; 1170 if (!msg->findInt32("encoder", &encoder)) { 1171 encoder = false; 1172 } 1173 1174 sp<AMessage> inputFormat = new AMessage(); 1175 sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged 1176 1177 mIsEncoder = encoder; 1178 1179 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1180 1181 if (err != OK) { 1182 return err; 1183 } 1184 1185 int32_t bitRate = 0; 1186 // FLAC encoder doesn't need a bitrate, other encoders do 1187 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1188 && !msg->findInt32("bitrate", &bitRate)) { 1189 return INVALID_OPERATION; 1190 } 1191 1192 int32_t storeMeta; 1193 if (encoder 1194 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1195 && storeMeta != 0) { 1196 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE); 1197 1198 if (err != OK) { 1199 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1200 mComponentName.c_str(), err); 1201 1202 return err; 1203 } 1204 } 1205 
1206 int32_t prependSPSPPS = 0; 1207 if (encoder 1208 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1209 && prependSPSPPS != 0) { 1210 OMX_INDEXTYPE index; 1211 err = mOMX->getExtensionIndex( 1212 mNode, 1213 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1214 &index); 1215 1216 if (err == OK) { 1217 PrependSPSPPSToIDRFramesParams params; 1218 InitOMXParams(¶ms); 1219 params.bEnable = OMX_TRUE; 1220 1221 err = mOMX->setParameter( 1222 mNode, index, ¶ms, sizeof(params)); 1223 } 1224 1225 if (err != OK) { 1226 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1227 "IDR frames. (err %d)", err); 1228 1229 return err; 1230 } 1231 } 1232 1233 // Only enable metadata mode on encoder output if encoder can prepend 1234 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1235 // opaque handle, to which we don't have access. 1236 int32_t video = !strncasecmp(mime, "video/", 6); 1237 if (encoder && video) { 1238 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1239 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1240 && storeMeta != 0); 1241 1242 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable); 1243 1244 if (err != OK) { 1245 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1246 mComponentName.c_str(), err); 1247 mUseMetadataOnEncoderOutput = 0; 1248 } else { 1249 mUseMetadataOnEncoderOutput = enable; 1250 } 1251 1252 if (!msg->findInt64( 1253 "repeat-previous-frame-after", 1254 &mRepeatFrameDelayUs)) { 1255 mRepeatFrameDelayUs = -1ll; 1256 } 1257 1258 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1259 mMaxPtsGapUs = -1ll; 1260 } 1261 1262 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1263 mTimePerCaptureUs = -1ll; 1264 } 1265 1266 if (!msg->findInt32( 1267 "create-input-buffers-suspended", 1268 (int32_t*)&mCreateInputBuffersSuspended)) { 1269 mCreateInputBuffersSuspended = false; 1270 } 1271 } 1272 1273 // NOTE: we only use native window for 
video decoders 1274 sp<RefBase> obj; 1275 bool haveNativeWindow = msg->findObject("native-window", &obj) 1276 && obj != NULL && video && !encoder; 1277 mStoreMetaDataInOutputBuffers = false; 1278 if (video && !encoder) { 1279 inputFormat->setInt32("adaptive-playback", false); 1280 1281 int32_t usageProtected; 1282 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1283 if (!haveNativeWindow) { 1284 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1285 return PERMISSION_DENIED; 1286 } 1287 mFlags |= kFlagIsGrallocUsageProtected; 1288 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1289 } 1290 } 1291 if (haveNativeWindow) { 1292 sp<NativeWindowWrapper> windowWrapper( 1293 static_cast<NativeWindowWrapper *>(obj.get())); 1294 sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow(); 1295 1296 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1297 int32_t autoFrc; 1298 if (msg->findInt32("auto-frc", &autoFrc)) { 1299 bool enabled = autoFrc; 1300 OMX_CONFIG_BOOLEANTYPE config; 1301 InitOMXParams(&config); 1302 config.bEnabled = (OMX_BOOL)enabled; 1303 status_t temp = mOMX->setConfig( 1304 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1305 &config, sizeof(config)); 1306 if (temp == OK) { 1307 outputFormat->setInt32("auto-frc", enabled); 1308 } else if (enabled) { 1309 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1310 } 1311 } 1312 // END of temporary support for automatic FRC 1313 1314 int32_t tunneled; 1315 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1316 tunneled != 0) { 1317 ALOGI("Configuring TUNNELED video playback."); 1318 mTunneled = true; 1319 1320 int32_t audioHwSync = 0; 1321 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1322 ALOGW("No Audio HW Sync provided for video tunnel"); 1323 } 1324 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1325 if (err != OK) { 1326 ALOGE("configureTunneledVideoPlayback(%d,%p) 
failed!", 1327 audioHwSync, nativeWindow.get()); 1328 return err; 1329 } 1330 1331 int32_t maxWidth = 0, maxHeight = 0; 1332 if (msg->findInt32("max-width", &maxWidth) && 1333 msg->findInt32("max-height", &maxHeight)) { 1334 1335 err = mOMX->prepareForAdaptivePlayback( 1336 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1337 if (err != OK) { 1338 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1339 mComponentName.c_str(), err); 1340 // allow failure 1341 err = OK; 1342 } else { 1343 inputFormat->setInt32("max-width", maxWidth); 1344 inputFormat->setInt32("max-height", maxHeight); 1345 inputFormat->setInt32("adaptive-playback", true); 1346 } 1347 } 1348 } else { 1349 ALOGV("Configuring CPU controlled video playback."); 1350 mTunneled = false; 1351 1352 // Explicity reset the sideband handle of the window for 1353 // non-tunneled video in case the window was previously used 1354 // for a tunneled video playback. 1355 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1356 if (err != OK) { 1357 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1358 return err; 1359 } 1360 1361 // Always try to enable dynamic output buffers on native surface 1362 err = mOMX->storeMetaDataInBuffers( 1363 mNode, kPortIndexOutput, OMX_TRUE); 1364 if (err != OK) { 1365 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1366 mComponentName.c_str(), err); 1367 1368 // if adaptive playback has been requested, try JB fallback 1369 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1370 // LARGE MEMORY REQUIREMENT 1371 1372 // we will not do adaptive playback on software accessed 1373 // surfaces as they never had to respond to changes in the 1374 // crop window, and we don't trust that they will be able to. 
1375 int usageBits = 0; 1376 bool canDoAdaptivePlayback; 1377 1378 if (nativeWindow->query( 1379 nativeWindow.get(), 1380 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1381 &usageBits) != OK) { 1382 canDoAdaptivePlayback = false; 1383 } else { 1384 canDoAdaptivePlayback = 1385 (usageBits & 1386 (GRALLOC_USAGE_SW_READ_MASK | 1387 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1388 } 1389 1390 int32_t maxWidth = 0, maxHeight = 0; 1391 if (canDoAdaptivePlayback && 1392 msg->findInt32("max-width", &maxWidth) && 1393 msg->findInt32("max-height", &maxHeight)) { 1394 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1395 mComponentName.c_str(), maxWidth, maxHeight); 1396 1397 err = mOMX->prepareForAdaptivePlayback( 1398 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1399 maxHeight); 1400 ALOGW_IF(err != OK, 1401 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1402 mComponentName.c_str(), err); 1403 1404 if (err == OK) { 1405 inputFormat->setInt32("max-width", maxWidth); 1406 inputFormat->setInt32("max-height", maxHeight); 1407 inputFormat->setInt32("adaptive-playback", true); 1408 } 1409 } 1410 // allow failure 1411 err = OK; 1412 } else { 1413 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1414 mComponentName.c_str()); 1415 mStoreMetaDataInOutputBuffers = true; 1416 inputFormat->setInt32("adaptive-playback", true); 1417 } 1418 1419 int32_t push; 1420 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1421 && push != 0) { 1422 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1423 } 1424 } 1425 1426 int32_t rotationDegrees; 1427 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1428 mRotationDegrees = rotationDegrees; 1429 } else { 1430 mRotationDegrees = 0; 1431 } 1432 } 1433 1434 if (video) { 1435 // determine need for software renderer 1436 bool usingSwRenderer = false; 1437 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1438 usingSwRenderer = true; 1439 haveNativeWindow = false; 1440 } 1441 1442 if (encoder) { 1443 err = 
setupVideoEncoder(mime, msg); 1444 } else { 1445 err = setupVideoDecoder(mime, msg, haveNativeWindow); 1446 } 1447 1448 if (err != OK) { 1449 return err; 1450 } 1451 1452 if (haveNativeWindow) { 1453 sp<NativeWindowWrapper> nativeWindow( 1454 static_cast<NativeWindowWrapper *>(obj.get())); 1455 CHECK(nativeWindow != NULL); 1456 mNativeWindow = nativeWindow->getNativeWindow(); 1457 1458 native_window_set_scaling_mode( 1459 mNativeWindow.get(), NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 1460 } 1461 1462 // initialize native window now to get actual output format 1463 // TODO: this is needed for some encoders even though they don't use native window 1464 CHECK_EQ((status_t)OK, initNativeWindow()); 1465 1466 // fallback for devices that do not handle flex-YUV for native buffers 1467 if (haveNativeWindow) { 1468 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1469 if (msg->findInt32("color-format", &requestedColorFormat) && 1470 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1471 CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); 1472 int32_t colorFormat = OMX_COLOR_FormatUnused; 1473 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1474 CHECK(outputFormat->findInt32("color-format", &colorFormat)); 1475 ALOGD("[%s] Requested output format %#x and got %#x.", 1476 mComponentName.c_str(), requestedColorFormat, colorFormat); 1477 if (!isFlexibleColorFormat( 1478 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1479 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1480 // device did not handle flex-YUV request for native window, fall back 1481 // to SW renderer 1482 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 1483 mNativeWindow.clear(); 1484 haveNativeWindow = false; 1485 usingSwRenderer = true; 1486 if (mStoreMetaDataInOutputBuffers) { 1487 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE); 1488 mStoreMetaDataInOutputBuffers = false; 1489 // TODO: implement 
adaptive-playback support for bytebuffer mode. 1490 // This is done by SW codecs, but most HW codecs don't support it. 1491 inputFormat->setInt32("adaptive-playback", false); 1492 } 1493 if (err == OK) { 1494 err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); 1495 } 1496 if (mFlags & kFlagIsGrallocUsageProtected) { 1497 // fallback is not supported for protected playback 1498 err = PERMISSION_DENIED; 1499 } else if (err == OK) { 1500 err = setupVideoDecoder(mime, msg, false); 1501 } 1502 } 1503 } 1504 } 1505 1506 if (usingSwRenderer) { 1507 outputFormat->setInt32("using-sw-renderer", 1); 1508 } 1509 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 1510 int32_t numChannels, sampleRate; 1511 if (!msg->findInt32("channel-count", &numChannels) 1512 || !msg->findInt32("sample-rate", &sampleRate)) { 1513 // Since we did not always check for these, leave them optional 1514 // and have the decoder figure it all out. 1515 err = OK; 1516 } else { 1517 err = setupRawAudioFormat( 1518 encoder ? 
kPortIndexInput : kPortIndexOutput, 1519 sampleRate, 1520 numChannels); 1521 } 1522 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 1523 int32_t numChannels, sampleRate; 1524 if (!msg->findInt32("channel-count", &numChannels) 1525 || !msg->findInt32("sample-rate", &sampleRate)) { 1526 err = INVALID_OPERATION; 1527 } else { 1528 int32_t isADTS, aacProfile; 1529 int32_t sbrMode; 1530 int32_t maxOutputChannelCount; 1531 int32_t pcmLimiterEnable; 1532 drcParams_t drc; 1533 if (!msg->findInt32("is-adts", &isADTS)) { 1534 isADTS = 0; 1535 } 1536 if (!msg->findInt32("aac-profile", &aacProfile)) { 1537 aacProfile = OMX_AUDIO_AACObjectNull; 1538 } 1539 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 1540 sbrMode = -1; 1541 } 1542 1543 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 1544 maxOutputChannelCount = -1; 1545 } 1546 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 1547 // value is unknown 1548 pcmLimiterEnable = -1; 1549 } 1550 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 1551 // value is unknown 1552 drc.encodedTargetLevel = -1; 1553 } 1554 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 1555 // value is unknown 1556 drc.drcCut = -1; 1557 } 1558 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 1559 // value is unknown 1560 drc.drcBoost = -1; 1561 } 1562 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 1563 // value is unknown 1564 drc.heavyCompression = -1; 1565 } 1566 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 1567 // value is unknown 1568 drc.targetRefLevel = -1; 1569 } 1570 1571 err = setupAACCodec( 1572 encoder, numChannels, sampleRate, bitRate, aacProfile, 1573 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 1574 pcmLimiterEnable); 1575 } 1576 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 1577 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 1578 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 1579 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 1580 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 1581 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 1582 // These are PCM-like formats with a fixed sample rate but 1583 // a variable number of channels. 1584 1585 int32_t numChannels; 1586 if (!msg->findInt32("channel-count", &numChannels)) { 1587 err = INVALID_OPERATION; 1588 } else { 1589 err = setupG711Codec(encoder, numChannels); 1590 } 1591 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 1592 int32_t numChannels, sampleRate, compressionLevel = -1; 1593 if (encoder && 1594 (!msg->findInt32("channel-count", &numChannels) 1595 || !msg->findInt32("sample-rate", &sampleRate))) { 1596 ALOGE("missing channel count or sample rate for FLAC encoder"); 1597 err = INVALID_OPERATION; 1598 } else { 1599 if (encoder) { 1600 if (!msg->findInt32( 1601 "complexity", &compressionLevel) && 1602 !msg->findInt32( 1603 "flac-compression-level", &compressionLevel)) { 1604 compressionLevel = 5; // default FLAC compression level 1605 } else if (compressionLevel < 0) { 1606 ALOGW("compression level %d outside [0..8] range, " 1607 "using 0", 1608 compressionLevel); 1609 compressionLevel = 0; 1610 } else if (compressionLevel > 8) { 1611 ALOGW("compression level %d outside [0..8] range, " 1612 "using 8", 1613 compressionLevel); 1614 compressionLevel = 8; 1615 } 1616 } 1617 err = setupFlacCodec( 1618 encoder, numChannels, sampleRate, compressionLevel); 1619 } 1620 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 1621 int32_t numChannels, sampleRate; 1622 if (encoder 1623 || !msg->findInt32("channel-count", &numChannels) 1624 || !msg->findInt32("sample-rate", &sampleRate)) { 1625 err = INVALID_OPERATION; 1626 } else { 1627 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels); 1628 } 1629 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 1630 int32_t numChannels; 
1631 int32_t sampleRate; 1632 if (!msg->findInt32("channel-count", &numChannels) 1633 || !msg->findInt32("sample-rate", &sampleRate)) { 1634 err = INVALID_OPERATION; 1635 } else { 1636 err = setupAC3Codec(encoder, numChannels, sampleRate); 1637 } 1638 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 1639 int32_t numChannels; 1640 int32_t sampleRate; 1641 if (!msg->findInt32("channel-count", &numChannels) 1642 || !msg->findInt32("sample-rate", &sampleRate)) { 1643 err = INVALID_OPERATION; 1644 } else { 1645 err = setupEAC3Codec(encoder, numChannels, sampleRate); 1646 } 1647 } 1648 1649 if (err != OK) { 1650 return err; 1651 } 1652 1653 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 1654 mEncoderDelay = 0; 1655 } 1656 1657 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 1658 mEncoderPadding = 0; 1659 } 1660 1661 if (msg->findInt32("channel-mask", &mChannelMask)) { 1662 mChannelMaskPresent = true; 1663 } else { 1664 mChannelMaskPresent = false; 1665 } 1666 1667 int32_t maxInputSize; 1668 if (msg->findInt32("max-input-size", &maxInputSize)) { 1669 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 1670 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 1671 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 1672 } 1673 1674 mBaseOutputFormat = outputFormat; 1675 1676 CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK); 1677 CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); 1678 mInputFormat = inputFormat; 1679 mOutputFormat = outputFormat; 1680 1681 return err; 1682} 1683 1684status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 1685 OMX_PARAM_PORTDEFINITIONTYPE def; 1686 InitOMXParams(&def); 1687 def.nPortIndex = portIndex; 1688 1689 status_t err = mOMX->getParameter( 1690 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1691 1692 if (err != OK) { 1693 return err; 1694 } 1695 1696 if (def.nBufferSize >= size) { 1697 return OK; 1698 } 1699 1700 
def.nBufferSize = size; 1701 1702 err = mOMX->setParameter( 1703 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1704 1705 if (err != OK) { 1706 return err; 1707 } 1708 1709 err = mOMX->getParameter( 1710 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1711 1712 if (err != OK) { 1713 return err; 1714 } 1715 1716 CHECK(def.nBufferSize >= size); 1717 1718 return OK; 1719} 1720 1721status_t ACodec::selectAudioPortFormat( 1722 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 1723 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 1724 InitOMXParams(&format); 1725 1726 format.nPortIndex = portIndex; 1727 for (OMX_U32 index = 0;; ++index) { 1728 format.nIndex = index; 1729 1730 status_t err = mOMX->getParameter( 1731 mNode, OMX_IndexParamAudioPortFormat, 1732 &format, sizeof(format)); 1733 1734 if (err != OK) { 1735 return err; 1736 } 1737 1738 if (format.eEncoding == desiredFormat) { 1739 break; 1740 } 1741 } 1742 1743 return mOMX->setParameter( 1744 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 1745} 1746 1747status_t ACodec::setupAACCodec( 1748 bool encoder, int32_t numChannels, int32_t sampleRate, 1749 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 1750 int32_t maxOutputChannelCount, const drcParams_t& drc, 1751 int32_t pcmLimiterEnable) { 1752 if (encoder && isADTS) { 1753 return -EINVAL; 1754 } 1755 1756 status_t err = setupRawAudioFormat( 1757 encoder ? 
kPortIndexInput : kPortIndexOutput, 1758 sampleRate, 1759 numChannels); 1760 1761 if (err != OK) { 1762 return err; 1763 } 1764 1765 if (encoder) { 1766 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 1767 1768 if (err != OK) { 1769 return err; 1770 } 1771 1772 OMX_PARAM_PORTDEFINITIONTYPE def; 1773 InitOMXParams(&def); 1774 def.nPortIndex = kPortIndexOutput; 1775 1776 err = mOMX->getParameter( 1777 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1778 1779 if (err != OK) { 1780 return err; 1781 } 1782 1783 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 1784 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 1785 1786 err = mOMX->setParameter( 1787 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1788 1789 if (err != OK) { 1790 return err; 1791 } 1792 1793 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 1794 InitOMXParams(&profile); 1795 profile.nPortIndex = kPortIndexOutput; 1796 1797 err = mOMX->getParameter( 1798 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1799 1800 if (err != OK) { 1801 return err; 1802 } 1803 1804 profile.nChannels = numChannels; 1805 1806 profile.eChannelMode = 1807 (numChannels == 1) 1808 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 1809 1810 profile.nSampleRate = sampleRate; 1811 profile.nBitRate = bitRate; 1812 profile.nAudioBandWidth = 0; 1813 profile.nFrameLength = 0; 1814 profile.nAACtools = OMX_AUDIO_AACToolAll; 1815 profile.nAACERtools = OMX_AUDIO_AACERNone; 1816 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 1817 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 1818 switch (sbrMode) { 1819 case 0: 1820 // disable sbr 1821 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 1822 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 1823 break; 1824 case 1: 1825 // enable single-rate sbr 1826 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 1827 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 1828 break; 1829 case 2: 1830 // enable dual-rate sbr 1831 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 1832 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 1833 break; 1834 case -1: 1835 // enable both modes -> the codec will decide which mode should be used 1836 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 1837 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 1838 break; 1839 default: 1840 // unsupported sbr mode 1841 return BAD_VALUE; 1842 } 1843 1844 1845 err = mOMX->setParameter( 1846 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1847 1848 if (err != OK) { 1849 return err; 1850 } 1851 1852 return err; 1853 } 1854 1855 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 1856 InitOMXParams(&profile); 1857 profile.nPortIndex = kPortIndexInput; 1858 1859 err = mOMX->getParameter( 1860 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1861 1862 if (err != OK) { 1863 return err; 1864 } 1865 1866 profile.nChannels = numChannels; 1867 profile.nSampleRate = sampleRate; 1868 1869 profile.eAACStreamFormat = 1870 isADTS 1871 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 1872 : OMX_AUDIO_AACStreamFormatMP4FF; 1873 1874 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 1875 presentation.nMaxOutputChannels = maxOutputChannelCount; 1876 presentation.nDrcCut = drc.drcCut; 1877 presentation.nDrcBoost = drc.drcBoost; 1878 presentation.nHeavyCompression = drc.heavyCompression; 1879 presentation.nTargetReferenceLevel = drc.targetRefLevel; 1880 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 1881 presentation.nPCMLimiterEnable = pcmLimiterEnable; 1882 1883 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1884 if (res == OK) { 1885 // optional parameters, will not cause configuration failure 1886 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 1887 &presentation, sizeof(presentation)); 1888 } else { 1889 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 1890 } 1891 return res; 1892} 1893 1894status_t ACodec::setupAC3Codec( 1895 bool encoder, int32_t numChannels, int32_t sampleRate) { 1896 status_t err = setupRawAudioFormat( 1897 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 1898 1899 if (err != OK) { 1900 return err; 1901 } 1902 1903 if (encoder) { 1904 ALOGW("AC3 encoding is not supported."); 1905 return INVALID_OPERATION; 1906 } 1907 1908 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 1909 InitOMXParams(&def); 1910 def.nPortIndex = kPortIndexInput; 1911 1912 err = mOMX->getParameter( 1913 mNode, 1914 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 1915 &def, 1916 sizeof(def)); 1917 1918 if (err != OK) { 1919 return err; 1920 } 1921 1922 def.nChannels = numChannels; 1923 def.nSampleRate = sampleRate; 1924 1925 return mOMX->setParameter( 1926 mNode, 1927 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 1928 &def, 1929 sizeof(def)); 1930} 1931 1932status_t ACodec::setupEAC3Codec( 1933 bool encoder, int32_t numChannels, int32_t sampleRate) { 1934 status_t err = setupRawAudioFormat( 1935 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 1936 1937 if (err != OK) { 1938 return err; 1939 } 1940 1941 if (encoder) { 1942 ALOGW("EAC3 encoding is not supported."); 1943 return INVALID_OPERATION; 1944 } 1945 1946 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 1947 InitOMXParams(&def); 1948 def.nPortIndex = kPortIndexInput; 1949 1950 err = mOMX->getParameter( 1951 mNode, 1952 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 1953 &def, 1954 sizeof(def)); 1955 1956 if (err != OK) { 1957 return err; 1958 } 1959 1960 def.nChannels = numChannels; 1961 def.nSampleRate = sampleRate; 1962 1963 return mOMX->setParameter( 1964 mNode, 1965 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 1966 &def, 1967 sizeof(def)); 1968} 1969 1970static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 1971 bool isAMRWB, int32_t bps) { 1972 if (isAMRWB) { 1973 if (bps <= 6600) { 1974 return OMX_AUDIO_AMRBandModeWB0; 1975 } else if (bps <= 8850) { 1976 return OMX_AUDIO_AMRBandModeWB1; 1977 } else if (bps <= 12650) { 1978 return OMX_AUDIO_AMRBandModeWB2; 1979 } else if (bps <= 14250) { 1980 return OMX_AUDIO_AMRBandModeWB3; 
1981 } else if (bps <= 15850) { 1982 return OMX_AUDIO_AMRBandModeWB4; 1983 } else if (bps <= 18250) { 1984 return OMX_AUDIO_AMRBandModeWB5; 1985 } else if (bps <= 19850) { 1986 return OMX_AUDIO_AMRBandModeWB6; 1987 } else if (bps <= 23050) { 1988 return OMX_AUDIO_AMRBandModeWB7; 1989 } 1990 1991 // 23850 bps 1992 return OMX_AUDIO_AMRBandModeWB8; 1993 } else { // AMRNB 1994 if (bps <= 4750) { 1995 return OMX_AUDIO_AMRBandModeNB0; 1996 } else if (bps <= 5150) { 1997 return OMX_AUDIO_AMRBandModeNB1; 1998 } else if (bps <= 5900) { 1999 return OMX_AUDIO_AMRBandModeNB2; 2000 } else if (bps <= 6700) { 2001 return OMX_AUDIO_AMRBandModeNB3; 2002 } else if (bps <= 7400) { 2003 return OMX_AUDIO_AMRBandModeNB4; 2004 } else if (bps <= 7950) { 2005 return OMX_AUDIO_AMRBandModeNB5; 2006 } else if (bps <= 10200) { 2007 return OMX_AUDIO_AMRBandModeNB6; 2008 } 2009 2010 // 12200 bps 2011 return OMX_AUDIO_AMRBandModeNB7; 2012 } 2013} 2014 2015status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2016 OMX_AUDIO_PARAM_AMRTYPE def; 2017 InitOMXParams(&def); 2018 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2019 2020 status_t err = 2021 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2022 2023 if (err != OK) { 2024 return err; 2025 } 2026 2027 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2028 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2029 2030 err = mOMX->setParameter( 2031 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2032 2033 if (err != OK) { 2034 return err; 2035 } 2036 2037 return setupRawAudioFormat( 2038 encoder ? kPortIndexInput : kPortIndexOutput, 2039 isWAMR ? 
                16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// G.711 (a-law/u-law) support is decode-only here; the input port is simply
// configured as raw 8 kHz audio.
status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
    CHECK(!encoder); // XXX TODO

    return setupRawAudioFormat(
            kPortIndexInput, 8000 /* sampleRate */, numChannels);
}

// Configures a FLAC encoder's compression level (via OMX_IndexParamAudioFlac)
// and sets up the uncompressed PCM side of the component: the input port for
// an encoder, the output port for a decoder.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures |portIndex| as 16-bit signed, interleaved, linear PCM at the
// given sample rate / channel count, including the OMX channel mapping.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
    // First switch the port's coding type to PCM via the port definition...
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // ...then fill in the detailed PCM parameters.
    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    pcmParams.eNumData = OMX_NumericalDataSigned;
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nBitPerSample = 16;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    // No channel mapping available for this channel count: skip the detailed
    // PCM setup. NOTE(review): OMX_ErrorNone (0) maps to OK at the status_t
    // level, so this deliberately reports success — confirm intended.
    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        return OMX_ErrorNone;
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}

// Enables tunneled video playback: asks the component for a sideband handle
// (synchronized to |audioHwSync|) and attaches it to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the component's supported video port formats until one matches
// the requested compression/color format pair, then selects it on |portIndex|.
// A flexible color format request is substituted by the codec's concrete
// equivalent when one exists. Returns UNKNOWN_ERROR if nothing matches.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    OMX_U32 index = 0;
    for (;;) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // ran past the component's format list without a match
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers,
                        &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround: TI's encoder reports bogus values in the field that is
        // irrelevant for the given port, so only compare the meaningful one.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        ++index;
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        // output port formats must be uncompressed (color formats)
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        // non-legacy callers just take the component's first (preferred) format
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// mime type <-> OMX video coding enum lookup table
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
};

// Maps |mime| (case-insensitively) to its OMX coding type. On failure sets
// *codingType to OMX_VIDEO_CodingUnused and returns ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: OMX coding type -> mime string. Clears *mime and returns
// ERROR_UNSUPPORTED when the coding type is not in the table.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Configures a decoder: compressed input port from |mime|, output color
// format from the message (falling back to the component's preferred /
// legacy-flexible format), then width/height/frame-rate on both ports.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // fall back to the component's own preference
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // frame rate may arrive as float or int32; -1 means "unspecified"
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    return OK;
}

// Configures an encoder: raw-color input port (stride/slice-height/frame
// rate), compressed output port from |mime|, then the codec-specific
// parameters via the setup*EncoderParameters helpers.
status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    // stride/slice-height default to width/height when not provided
    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV420: 1.5 bytes per pixel of stride x slice-height
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on this int32 path;
        // a float "frame-rate" leaves it untouched — confirm intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in Q16 fixed point
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    // codec-specific encoder parameters
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    ALOGI("setupVideoEncoder succeeded");

    return err;
}

// Configures cyclic and/or adaptive intra-macroblock refresh on the output
// port from the message's "intra-refresh-*" keys.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        // cyclic refresh requires an explicit macroblock count
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        // adaptive refresh requires macroblock count and refresh reference
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) into a P-frame count between
// I-frames: negative means "never" (0xFFFFFFFF), zero means all-I.
// NOTE(review): callers pass a float frame rate which is implicitly
// truncated to int32_t here.
static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
    if (iFramesInterval < 0) {
        return 0xFFFFFFFF;
    } else if (iFramesInterval == 0) {
        return 0;
    }
    OMX_U32 ret = frameRate * iFramesInterval;
    return ret;
}

// Reads "bitrate-mode" from |msg|; defaults to variable bitrate.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder: simple-profile style defaults (I/P only, no B-frames,
// AC prediction on), optional explicit profile/level, then bitrate and
// error-correction setup.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        // all-I stream
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // "profile" requires a matching "level"; both are verified against the
    // component's advertised capabilities.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder: baseline-style defaults (I/P only, no B-frames, no
// PLUSPTYPE), optional explicit profile/level, then bitrate and
// error-correction setup.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        // all-I stream
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Picks the minimum AVC level whose MB-rate / frame-size / dimension /
// bitrate limits accommodate the given stream; returns 0 if none fits.
// static
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*   MBps     MB   dim  bitrate        level */
        {    1485,    99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

// AVC encoder: optional intra-refresh, optional profile/level (forced to
// baseline below), I/P-only defaults, then bitrate configuration.
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate))
{ 2836 int32_t tmp; 2837 if (!msg->findInt32("frame-rate", &tmp)) { 2838 return INVALID_OPERATION; 2839 } 2840 frameRate = (float)tmp; 2841 } 2842 2843 status_t err = OK; 2844 int32_t intraRefreshMode = 0; 2845 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 2846 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 2847 if (err != OK) { 2848 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 2849 err, intraRefreshMode); 2850 return err; 2851 } 2852 } 2853 2854 OMX_VIDEO_PARAM_AVCTYPE h264type; 2855 InitOMXParams(&h264type); 2856 h264type.nPortIndex = kPortIndexOutput; 2857 2858 err = mOMX->getParameter( 2859 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2860 2861 if (err != OK) { 2862 return err; 2863 } 2864 2865 h264type.nAllowedPictureTypes = 2866 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2867 2868 int32_t profile; 2869 if (msg->findInt32("profile", &profile)) { 2870 int32_t level; 2871 if (!msg->findInt32("level", &level)) { 2872 return INVALID_OPERATION; 2873 } 2874 2875 err = verifySupportForProfileAndLevel(profile, level); 2876 2877 if (err != OK) { 2878 return err; 2879 } 2880 2881 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 2882 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 2883 } 2884 2885 // XXX 2886 if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) { 2887 ALOGW("Use baseline profile instead of %d for AVC recording", 2888 h264type.eProfile); 2889 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 2890 } 2891 2892 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 2893 h264type.nSliceHeaderSpacing = 0; 2894 h264type.bUseHadamard = OMX_TRUE; 2895 h264type.nRefFrames = 1; 2896 h264type.nBFrames = 0; 2897 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2898 if (h264type.nPFrames == 0) { 2899 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2900 } 2901 h264type.nRefIdx10ActiveMinus1 = 0; 2902 h264type.nRefIdx11ActiveMinus1 = 0; 
2903 h264type.bEntropyCodingCABAC = OMX_FALSE; 2904 h264type.bWeightedPPrediction = OMX_FALSE; 2905 h264type.bconstIpred = OMX_FALSE; 2906 h264type.bDirect8x8Inference = OMX_FALSE; 2907 h264type.bDirectSpatialTemporal = OMX_FALSE; 2908 h264type.nCabacInitIdc = 0; 2909 } 2910 2911 if (h264type.nBFrames != 0) { 2912 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 2913 } 2914 2915 h264type.bEnableUEP = OMX_FALSE; 2916 h264type.bEnableFMO = OMX_FALSE; 2917 h264type.bEnableASO = OMX_FALSE; 2918 h264type.bEnableRS = OMX_FALSE; 2919 h264type.bFrameMBsOnly = OMX_TRUE; 2920 h264type.bMBAFF = OMX_FALSE; 2921 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 2922 2923 err = mOMX->setParameter( 2924 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2925 2926 if (err != OK) { 2927 return err; 2928 } 2929 2930 return configureBitrate(bitrate, bitrateMode); 2931} 2932 2933status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 2934 int32_t bitrate, iFrameInterval; 2935 if (!msg->findInt32("bitrate", &bitrate) 2936 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2937 return INVALID_OPERATION; 2938 } 2939 2940 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2941 2942 float frameRate; 2943 if (!msg->findFloat("frame-rate", &frameRate)) { 2944 int32_t tmp; 2945 if (!msg->findInt32("frame-rate", &tmp)) { 2946 return INVALID_OPERATION; 2947 } 2948 frameRate = (float)tmp; 2949 } 2950 2951 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 2952 InitOMXParams(&hevcType); 2953 hevcType.nPortIndex = kPortIndexOutput; 2954 2955 status_t err = OK; 2956 err = mOMX->getParameter( 2957 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2958 if (err != OK) { 2959 return err; 2960 } 2961 2962 int32_t profile; 2963 if (msg->findInt32("profile", &profile)) { 2964 int32_t level; 2965 if (!msg->findInt32("level", &level)) { 2966 return INVALID_OPERATION; 2967 } 2968 2969 err = verifySupportForProfileAndLevel(profile, level); 
2970 if (err != OK) { 2971 return err; 2972 } 2973 2974 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 2975 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 2976 } 2977 2978 // TODO: Need OMX structure definition for setting iFrameInterval 2979 2980 err = mOMX->setParameter( 2981 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2982 if (err != OK) { 2983 return err; 2984 } 2985 2986 return configureBitrate(bitrate, bitrateMode); 2987} 2988 2989status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 2990 int32_t bitrate; 2991 int32_t iFrameInterval = 0; 2992 size_t tsLayers = 0; 2993 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 2994 OMX_VIDEO_VPXTemporalLayerPatternNone; 2995 static const uint32_t kVp8LayerRateAlloction 2996 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 2997 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 2998 {100, 100, 100}, // 1 layer 2999 { 60, 100, 100}, // 2 layers {60%, 40%} 3000 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 3001 }; 3002 if (!msg->findInt32("bitrate", &bitrate)) { 3003 return INVALID_OPERATION; 3004 } 3005 msg->findInt32("i-frame-interval", &iFrameInterval); 3006 3007 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3008 3009 float frameRate; 3010 if (!msg->findFloat("frame-rate", &frameRate)) { 3011 int32_t tmp; 3012 if (!msg->findInt32("frame-rate", &tmp)) { 3013 return INVALID_OPERATION; 3014 } 3015 frameRate = (float)tmp; 3016 } 3017 3018 AString tsSchema; 3019 if (msg->findString("ts-schema", &tsSchema)) { 3020 if (tsSchema == "webrtc.vp8.1-layer") { 3021 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3022 tsLayers = 1; 3023 } else if (tsSchema == "webrtc.vp8.2-layer") { 3024 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3025 tsLayers = 2; 3026 } else if (tsSchema == "webrtc.vp8.3-layer") { 3027 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 3028 tsLayers = 3; 3029 } else { 3030 ALOGW("Unsupported ts-schema [%s]", 
tsSchema.c_str()); 3031 } 3032 } 3033 3034 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 3035 InitOMXParams(&vp8type); 3036 vp8type.nPortIndex = kPortIndexOutput; 3037 status_t err = mOMX->getParameter( 3038 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3039 &vp8type, sizeof(vp8type)); 3040 3041 if (err == OK) { 3042 if (iFrameInterval > 0) { 3043 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 3044 } 3045 vp8type.eTemporalPattern = pattern; 3046 vp8type.nTemporalLayerCount = tsLayers; 3047 if (tsLayers > 0) { 3048 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 3049 vp8type.nTemporalLayerBitrateRatio[i] = 3050 kVp8LayerRateAlloction[tsLayers - 1][i]; 3051 } 3052 } 3053 if (bitrateMode == OMX_Video_ControlRateConstant) { 3054 vp8type.nMinQuantizer = 2; 3055 vp8type.nMaxQuantizer = 63; 3056 } 3057 3058 err = mOMX->setParameter( 3059 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3060 &vp8type, sizeof(vp8type)); 3061 if (err != OK) { 3062 ALOGW("Extended VP8 parameters set failed: %d", err); 3063 } 3064 } 3065 3066 return configureBitrate(bitrate, bitrateMode); 3067} 3068 3069status_t ACodec::verifySupportForProfileAndLevel( 3070 int32_t profile, int32_t level) { 3071 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3072 InitOMXParams(¶ms); 3073 params.nPortIndex = kPortIndexOutput; 3074 3075 for (params.nProfileIndex = 0;; ++params.nProfileIndex) { 3076 status_t err = mOMX->getParameter( 3077 mNode, 3078 OMX_IndexParamVideoProfileLevelQuerySupported, 3079 ¶ms, 3080 sizeof(params)); 3081 3082 if (err != OK) { 3083 return err; 3084 } 3085 3086 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 3087 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 3088 3089 if (profile == supportedProfile && level <= supportedLevel) { 3090 return OK; 3091 } 3092 } 3093} 3094 3095status_t ACodec::configureBitrate( 3096 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 3097 
OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 3098 InitOMXParams(&bitrateType); 3099 bitrateType.nPortIndex = kPortIndexOutput; 3100 3101 status_t err = mOMX->getParameter( 3102 mNode, OMX_IndexParamVideoBitrate, 3103 &bitrateType, sizeof(bitrateType)); 3104 3105 if (err != OK) { 3106 return err; 3107 } 3108 3109 bitrateType.eControlRate = bitrateMode; 3110 bitrateType.nTargetBitrate = bitrate; 3111 3112 return mOMX->setParameter( 3113 mNode, OMX_IndexParamVideoBitrate, 3114 &bitrateType, sizeof(bitrateType)); 3115} 3116 3117status_t ACodec::setupErrorCorrectionParameters() { 3118 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 3119 InitOMXParams(&errorCorrectionType); 3120 errorCorrectionType.nPortIndex = kPortIndexOutput; 3121 3122 status_t err = mOMX->getParameter( 3123 mNode, OMX_IndexParamVideoErrorCorrection, 3124 &errorCorrectionType, sizeof(errorCorrectionType)); 3125 3126 if (err != OK) { 3127 return OK; // Optional feature. Ignore this failure 3128 } 3129 3130 errorCorrectionType.bEnableHEC = OMX_FALSE; 3131 errorCorrectionType.bEnableResync = OMX_TRUE; 3132 errorCorrectionType.nResynchMarkerSpacing = 256; 3133 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 3134 errorCorrectionType.bEnableRVLC = OMX_FALSE; 3135 3136 return mOMX->setParameter( 3137 mNode, OMX_IndexParamVideoErrorCorrection, 3138 &errorCorrectionType, sizeof(errorCorrectionType)); 3139} 3140 3141status_t ACodec::setVideoFormatOnPort( 3142 OMX_U32 portIndex, 3143 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 3144 float frameRate) { 3145 OMX_PARAM_PORTDEFINITIONTYPE def; 3146 InitOMXParams(&def); 3147 def.nPortIndex = portIndex; 3148 3149 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3150 3151 status_t err = mOMX->getParameter( 3152 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3153 3154 CHECK_EQ(err, (status_t)OK); 3155 3156 if (portIndex == kPortIndexInput) { 3157 // XXX Need a (much) better heuristic to compute input buffer 
sizes. 3158 const size_t X = 64 * 1024; 3159 if (def.nBufferSize < X) { 3160 def.nBufferSize = X; 3161 } 3162 } 3163 3164 CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); 3165 3166 video_def->nFrameWidth = width; 3167 video_def->nFrameHeight = height; 3168 3169 if (portIndex == kPortIndexInput) { 3170 video_def->eCompressionFormat = compressionFormat; 3171 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3172 if (frameRate >= 0) { 3173 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3174 } 3175 } 3176 3177 err = mOMX->setParameter( 3178 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3179 3180 return err; 3181} 3182 3183status_t ACodec::initNativeWindow() { 3184 if (mNativeWindow != NULL) { 3185 return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE); 3186 } 3187 3188 mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); 3189 return OK; 3190} 3191 3192size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 3193 size_t n = 0; 3194 3195 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 3196 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 3197 3198 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 3199 ++n; 3200 } 3201 } 3202 3203 return n; 3204} 3205 3206size_t ACodec::countBuffersOwnedByNativeWindow() const { 3207 size_t n = 0; 3208 3209 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 3210 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 3211 3212 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 3213 ++n; 3214 } 3215 } 3216 3217 return n; 3218} 3219 3220void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 3221 if (mNativeWindow == NULL) { 3222 return; 3223 } 3224 3225 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 3226 && dequeueBufferFromNativeWindow() != NULL) { 3227 // these buffers will be submitted as regular buffers; account for this 3228 if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) 
{ 3229 --mMetaDataBuffersToSubmit; 3230 } 3231 } 3232} 3233 3234bool ACodec::allYourBuffersAreBelongToUs( 3235 OMX_U32 portIndex) { 3236 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 3237 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 3238 3239 if (info->mStatus != BufferInfo::OWNED_BY_US 3240 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 3241 ALOGV("[%s] Buffer %u on port %u still has status %d", 3242 mComponentName.c_str(), 3243 info->mBufferID, portIndex, info->mStatus); 3244 return false; 3245 } 3246 } 3247 3248 return true; 3249} 3250 3251bool ACodec::allYourBuffersAreBelongToUs() { 3252 return allYourBuffersAreBelongToUs(kPortIndexInput) 3253 && allYourBuffersAreBelongToUs(kPortIndexOutput); 3254} 3255 3256void ACodec::deferMessage(const sp<AMessage> &msg) { 3257 mDeferredQueue.push_back(msg); 3258} 3259 3260void ACodec::processDeferredMessages() { 3261 List<sp<AMessage> > queue = mDeferredQueue; 3262 mDeferredQueue.clear(); 3263 3264 List<sp<AMessage> >::iterator it = queue.begin(); 3265 while (it != queue.end()) { 3266 onMessageReceived(*it++); 3267 } 3268} 3269 3270// static 3271bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams ¶ms) { 3272 MediaImage &image = params.sMediaImage; 3273 memset(&image, 0, sizeof(image)); 3274 3275 image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN; 3276 image.mNumPlanes = 0; 3277 3278 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 3279 image.mWidth = params.nFrameWidth; 3280 image.mHeight = params.nFrameHeight; 3281 3282 // only supporting YUV420 3283 if (fmt != OMX_COLOR_FormatYUV420Planar && 3284 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 3285 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 3286 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 3287 fmt != HAL_PIXEL_FORMAT_YV12) { 3288 ALOGW("do not know color format 0x%x = %d", fmt, fmt); 3289 return false; 3290 } 3291 3292 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 3293 if (params.nStride != 0 && 
// static
// Builds a default MediaImage plane layout for the common YUV420 color
// formats, used when the component does not implement the
// describeColorFormat OMX extension. Returns false if |params| describes a
// format this fallback cannot handle; on success fills params.sMediaImage.
bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams &params) {
    MediaImage &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero
    if (params.nStride == 0 || params.nSliceHeight == 0) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format: the Y plane layout is common to all handled formats
    image.mType = MediaImage::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-byte aligned Y and chroma strides,
                // V plane laid out before the U plane.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420: U plane follows Y, V plane follows U; chroma stride is
            // half the luma stride, no alignment applied.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved UV plane follows Y (U first, column inc 2)
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            TRESPASS();
    }
    return true;
}

// static
// Asks the component to describe |describeParams|' color format via the
// describeColorFormat extension; falls back to describeDefaultColorFormat()
// if the extension is absent or the query fails. Returns true if a usable
// MediaImage description was produced.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormatParams &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) != OK ||
        omx->getParameter(
            node, describeColorFormatIndex,
            &describeParams, sizeof(describeParams)) != OK) {
        return describeDefaultColorFormat(describeParams);
    }
    return describeParams.sMediaImage.mType !=
            MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
}

// static
// Returns true if |colorFormat| on this component is equivalent to a
// flexible YUV420 layout (3 planes, 8-bit, 4:2:0 chroma subsampling),
// and stores the flexible equivalent in |*flexibleEquivalent|.
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormatParams describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage &img = describeParams.sMediaImage;
    if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) {
        // luma plane must be full resolution
        if (img.mNumPlanes != 3 ||
            img.mPlane[img.Y].mHorizSubsampling != 1 ||
            img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                && img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
                return true;
            }
        }
    }
    return false;
}
// Populates |notify| with the current format of |portIndex| (mime, size,
// crop, audio parameters, ...) by querying the port definition and then the
// coding-specific OMX parameters. Aborts via CHECK on any query failure.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    // TODO: catch errors an return them instead of using CHECK
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    CHECK_EQ(mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
             (status_t)OK);

    CHECK_EQ((int)def.eDir,
            (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput));

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // raw video: decoder output or encoder input
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    // for client-facing (non-surface) buffers, attach a
                    // MediaImage layout description when one is available
                    if (mNativeWindow == NULL) {
                        DescribeColorFormatParams describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage *img = &describeParams.sMediaImage;
                            ALOGV("[%s] MediaImage { F(%zux%zu) @%zu+%zu+%zu @%zu+%zu+%zu @%zu+%zu+%zu }",
                                    mComponentName.c_str(), img->mWidth, img->mHeight,
                                    img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,
                                    img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc,
                                    img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc);
                        }
                    }

                    if (portIndex != kPortIndexOutput) {
                        // TODO: also get input crop
                        break;
                    }

                    OMX_CONFIG_RECTTYPE rect;
                    InitOMXParams(&rect);
                    rect.nPortIndex = portIndex;

                    // default crop to the full frame if the component does
                    // not report one
                    if (mOMX->getConfig(
                            mNode,
                            (portIndex == kPortIndexOutput ?
                                    OMX_IndexConfigCommonOutputCrop :
                                    OMX_IndexConfigCommonInputCrop),
                            &rect, sizeof(rect)) != OK) {
                        rect.nLeft = 0;
                        rect.nTop = 0;
                        rect.nWidth = videoDef->nFrameWidth;
                        rect.nHeight = videoDef->nFrameHeight;
                    }

                    // the crop rectangle must lie within the frame
                    CHECK_GE(rect.nLeft, 0);
                    CHECK_GE(rect.nTop, 0);
                    CHECK_GE(rect.nWidth, 0u);
                    CHECK_GE(rect.nHeight, 0u);
                    CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
                    CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);

                    // note: "crop" uses inclusive right/bottom coordinates
                    notify->setRect(
                            "crop",
                            rect.nLeft,
                            rect.nTop,
                            rect.nLeft + rect.nWidth - 1,
                            rect.nTop + rect.nHeight - 1);

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // report the temporal-layering schema, if configured
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    // compressed video: encoder output or decoder input
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexInput));
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                        videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioPcm,
                                &params, sizeof(params)),
                             (status_t)OK);

                    // only 16-bit signed interleaved linear PCM is supported
                    CHECK_GT(params.nChannels, 0);
                    CHECK(params.nChannels == 1 || params.bInterleaved);
                    CHECK_EQ(params.nBitPerSample, 16u);

                    CHECK_EQ((int)params.eNumData,
                             (int)OMX_NumericalDataSigned);

                    CHECK_EQ((int)params.ePCMMode,
                             (int)OMX_AUDIO_PCMModeLinear);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioAac,
                                &params, sizeof(params)),
                             (status_t)OK);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioAmr,
                                &params, sizeof(params)),
                             (status_t)OK);

                    // AMR is always mono; the band mode selects NB vs WB
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString(
                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);

                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString(
                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);

                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioFlac,
                                &params, sizeof(params)),
                             (status_t)OK);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioMp3,
                                &params, sizeof(params)),
                             (status_t)OK);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioVorbis,
                                &params, sizeof(params)),
                             (status_t)OK);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ((status_t)OK, mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params,
                            sizeof(params)));

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ((status_t)OK, mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params,
                            sizeof(params)));

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ((status_t)OK, mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params,
                            sizeof(params)));

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ((status_t)OK, mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamAudioPcm,
                            &params,
                            sizeof(params)));

                    // G.711 variant is encoded in the PCM mode field
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    // NOTE(review): this queries OMX_IndexParamAudioPcm but
                    // reads the result through an OMX_AUDIO_PARAM_MP3TYPE
                    // struct — the struct/index mismatch looks suspicious;
                    // confirm against the GSM component's expectations.
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    CHECK_EQ(mOMX->getParameter(
                                mNode, OMX_IndexParamAudioPcm,
                                &params, sizeof(params)),
                             (status_t)OK);

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                default:
                    ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding);
                    TRESPASS();
            }
            break;
        }

        default:
            TRESPASS();
    }

    return OK;
}
CHECK(notify->findInt32("channel-count", &channelCount)); 3857 size_t frameSize = channelCount * sizeof(int16_t); 3858 if (mSkipCutBuffer != NULL) { 3859 size_t prevbufsize = mSkipCutBuffer->size(); 3860 if (prevbufsize != 0) { 3861 ALOGW("Replacing SkipCutBuffer holding %d " 3862 "bytes", 3863 prevbufsize); 3864 } 3865 } 3866 mSkipCutBuffer = new SkipCutBuffer( 3867 mEncoderDelay * frameSize, 3868 mEncoderPadding * frameSize); 3869 } 3870 3871 notify->post(); 3872 3873 mSentFormat = true; 3874} 3875 3876void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 3877 sp<AMessage> notify = mNotify->dup(); 3878 notify->setInt32("what", CodecBase::kWhatError); 3879 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 3880 3881 if (internalError == UNKNOWN_ERROR) { // find better error code 3882 const status_t omxStatus = statusFromOMXError(error); 3883 if (omxStatus != 0) { 3884 internalError = omxStatus; 3885 } else { 3886 ALOGW("Invalid OMX error %#x", error); 3887 } 3888 } 3889 notify->setInt32("err", internalError); 3890 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 3891 notify->post(); 3892} 3893 3894status_t ACodec::pushBlankBuffersToNativeWindow() { 3895 status_t err = NO_ERROR; 3896 ANativeWindowBuffer* anb = NULL; 3897 int numBufs = 0; 3898 int minUndequeuedBufs = 0; 3899 3900 // We need to reconnect to the ANativeWindow as a CPU client to ensure that 3901 // no frames get dropped by SurfaceFlinger assuming that these are video 3902 // frames. 
// Reconnects to the native window as a CPU client, pushes enough blank
// (black) 1x1 RGBX buffers to guarantee no stale video frame remains on
// screen, then reconnects as a media client. Returns the first error
// encountered; on error the CPU connection is torn down best-effort.
status_t ACodec::pushBlankBuffersToNativeWindow() {
    status_t err = NO_ERROR;
    ANativeWindowBuffer* anb = NULL;
    int numBufs = 0;
    int minUndequeuedBufs = 0;

    // We need to reconnect to the ANativeWindow as a CPU client to ensure that
    // no frames get dropped by SurfaceFlinger assuming that these are video
    // frames.
    err = native_window_api_disconnect(mNativeWindow.get(),
            NATIVE_WINDOW_API_MEDIA);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    err = native_window_api_connect(mNativeWindow.get(),
            NATIVE_WINDOW_API_CPU);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    err = native_window_set_buffers_geometry(mNativeWindow.get(), 1, 1,
            HAL_PIXEL_FORMAT_RGBX_8888);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: set_buffers_geometry failed: %s (%d)",
                strerror(-err), -err);
        goto error;
    }

    err = native_window_set_scaling_mode(mNativeWindow.get(),
                NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank_frames: set_scaling_mode failed: %s (%d)",
              strerror(-err), -err);
        goto error;
    }

    err = native_window_set_usage(mNativeWindow.get(),
            GRALLOC_USAGE_SW_WRITE_OFTEN);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: set_usage failed: %s (%d)",
                strerror(-err), -err);
        goto error;
    }

    err = mNativeWindow->query(mNativeWindow.get(),
            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query "
                "failed: %s (%d)", strerror(-err), -err);
        goto error;
    }

    numBufs = minUndequeuedBufs + 1;
    err = native_window_set_buffer_count(mNativeWindow.get(), numBufs);
    if (err != NO_ERROR) {
        ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)",
                strerror(-err), -err);
        goto error;
    }

    // We push numBufs + 1 buffers to ensure that we've drawn into the same
    // buffer twice.  This should guarantee that the buffer has been displayed
    // on the screen and then been replaced, so any previous video frames are
    // guaranteed NOT to be currently displayed.
    for (int i = 0; i < numBufs + 1; i++) {
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb);
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
                    strerror(-err), -err);
            goto error;
        }

        sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));

        // Fill the single 1x1 pixel with black (RGBX 0).
        uint32_t* img = NULL;
        err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: lock failed: %s (%d)",
                    strerror(-err), -err);
            goto error;
        }

        *img = 0;

        err = buf->unlock();
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: unlock failed: %s (%d)",
                    strerror(-err), -err);
            goto error;
        }

        err = mNativeWindow->queueBuffer(mNativeWindow.get(),
                buf->getNativeBuffer(), -1);
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)",
                    strerror(-err), -err);
            goto error;
        }

        anb = NULL;
    }

error:

    if (err != NO_ERROR) {
        // Clean up after an error.
        if (anb != NULL) {
            mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1);
        }

        // best effort: restore the media connection, ignoring errors
        native_window_api_disconnect(mNativeWindow.get(),
                NATIVE_WINDOW_API_CPU);
        native_window_api_connect(mNativeWindow.get(),
                NATIVE_WINDOW_API_MEDIA);

        return err;
    } else {
        // Clean up after success.
        err = native_window_api_disconnect(mNativeWindow.get(),
                NATIVE_WINDOW_API_CPU);
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)",
                    strerror(-err), -err);
            return err;
        }

        err = native_window_api_connect(mNativeWindow.get(),
                NATIVE_WINDOW_API_MEDIA);
        if (err != NO_ERROR) {
            ALOGE("error pushing blank frames: api_connect failed: %s (%d)",
                    strerror(-err), -err);
            return err;
        }

        return NO_ERROR;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Requests that the encoder emit an IDR (sync) frame as soon as possible.
// Only valid for encoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records an (id, buffer) pair; the two vectors stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode for states; specific states override this to release
// or resubmit buffers instead of holding on to them.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}
ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 4092 switch (msg->what()) { 4093 case kWhatInputBufferFilled: 4094 { 4095 onInputBufferFilled(msg); 4096 break; 4097 } 4098 4099 case kWhatOutputBufferDrained: 4100 { 4101 onOutputBufferDrained(msg); 4102 break; 4103 } 4104 4105 case ACodec::kWhatOMXMessage: 4106 { 4107 return onOMXMessage(msg); 4108 } 4109 4110 case ACodec::kWhatCreateInputSurface: 4111 case ACodec::kWhatSignalEndOfInputStream: 4112 { 4113 // This may result in an app illegal state exception. 4114 ALOGE("Message 0x%x was not handled", msg->what()); 4115 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 4116 return true; 4117 } 4118 4119 case ACodec::kWhatOMXDied: 4120 { 4121 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 4122 ALOGE("OMX/mediaserver died, signalling error!"); 4123 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 4124 break; 4125 } 4126 4127 case ACodec::kWhatReleaseCodecInstance: 4128 { 4129 ALOGI("[%s] forcing the release of codec", 4130 mCodec->mComponentName.c_str()); 4131 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 4132 ALOGE_IF("[%s] failed to release codec instance: err=%d", 4133 mCodec->mComponentName.c_str(), err); 4134 sp<AMessage> notify = mCodec->mNotify->dup(); 4135 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4136 notify->post(); 4137 break; 4138 } 4139 4140 default: 4141 return false; 4142 } 4143 4144 return true; 4145} 4146 4147bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 4148 int32_t type; 4149 CHECK(msg->findInt32("type", &type)); 4150 4151 // there is a possibility that this is an outstanding message for a 4152 // codec that we have already destroyed 4153 if (mCodec->mNode == NULL) { 4154 ALOGI("ignoring message as already freed component: %s", 4155 msg->debugString().c_str()); 4156 return true; 4157 } 4158 4159 IOMX::node_id nodeID; 4160 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 4161 CHECK_EQ(nodeID, 
mCodec->mNode); 4162 4163 switch (type) { 4164 case omx_message::EVENT: 4165 { 4166 int32_t event, data1, data2; 4167 CHECK(msg->findInt32("event", &event)); 4168 CHECK(msg->findInt32("data1", &data1)); 4169 CHECK(msg->findInt32("data2", &data2)); 4170 4171 if (event == OMX_EventCmdComplete 4172 && data1 == OMX_CommandFlush 4173 && data2 == (int32_t)OMX_ALL) { 4174 // Use of this notification is not consistent across 4175 // implementations. We'll drop this notification and rely 4176 // on flush-complete notifications on the individual port 4177 // indices instead. 4178 4179 return true; 4180 } 4181 4182 return onOMXEvent( 4183 static_cast<OMX_EVENTTYPE>(event), 4184 static_cast<OMX_U32>(data1), 4185 static_cast<OMX_U32>(data2)); 4186 } 4187 4188 case omx_message::EMPTY_BUFFER_DONE: 4189 { 4190 IOMX::buffer_id bufferID; 4191 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 4192 4193 return onOMXEmptyBufferDone(bufferID); 4194 } 4195 4196 case omx_message::FILL_BUFFER_DONE: 4197 { 4198 IOMX::buffer_id bufferID; 4199 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 4200 4201 int32_t rangeOffset, rangeLength, flags; 4202 int64_t timeUs; 4203 4204 CHECK(msg->findInt32("range_offset", &rangeOffset)); 4205 CHECK(msg->findInt32("range_length", &rangeLength)); 4206 CHECK(msg->findInt32("flags", &flags)); 4207 CHECK(msg->findInt64("timestamp", &timeUs)); 4208 4209 return onOMXFillBufferDone( 4210 bufferID, 4211 (size_t)rangeOffset, (size_t)rangeLength, 4212 (OMX_U32)flags, 4213 timeUs); 4214 } 4215 4216 default: 4217 TRESPASS(); 4218 break; 4219 } 4220} 4221 4222bool ACodec::BaseState::onOMXEvent( 4223 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 4224 if (event != OMX_EventError) { 4225 ALOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)", 4226 mCodec->mComponentName.c_str(), event, data1, data2); 4227 4228 return false; 4229 } 4230 4231 ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1); 4232 4233 // verify OMX component sends back an error we 
expect. 4234 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 4235 if (!isOMXError(omxError)) { 4236 ALOGW("Invalid OMX error %#x", omxError); 4237 omxError = OMX_ErrorUndefined; 4238 } 4239 mCodec->signalError(omxError); 4240 4241 return true; 4242} 4243 4244bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) { 4245 ALOGV("[%s] onOMXEmptyBufferDone %p", 4246 mCodec->mComponentName.c_str(), bufferID); 4247 4248 BufferInfo *info = 4249 mCodec->findBufferByID(kPortIndexInput, bufferID); 4250 4251 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); 4252 info->mStatus = BufferInfo::OWNED_BY_US; 4253 4254 // We're in "store-metadata-in-buffers" mode, the underlying 4255 // OMX component had access to data that's implicitly refcounted 4256 // by this "MediaBuffer" object. Now that the OMX component has 4257 // told us that it's done with the input buffer, we can decrement 4258 // the mediaBuffer's reference count. 4259 info->mData->setMediaBufferBase(NULL); 4260 4261 PortMode mode = getPortMode(kPortIndexInput); 4262 4263 switch (mode) { 4264 case KEEP_BUFFERS: 4265 break; 4266 4267 case RESUBMIT_BUFFERS: 4268 postFillThisBuffer(info); 4269 break; 4270 4271 default: 4272 { 4273 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4274 TRESPASS(); // Not currently used 4275 break; 4276 } 4277 } 4278 4279 return true; 4280} 4281 4282void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 4283 if (mCodec->mPortEOS[kPortIndexInput]) { 4284 return; 4285 } 4286 4287 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 4288 4289 sp<AMessage> notify = mCodec->mNotify->dup(); 4290 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 4291 notify->setInt32("buffer-id", info->mBufferID); 4292 4293 info->mData->meta()->clear(); 4294 notify->setBuffer("buffer", info->mData); 4295 4296 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id()); 4297 reply->setInt32("buffer-id", info->mBufferID); 4298 4299 notify->setMessage("reply", 
// Handles the client's reply to kWhatFillThisBuffer: the input buffer comes
// back either filled with data, flagged EOS, or with an error. Depending on
// the current port mode the buffer is parked (KEEP_BUFFERS) or submitted to
// the OMX component (RESUBMIT_BUFFERS).
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    // A buffer explicitly tagged "eos" by the client also terminates input.
    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    // Buffer must round-trip: we gave it upstream, it must come back from there.
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM);

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // Hold on to the buffer; just latch EOS state if signalled.
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                int32_t isCSD;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                // The client may have handed us a different ABuffer than the
                // one backing this slot; copy its payload into our buffer.
                if (buffer != info->mData) {
                    // NOTE(review): bufferID is an integer handle logged with
                    // "%p" here and below — likely should be "%u"; confirm
                    // against IOMX::buffer_id.
                    ALOGV("[%s] Needs to copy input data for buffer %p. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mData.get());

                    CHECK_LE(buffer->size(), info->mData->capacity());
                    memcpy(info->mData->data(), buffer->data(), buffer->size());
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %p w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %p w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %p w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %p w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->mStoreMetaDataInOutputBuffers) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetaDataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetaDataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetaDataBuffer();
                    }
                }

                CHECK_EQ(mCodec->mOMX->emptyBuffer(
                            mCodec->mNode,
                            bufferID,
                            0,
                            buffer->size(),
                            flags,
                            timeUs),
                         (status_t)OK);

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS on the input port",
                         mCodec->mComponentName.c_str());

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // No payload (client returned an error): propagate EOS to the
                // component with an empty EOS-flagged buffer.
                if (err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port "
                         "due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %p signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                CHECK_EQ(mCodec->mOMX->emptyBuffer(
                            mCodec->mNode,
                            bufferID,
                            0,
                            0,
                            OMX_BUFFERFLAG_EOS,
                            0),
                         (status_t)OK);

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        default:
            CHECK_EQ((int)mode, (int)FREE_BUFFERS);
            break;
    }
}

// Offers one idle input buffer (if any) back to the client for filling.
// Does nothing once input EOS has been signalled.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        // Last OWNED_BY_US buffer wins; any one of them would do.
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}
// The component produced output into a buffer. Depending on the output port
// mode the buffer is parked (KEEP_BUFFERS), sent downstream for draining
// (RESUBMIT_BUFFERS), or freed (FREE_BUFFERS, during reconfiguration).
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // An empty non-EOS buffer (or any buffer after output EOS)
            // carries no data for the client — hand it straight back.
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                CHECK_EQ(mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID),
                         (status_t)OK);

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec->id());

            if (!mCodec->mSentFormat && rangeLength > 0) {
                mCodec->sendFormatChange(reply);
            }

            if (mCodec->mUseMetadataOnEncoderOutput) {
                // Metadata mode: the payload is a native_handle_t* stored
                // 4 bytes into the buffer; pass it out-of-band via meta().
                native_handle_t* handle =
                        *(native_handle_t**)(info->mData->data() + 4);
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else {
                info->mData->setRange(rangeOffset, rangeLength);
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                // Report EOS together with the result recorded at input EOS.
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        default:
        {
            CHECK_EQ((int)mode, (int)FREE_BUFFERS);

            CHECK_EQ((status_t)OK,
                     mCodec->freeBuffer(kPortIndexOutput, index));
            break;
        }
    }

    return true;
}

// The downstream client is done with an output buffer. If requested (and a
// native window is attached), queue the frame for display; otherwise drop
// it. The buffer (or a spare dequeued from the window) is then recycled
// according to the output port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);

    // Apply a pending crop rect (attached by sendFormatChange) before
    // queueing the frame.
    android_native_rect_t crop;
    if (msg->findRect("crop",
            &crop.left, &crop.top, &crop.right, &crop.bottom)) {
        CHECK_EQ(0, native_window_set_crop(
                mCodec->mNativeWindow.get(), &crop));
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // TODO: it seems like we should use the timestamp
            // in the (media)buffer as it potentially came from
            // an input surface, but we did not propagate it prior to
            // API 20. Perhaps check for target SDK version.
#if 0
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %" PRId64, timestampNs);
                timestampNs *= 1000;
            }
#endif
        }

        status_t err;
        // A failed timestamp set is logged but not fatal — the frame is
        // still queued.
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        if (err != OK) {
            ALOGW("failed to set buffer timestamp: %d", err);
        }

        if ((err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(),
                    info->mGraphicBuffer.get(), -1)) == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // Trace dropped frames (non-empty buffers not rendered).
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);

                    CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                             (status_t)OK);

                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                }
            }
            break;
        }

        default:
        {
            CHECK_EQ((int)mode, (int)FREE_BUFFERS);

            CHECK_EQ((status_t)OK,
                     mCodec->freeBuffer(kPortIndexOutput, index));
            break;
        }
    }
}
////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Uninitialized tears down everything acquired during allocation:
// the binder death link, the native window, and all OMX node state.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        IInterface::asBinder(mCodec->mOMX)->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mNativeWindow.clear();
    mCodec->mNode = NULL;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mUseMetadataOnEncoderOutput = 0;
    mCodec->mComponentName.clear();
}

// Uninitialized accepts setup/allocate requests; shutdown and flush are
// acknowledged immediately since there is no component to act on.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot convenience path: allocate + configure + start in sequence,
// aborting at the first failing step.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Connects to the OMX service and instantiates a component node, either by
// explicit name ("componentName") or by searching for codecs matching the
// "mime"/"encoder" pair. Transitions to Loaded on success.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == NULL);

    OMXClient client;
    CHECK_EQ(client.connect(), (status_t)OK);

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id());

    mDeathNotifier = new DeathNotifier(notify);
    if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Explicit component requested: look up only its quirks.
        ssize_t index = matchingCodecs.add();
        OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index);
        entry->mName = String8(componentName.c_str());

        if (!OMXCodec::findCodecQuirks(
                    componentName.c_str(), &entry->mQuirks)) {
            entry->mQuirks = 0;
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        OMXCodec::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                NULL,  // matchComponentName
                0,     // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = NULL;

    // Try candidates in preference order until one allocates successfully.
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs.itemAt(matchIndex).mName.string();
        quirks = matchingCodecs.itemAt(matchIndex).mQuirks;

        // Temporarily boost thread priority so component allocation isn't
        // starved; restored immediately afterwards.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = NULL;
    }

    if (node == NULL) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s'.",
                    encoder ? "en" : "de", mime.c_str());
        } else {
            ALOGE("Unable to instantiate codec '%s'.", componentName.c_str());
        }

        mCodec->signalError(OMX_ErrorComponentNotFound);
        return false;
    }

    notify = new AMessage(kWhatOMXMessage, mCodec->id());
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}
////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded resets all per-session stream state. If a shutdown was
// requested while we were getting here, it is completed now; any messages
// deferred during the transition are then replayed.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetaDataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the component should stay allocated for reuse)
// and acknowledges an explicit client-initiated shutdown.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Loaded handles configuration, input-surface creation, start and shutdown;
// flush is a no-op acknowledged immediately (nothing is streaming yet).
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Applies the client's format ("mime" plus codec-specific keys) to the
// component via configureCodec(); reports the resulting input/output
// formats on success, signals a sanitized error on failure.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != NULL);

    AString mime;
    CHECK(msg->findString("mime", &mime));

    status_t err = mCodec->configureCodec(mime.c_str(), msg);

    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}
// Creates an encoder input surface on the OMX node and applies any
// previously-configured surface options (repeat-frame delay, max PTS gap,
// time-lapse, create-suspended). All steps share one err: the first failure
// of surface creation aborts, but option failures after a successful
// creation are only logged — err is reset by each setInternalOption call, so
// the surface is still returned if the last attempted option succeeds.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err;

    err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput,
            &bufferProducer);

    if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
        }
    }

    if (err == OK && mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
        }
    }

    if (err == OK && mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Time-lapse option takes [timePerFrame, timePerCapture] as a pair.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
        }
    }

    if (err == OK && mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
        }
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}
// Kicks off the Loaded->Idle transition by sending the state-set command;
// buffers are allocated once LoadedToIdleState is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    CHECK_EQ(mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
             (status_t)OK);

    mCodec->changeState(mCodec->mLoadedToIdleState);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// On entry, allocate buffers on both ports (the component expects them
// before completing the Idle transition). On failure, report the error and
// fall back to Loaded.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
             "(error 0x%08x)",
             err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input-port buffers first, then output-port buffers; stops at
// the first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

// While transitioning, parameter changes and shutdowns are deferred until a
// stable state; flush is trivially complete since streaming hasn't begun.
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Once the component confirms it reached Idle, immediately request the
// Idle->Executing transition.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);

            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting),
                     (status_t)OK);

            mCodec->changeState(mCodec->mIdleToExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// Same deferral policy as LoadedToIdleState: hold parameter changes and
// shutdowns until a stable state; flush/resume are trivially satisfied.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}
5288 sp<AMessage> notify = mCodec->mNotify->dup(); 5289 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5290 notify->post(); 5291 5292 return true; 5293 } 5294 5295 case kWhatSignalEndOfInputStream: 5296 { 5297 mCodec->onSignalEndOfInputStream(); 5298 return true; 5299 } 5300 5301 default: 5302 return BaseState::onMessageReceived(msg); 5303 } 5304} 5305 5306bool ACodec::IdleToExecutingState::onOMXEvent( 5307 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5308 switch (event) { 5309 case OMX_EventCmdComplete: 5310 { 5311 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5312 CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting); 5313 5314 mCodec->mExecutingState->resume(); 5315 mCodec->changeState(mCodec->mExecutingState); 5316 5317 return true; 5318 } 5319 5320 default: 5321 return BaseState::onOMXEvent(event, data1, data2); 5322 } 5323} 5324 5325//////////////////////////////////////////////////////////////////////////////// 5326 5327ACodec::ExecutingState::ExecutingState(ACodec *codec) 5328 : BaseState(codec), 5329 mActive(false) { 5330} 5331 5332ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 5333 OMX_U32 /* portIndex */) { 5334 return RESUBMIT_BUFFERS; 5335} 5336 5337void ACodec::ExecutingState::submitOutputMetaBuffers() { 5338 // submit as many buffers as there are input buffers with the codec 5339 // in case we are in port reconfiguring 5340 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5341 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5342 5343 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 5344 if (mCodec->submitOutputMetaDataBuffer() != OK) 5345 break; 5346 } 5347 } 5348 5349 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 5350 mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); 5351} 5352 5353void ACodec::ExecutingState::submitRegularOutputBuffers() { 5354 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 5355 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            // With a native window attached, an output buffer may
            // legitimately be held by the window for display; only
            // buffers we own are (re)submitted to the component.
            CHECK(info->mStatus == BufferInfo::OWNED_BY_US
                    || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            // Without a native window every output buffer must be ours
            // at this point.
            CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
        }

        ALOGV("[%s] calling fillBuffer %p",
             mCodec->mComponentName.c_str(), info->mBufferID);

        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
                 (status_t)OK);

        // The component now owns this buffer until it comes back via
        // FILL_BUFFER_DONE.
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }
}

// Hands all output buffers we own back to the component; when output
// buffers carry metadata (mStoreMetaDataInOutputBuffers), the meta
// buffers are submitted as well.
void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->mStoreMetaDataInOutputBuffers) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer exchange with the component: submits output buffers
// to be filled and posts a "fill this buffer" request upstream for every
// input buffer we currently own. No-op if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.",
             mCodec->mComponentName.c_str());

        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

// Entering Executing: replay any messages that were deferred while a
// transition was in flight.
void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->processDeferredMessages();
}

// Message dispatch while executing: handles shutdown (-> ExecutingToIdle),
// flush (-> Flushing), resume, IDR requests, runtime parameter changes,
// EOS signalling and a meta-data-buffer EOS workaround; everything else
// falls through to BaseState.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            // Kick off the Executing -> Idle OMX state transition; the
            // ExecutingToIdleState waits for the CmdComplete event.
            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle),
                     (status_t)OK);

            mCodec->changeState(mCodec->mExecutingToIdleState);

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %d/%d input, %d/%d output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            // Flush both ports at once; FlushingState tracks per-port
            // completion.
            CHECK_EQ(mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandFlush, OMX_ALL),
                     (status_t)OK);

            mCodec->changeState(mCodec->mFlushingState);
            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            // Best effort: failure is logged but not propagated.
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the sender asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetaDataBufferIfEOS:
        {
            // While input has seen EOS but output has not, keep
            // re-submitting an output meta buffer so the component can
            // deliver the EOS on the output port.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetaDataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime-tunable parameters from |params|. Recognized keys:
//   "video-bitrate"      (int32) -> OMX_IndexConfigVideoBitrate on output
//   "skip-frames-before" (int64) -> INTERNAL_OPTION_START_TIME on input
//   "drop-input-frames"  (int32) -> INTERNAL_OPTION_SUSPEND on input
//   "request-sync"       (int32, value ignored) -> request an IDR frame
// Unrecognized keys are silently ignored; returns the first error hit.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Any non-zero value suspends input.
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    return OK;
}

// Tells the component that no more input is coming and notifies the
// client (kWhatSignaledInputEOS), attaching "err" only on failure.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

// OMX event handling while executing. A port-settings change on the
// output port either triggers a full output-port reconfiguration
// (disable port, free buffers, -> OutputPortSettingsChangedState) or,
// for a crop-only change, just marks the format as needing to be resent.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetaDataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
                // Crop change only: no buffer reallocation needed, just
                // resend the output format to the client.
                mCodec->mSentFormat = false;
            } else {
                // NOTE(review): "%08lx" with an OMX_U32 argument — on
                // LP64 builds %lx expects 64 bits; presumably benign in
                // the 32-bit OMX build, but verify.
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// During reconfiguration, output buffers coming back are freed (the port
// is being torn down) while input buffers keep circulating.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Flush/shutdown/resume/set-parameters requests cannot be serviced
// mid-reconfiguration; they are deferred and replayed once we are back
// in ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

// Drives the two-step output-port reconfiguration:
//  1. PortDisable complete -> re-enable the port and allocate fresh
//     output buffers (on failure, force a shutdown through LoadedState).
//  2. PortEnable complete -> resubmit output buffers if still active and
//     return to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);

                ALOGV("[%s] Output port now disabled.",
                        mCodec->mComponentName.c_str());

                CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty());
                mCodec->mDealer[kPortIndexOutput].clear();

                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput),
                         (status_t)OK);

                status_t err;
                if ((err = mCodec->allocateBuffersOnPort(
                                kPortIndexOutput)) != OK) {
                    ALOGE("Failed to allocate output port buffers after "
                         "port reconfiguration (error 0x%08x)",
                         err);

                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput);

                // Force the (possibly changed) output format to be
                // resent to the client.
                mCodec->mSentFormat = false;

                ALOGV("[%s] Output port now reenabled.",
                        mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// While transitioning Executing -> Idle, a flush is a caller error
// (TRESPASS) and a second shutdown request is a harmless no-op.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            TRESPASS();
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mSentFormat = false;
}

// Waits for the Idle transition to complete; port-settings-changed and
// buffer-flag events are irrelevant during teardown and swallowed.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle);

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component reports Idle AND every buffer has been returned to
// us, request the Idle -> Loaded transition, free all port buffers and
// move to IdleToLoadedState. Called from every path that can return a
// buffer (input filled, output drained, CmdComplete).
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        CHECK_EQ(mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded),
                 (status_t)OK);

        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK);
        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK);

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            mCodec->pushBlankBuffersToNativeWindow();
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Same policy as ExecutingToIdle: redundant shutdown is a no-op, flush
// after shutdown is a caller error.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            TRESPASS();
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completes the teardown: once the component reports Loaded we return to
// LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet);
            CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded);

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Track flush completion per port; both must complete before we can
    // leave this state.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// A shutdown arriving mid-flush is deferred until the flush finishes;
// a second flush request is already in progress and ignored.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            // NOTE(review): 'handled' is left false here even though the
            // message was deferred — presumably intentional at this
            // revision (later AOSP revisions set handled = true); verify
            // before relying on the return value.
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Tracks per-port flush completion (components may report per-port or a
// single OMX_ALL completion) and defers any port-settings change until
// after the flush by re-posting it as an OMX message.
bool ACodec::FlushingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    // NOTE(review): "%ld" with an OMX_U32 argument — on LP64 this format
    // mismatches; confirm against the build's OMX 32-bit compile mode.
    ALOGV("[%s] FlushingState onOMXEvent(%d,%ld)",
            mCodec->mComponentName.c_str(), event, data1);

    switch (event) {
        case OMX_EventCmdComplete:
        {
            CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush);

            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
                // Per-port completion: must not be reported twice.
                CHECK(!mFlushComplete[data2]);
                mFlushComplete[data2] = true;

                if (mFlushComplete[kPortIndexInput]
                        && mFlushComplete[kPortIndexOutput]) {
                    changeStateIfWeOwnAllBuffers();
                }
            } else {
                // OMX_ALL completion: both ports must already have
                // reported individually.
                CHECK_EQ(data2, OMX_ALL);
                CHECK(mFlushComplete[kPortIndexInput]);
                CHECK(mFlushComplete[kPortIndexOutput]);

                changeStateIfWeOwnAllBuffers();
            }

            return true;
        }

        case OMX_EventPortSettingsChanged:
        {
            // Re-post the event to ourselves so it is handled after the
            // flush completes (in ExecutingState).
            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id());
            msg->setInt32("type", omx_message::EVENT);
            msg->setInt32("node", mCodec->mNode);
            msg->setInt32("event", event);
            msg->setInt32("data1", data1);
            msg->setInt32("data2", data2);

            ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
                 mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    // Not reached: every switch branch above returns.
    return true;
}

void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// When both ports have flushed and every buffer is back with us (pulling
// buffers back from the native window if needed), notify the client that
// the flush completed, reset EOS bookkeeping and return to Executing.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

}  // namespace android