ACodec.cpp revision ab76066c11e988ca3d3a5d6d74dd510ae080322e
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <media/stagefright/ACodec.h> 28 29#include <binder/MemoryDealer.h> 30 31#include <media/stagefright/foundation/hexdump.h> 32#include <media/stagefright/foundation/ABuffer.h> 33#include <media/stagefright/foundation/ADebug.h> 34#include <media/stagefright/foundation/AMessage.h> 35 36#include <media/stagefright/BufferProducerWrapper.h> 37#include <media/stagefright/MediaCodecList.h> 38#include <media/stagefright/MediaDefs.h> 39#include <media/stagefright/NativeWindowWrapper.h> 40#include <media/stagefright/OMXClient.h> 41#include <media/stagefright/OMXCodec.h> 42 43#include <media/hardware/HardwareAPI.h> 44 45#include <OMX_AudioExt.h> 46#include <OMX_VideoExt.h> 47#include <OMX_Component.h> 48#include <OMX_IndexExt.h> 49 50#include "include/avc_utils.h" 51 52namespace android { 53 54// OMX errors are directly mapped into status_t range if 55// there is no corresponding MediaError status code. 56// Use the statusFromOMXError(int32_t omxError) function. 57// 58// Currently this is a direct map. 
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors from 0x90000000 - 0x9000FFFF
// Extension OMX errors from 0x8F000000 - 0x90000000
// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            // OMX errors in the recognized range pass through unchanged;
            // anything else maps to 0 (== OK / "no translation required").
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Fills in the common OMX struct header: size and the IL version (1.0.0.0)
// that this client speaks. Must be called on every OMX param/config struct
// before passing it to get/setParameter.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

// Binder-side observer registered with the OMX service. Each incoming
// omx_message is marshalled into a dup of the notification AMessage
// (field names "type"/"node"/"event"/"buffer"/... are a contract with the
// receiving handler) and posted to ACodec's looper thread.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessage(const omx_message &omx_msg) {
        sp<AMessage> msg = mNotify->dup();

        msg->setInt32("type", omx_msg.type);
        msg->setInt32("node", omx_msg.node);

        switch (omx_msg.type) {
            case omx_message::EVENT:
            {
                msg->setInt32("event", omx_msg.u.event_data.event);
                msg->setInt32("data1", omx_msg.u.event_data.data1);
                msg->setInt32("data2", omx_msg.u.event_data.data2);
                break;
            }

            case omx_message::EMPTY_BUFFER_DONE:
            {
                msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                break;
            }

            case omx_message::FILL_BUFFER_DONE:
            {
                msg->setInt32(
                        "buffer", omx_msg.u.extended_buffer_data.buffer);
                msg->setInt32(
                        "range_offset",
                        omx_msg.u.extended_buffer_data.range_offset);
                msg->setInt32(
                        "range_length",
                        omx_msg.u.extended_buffer_data.range_length);
                msg->setInt32(
                        "flags",
                        omx_msg.u.extended_buffer_data.flags);
                msg->setInt64(
                        "timestamp",
                        omx_msg.u.extended_buffer_data.timestamp);
                break;
            }

            default:
                // Unknown message types are a programming error.
                TRESPASS();
                break;
        }

        msg->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all of ACodec's state-machine states: dispatches looper
// messages and OMX callbacks, and implements the default buffer-shuttling
// behavior that concrete states refine via getPortMode().
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const
sp<AState> &parentState = NULL);

protected:
    // How a state treats buffers returned by the component / client.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    bool onOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the given notification message when the watched binder (the OMX
// service) dies, so ACodec can react to a mediaserver crash.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// Initial state: no OMX component allocated yet. Handles setup and
// allocateComponent requests and installs mDeathNotifier on the OMX binder.
struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated (OMX "Loaded") but not yet configured/started.
struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec
*codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    // UninitializedState transitions into us directly on allocation.
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to reach OMX_StateIdle
// after buffers have been allocated on both ports.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to reach OMX_StateExecuting.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: buffers circulate between client, ACodec and the component.
struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered while the output port is being reconfigured (e.g. on a video
// resolution change); output buffers are torn down and reallocated.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Shutdown leg 1: waiting for the component to return to OMX_StateIdle and
// for all buffers to come back to us.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Shutdown leg 2: waiting for the component to reach OMX_StateLoaded.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1,
OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Handles a flush request: waits for flush-complete events on both ports
// (tracked per-port in mFlushComplete) and for all buffers to return.
struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2]; // indexed by kPortIndexInput / kPortIndexOutput

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Constructs the codec with all state objects pre-allocated and enters
// UninitializedState. The -1ll defaults mean "not configured".
ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mSentFormat(false),
      mIsEncoder(false),
      mUseMetadataOnEncoderOutput(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mStoreMetaDataInOutputBuffers(false),
      mMetaDataBuffersToSubmit(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] =
mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

// Sets the template message dup'ed for every notification to the client.
void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// All of the initiate*/signal* entry points below are asynchronous: they
// only post a message to ACodec's looper; the actual work happens in the
// current state's onMessageReceived on the looper thread.

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(id());
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(id());
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(id());
    msg->post();
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, id()))->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, id()))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, id()))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, id()))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, id()))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, id());
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, id()))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port.
// For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetaDataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post();
    }
}

// Allocates all buffers for the given port and notifies the client with a
// kWhatBuffersAllocated message carrying the port's buffer descriptions.
// Output-port buffers backed by a native window are delegated to the
// allocateOutput*() helpers; otherwise buffers come from a MemoryDealer
// (or directly from the component for secure input / meta-mode encoder
// output, where the client must not see the backing memory).
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    // Must not be called while buffers are already live on this port.
    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (mStoreMetaDataInOutputBuffers) {
            err = allocateOutputMetaDataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            ALOGV("[%s] Allocating %u buffers of size %u on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, def.nBufferSize,
                    portIndex == kPortIndexInput ? "input" : "output");

            // NOTE(review): nBufferCountActual * nBufferSize can overflow
            // size_t for hostile component-supplied values — TODO confirm
            // upstream validation.
            size_t totalSize = def.nBufferCountActual * def.nBufferSize;
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
                CHECK(mem.get() != NULL);

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? OMXCodec::kRequiresAllocateBufferOnInputPorts
                        : OMXCodec::kRequiresAllocateBufferOnOutputPorts;

                if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
                        || mUseMetadataOnEncoderOutput) {
                    // Secure input / meta output: let the component allocate;
                    // drop our shared-memory backing.
                    mem.clear();

                    void *ptr;
                    err = mOMX->allocateBuffer(
                            mNode, portIndex, def.nBufferSize, &info.mBufferID,
                            &ptr);

                    // Meta-mode encoder output carries only a type tag plus a
                    // buffer_handle_t, not full frame data.
                    int32_t bufSize = mUseMetadataOnEncoderOutput ?
                            (4 + sizeof(buffer_handle_t)) : def.nBufferSize;

                    info.mData = new ABuffer(ptr, bufSize);
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID);
                }

                if (mem != NULL) {
                    info.mData = new ABuffer(mem->pointer(), def.nBufferSize);
                }

                // NOTE(review): the loop continues even if err != OK for this
                // buffer; the failure is only acted on after the loop — verify
                // this is intended (partially-registered buffers are pushed).
                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Tell the client which buffer IDs/backings now exist on this port.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];

        desc->addBuffer(info.mBufferID, info.mData);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Negotiates the output buffer geometry/usage/count between the OMX
// component and the native window, returning the agreed bufferCount,
// bufferSize and the window's minimum-undequeued requirement. Does not
// dequeue or register any buffers itself.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    err = native_window_set_buffers_geometry(
            mNativeWindow.get(),
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat);

    if (err != 0) {
        ALOGE("native_window_set_buffers_geometry failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    if (mRotationDegrees != 0) {
        uint32_t transform = 0;
        switch (mRotationDegrees) {
            case 0: transform = 0; break;
            case 90: transform = HAL_TRANSFORM_ROT_90; break;
            case 180: transform = HAL_TRANSFORM_ROT_180; break;
            case 270: transform = HAL_TRANSFORM_ROT_270; break;
            default: transform = 0; break;
        }

        if (transform > 0) {
            err = native_window_set_buffers_transform(
                    mNativeWindow.get(), transform);
            if (err != 0) {
                ALOGE("native_window_set_buffers_transform failed: %s (%d)",
                        strerror(-err), -err);
                return err;
            }
        }
    }

    // Set up the native window.
    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }

    if (mFlags & kFlagIsSecure) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    // Make sure to check whether either Stagefright or the video decoder
    // requested protected buffers.
    if (usage & GRALLOC_USAGE_PROTECTED) {
        // Verify that the ANativeWindow sends images directly to
        // SurfaceFlinger.
        int queuesToNativeWindow = 0;
        err = mNativeWindow->query(
                mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
                &queuesToNativeWindow);
        if (err != 0) {
            ALOGE("error authenticating native window: %d", err);
            return err;
        }
        if (queuesToNativeWindow != 1) {
            ALOGE("native window could not be authenticated");
            return PERMISSION_DENIED;
        }
    }

    err = native_window_set_usage(
            mNativeWindow.get(),
            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);

    if (err != 0) {
        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // The loop retries with fewer extras each time the component rejects the
    // requested count.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues graphic buffers from the native window and registers each with
// the OMX component via useGraphicBuffer. On any failure, all buffers
// dequeued so far are cancelled back to the window; on success, only the
// window's required minimum-undequeued tail is returned.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        // The ABuffer carries no data; real pixels live in the GraphicBuffer.
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        status_t error = cancelBufferToNativeWindow(info);
        // Preserve the first error encountered (err == 0 means no prior one).
        if (err == 0) {
            err = error;
        }
    }

    return err;
}

// Meta-data output mode: instead of full frames, each OMX buffer is a small
// VideoDecoderOutputMetaData record; graphic buffers are dequeued lazily in
// dequeueBufferFromNativeWindow() rather than up front.
status_t ACodec::allocateOutputMetaDataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // NOTE(review): 8 is assumed to equal
    // sizeof(struct VideoDecoderOutputMetaData) used below — TODO confirm
    // this holds for the build configuration (see the 32-bit OMX define at
    // the top of the file).
    size_t totalSize = bufferCount * 8;
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        // No graphic buffer attached yet; it is bound on first dequeue.
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(
                sizeof(struct VideoDecoderOutputMetaData));
        CHECK(mem.get() != NULL);
        info.mData = new ABuffer(mem->pointer(), mem->size());

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID);

        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    // The window keeps its minimum; the rest are ours to submit.
    mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Submits one pending meta-data output buffer (freshly dequeued from the
// native window) to the component. No-op once the quota is exhausted.
status_t ACodec::submitOutputMetaDataBuffer() {
    CHECK(mStoreMetaDataInOutputBuffers);
    if (mMetaDataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL)
        return ERROR_IO;

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetaDataBuffersToSubmit;
    CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID),
             (status_t)OK);

    info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    return OK;
}

// Returns a buffer we own back to the native window (ownership transfers to
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer fails; failure is only logged).
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), -1);

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);

    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Dequeues the next buffer from the native window and maps it back to our
// BufferInfo. In meta-data mode the window may hand us a brand-new buffer;
// in that case the least-recently-dequeued tracked buffer is rebound to it.
// Returns NULL on dequeue failure or in tunneled mode.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    int fenceFd = -1; // NOTE(review): unused — dequeue_and_wait already waits.
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
             " video playback mode mode!");
        return NULL;
    }

    if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) {
        ALOGE("dequeueBuffer failed.");
        return NULL;
    }

    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // Exact handle match: this is one of our registered buffers.
        if (info->mGraphicBuffer != NULL &&
            info->mGraphicBuffer->handle == buf->handle) {
            CHECK_EQ((int)info->mStatus,
                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW);

            info->mStatus = BufferInfo::OWNED_BY_US;

            return info;
        }

        // Track the window-owned buffer with the greatest age as LRU victim.
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    if (oldest) {
        // Only meta-data mode may see unrecognized buffers from the window.
        CHECK(mStoreMetaDataInOutputBuffers);

        // discard buffer in LRU info and replace with new buffer
        oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
        oldest->mStatus = BufferInfo::OWNED_BY_US;

        mOMX->updateGraphicBufferInMeta(
                mNode, kPortIndexOutput, oldest->mGraphicBuffer,
                oldest->mBufferID);

        VideoDecoderOutputMetaData *metaData =
            reinterpret_cast<VideoDecoderOutputMetaData *>(
                    oldest->mData->base());
        CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);

        // NOTE(review): "%u" is used for a pointer difference (ptrdiff_t) in
        // the first argument — verbose-log only, but format-mismatched.
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                oldest - &mBuffers[kPortIndexOutput][0],
                mDequeueCounter - oldest->mDequeuedAt,
                metaData->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());

        return oldest;
    }

    // Window returned a buffer we neither track nor may rebind.
    TRESPASS();

    return NULL;
}

// Frees every buffer on the port (iterating backwards since freeBuffer
// removes entries) and drops the port's MemoryDealer.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) {
        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i));
    }

    mDealer[portIndex].clear();

    return OK;
}

// Frees only the output buffers currently in our (or the window's) hands.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
        }
    }

    return OK;
}

// Frees buffer i on the port: cancels it back to the native window if we
// own it, releases the OMX-side buffer, and removes our record of it.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

    CHECK(info->mStatus == BufferInfo::OWNED_BY_US
            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW);

    if (portIndex == kPortIndexOutput && mNativeWindow != NULL
            && info->mStatus == BufferInfo::OWNED_BY_US) {
        cancelBufferToNativeWindow(info);
    }

    CHECK_EQ(mOMX->freeBuffer(
                mNode, portIndex, info->mBufferID),
             (status_t)OK);

    mBuffers[portIndex].removeAt(i);

    return OK;
}

// Looks up the BufferInfo for an OMX buffer id on the given port; optionally
// returns its index. An unknown id is a fatal inconsistency (TRESPASS).
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID,
        ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    TRESPASS();

    return NULL;
}

// Maps a mime type to the standard OMX component role string
// ("audio_decoder.aac", "video_encoder.avc", ...) and sets it on the
// component via OMX_IndexParamStandardComponentRole. Returns
// ERROR_UNSUPPORTED for an unknown mime type.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return ERROR_UNSUPPORTED;
    }

    const char *role =
        isEncoder ? kMimeToRole[i].encoderRole
                  : kMimeToRole[i].decoderRole;

    if (role != NULL) {
        OMX_PARAM_COMPONENTROLETYPE roleParams;
        InitOMXParams(&roleParams);

        strncpy((char *)roleParams.cRole,
                role, OMX_MAX_STRINGNAME_SIZE - 1);

        roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

        status_t err = mOMX->setParameter(
                mNode, OMX_IndexParamStandardComponentRole,
                &roleParams, sizeof(roleParams));

        if (err != OK) {
            ALOGW("[%s] Failed to set standard component role '%s'.",
                 mComponentName.c_str(), role);

            return err;
        }
    }

    return OK;
}

status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage();
    sp<AMessage> outputFormat = new AMessage();

    mIsEncoder = encoder;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    int32_t storeMeta;
    if (encoder
            && msg->findInt32("store-metadata-in-buffers", &storeMeta)
            && storeMeta != 0) {
        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);

        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                  mComponentName.c_str(), err);

            return err;
        }
    }

    int32_t prependSPSPPS = 0;
    if (encoder
            && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
            && prependSPSPPS != 0) {
        OMX_INDEXTYPE index;
        err = mOMX->getExtensionIndex(
                mNode,
                "OMX.google.android.index.prependSPSPPSToIDRFrames",
1195 &index); 1196 1197 if (err == OK) { 1198 PrependSPSPPSToIDRFramesParams params; 1199 InitOMXParams(¶ms); 1200 params.bEnable = OMX_TRUE; 1201 1202 err = mOMX->setParameter( 1203 mNode, index, ¶ms, sizeof(params)); 1204 } 1205 1206 if (err != OK) { 1207 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1208 "IDR frames. (err %d)", err); 1209 1210 return err; 1211 } 1212 } 1213 1214 // Only enable metadata mode on encoder output if encoder can prepend 1215 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1216 // opaque handle, to which we don't have access. 1217 int32_t video = !strncasecmp(mime, "video/", 6); 1218 if (encoder && video) { 1219 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1220 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1221 && storeMeta != 0); 1222 1223 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable); 1224 1225 if (err != OK) { 1226 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1227 mComponentName.c_str(), err); 1228 mUseMetadataOnEncoderOutput = 0; 1229 } else { 1230 mUseMetadataOnEncoderOutput = enable; 1231 } 1232 1233 if (!msg->findInt64( 1234 "repeat-previous-frame-after", 1235 &mRepeatFrameDelayUs)) { 1236 mRepeatFrameDelayUs = -1ll; 1237 } 1238 1239 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1240 mMaxPtsGapUs = -1ll; 1241 } 1242 1243 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1244 mTimePerCaptureUs = -1ll; 1245 } 1246 1247 if (!msg->findInt32( 1248 "create-input-buffers-suspended", 1249 (int32_t*)&mCreateInputBuffersSuspended)) { 1250 mCreateInputBuffersSuspended = false; 1251 } 1252 } 1253 1254 sp<RefBase> obj; 1255 int32_t haveNativeWindow = msg->findObject("native-window", &obj) && 1256 obj != NULL; 1257 mStoreMetaDataInOutputBuffers = false; 1258 if (video && !encoder) { 1259 inputFormat->setInt32("adaptive-playback", false); 1260 } 1261 if (!encoder && video && haveNativeWindow) { 1262 
sp<NativeWindowWrapper> windowWrapper( 1263 static_cast<NativeWindowWrapper *>(obj.get())); 1264 sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow(); 1265 1266 int32_t tunneled; 1267 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1268 tunneled != 0) { 1269 ALOGI("Configuring TUNNELED video playback."); 1270 mTunneled = true; 1271 1272 int32_t audioHwSync = 0; 1273 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1274 ALOGW("No Audio HW Sync provided for video tunnel"); 1275 } 1276 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1277 if (err != OK) { 1278 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1279 audioHwSync, nativeWindow.get()); 1280 return err; 1281 } 1282 1283 inputFormat->setInt32("adaptive-playback", true); 1284 } else { 1285 ALOGV("Configuring CPU controlled video playback."); 1286 mTunneled = false; 1287 1288 // Always try to enable dynamic output buffers on native surface 1289 err = mOMX->storeMetaDataInBuffers( 1290 mNode, kPortIndexOutput, OMX_TRUE); 1291 if (err != OK) { 1292 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1293 mComponentName.c_str(), err); 1294 1295 // if adaptive playback has been requested, try JB fallback 1296 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1297 // LARGE MEMORY REQUIREMENT 1298 1299 // we will not do adaptive playback on software accessed 1300 // surfaces as they never had to respond to changes in the 1301 // crop window, and we don't trust that they will be able to. 
1302 int usageBits = 0; 1303 bool canDoAdaptivePlayback; 1304 1305 if (nativeWindow->query( 1306 nativeWindow.get(), 1307 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1308 &usageBits) != OK) { 1309 canDoAdaptivePlayback = false; 1310 } else { 1311 canDoAdaptivePlayback = 1312 (usageBits & 1313 (GRALLOC_USAGE_SW_READ_MASK | 1314 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1315 } 1316 1317 int32_t maxWidth = 0, maxHeight = 0; 1318 if (canDoAdaptivePlayback && 1319 msg->findInt32("max-width", &maxWidth) && 1320 msg->findInt32("max-height", &maxHeight)) { 1321 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1322 mComponentName.c_str(), maxWidth, maxHeight); 1323 1324 err = mOMX->prepareForAdaptivePlayback( 1325 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1326 maxHeight); 1327 ALOGW_IF(err != OK, 1328 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1329 mComponentName.c_str(), err); 1330 1331 if (err == OK) { 1332 inputFormat->setInt32("max-width", maxWidth); 1333 inputFormat->setInt32("max-height", maxHeight); 1334 inputFormat->setInt32("adaptive-playback", true); 1335 } 1336 } 1337 // allow failure 1338 err = OK; 1339 } else { 1340 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1341 mComponentName.c_str()); 1342 mStoreMetaDataInOutputBuffers = true; 1343 inputFormat->setInt32("adaptive-playback", true); 1344 } 1345 1346 int32_t push; 1347 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1348 && push != 0) { 1349 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1350 } 1351 } 1352 1353 int32_t rotationDegrees; 1354 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1355 mRotationDegrees = rotationDegrees; 1356 } else { 1357 mRotationDegrees = 0; 1358 } 1359 } 1360 1361 if (video) { 1362 if (encoder) { 1363 err = setupVideoEncoder(mime, msg); 1364 } else { 1365 err = setupVideoDecoder(mime, msg); 1366 } 1367 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 1368 int32_t numChannels, sampleRate; 1369 if (!msg->findInt32("channel-count", 
&numChannels) 1370 || !msg->findInt32("sample-rate", &sampleRate)) { 1371 // Since we did not always check for these, leave them optional 1372 // and have the decoder figure it all out. 1373 err = OK; 1374 } else { 1375 err = setupRawAudioFormat( 1376 encoder ? kPortIndexInput : kPortIndexOutput, 1377 sampleRate, 1378 numChannels); 1379 } 1380 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 1381 int32_t numChannels, sampleRate; 1382 if (!msg->findInt32("channel-count", &numChannels) 1383 || !msg->findInt32("sample-rate", &sampleRate)) { 1384 err = INVALID_OPERATION; 1385 } else { 1386 int32_t isADTS, aacProfile; 1387 int32_t sbrMode; 1388 int32_t maxOutputChannelCount; 1389 drcParams_t drc; 1390 if (!msg->findInt32("is-adts", &isADTS)) { 1391 isADTS = 0; 1392 } 1393 if (!msg->findInt32("aac-profile", &aacProfile)) { 1394 aacProfile = OMX_AUDIO_AACObjectNull; 1395 } 1396 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 1397 sbrMode = -1; 1398 } 1399 1400 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 1401 maxOutputChannelCount = -1; 1402 } 1403 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 1404 // value is unknown 1405 drc.encodedTargetLevel = -1; 1406 } 1407 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 1408 // value is unknown 1409 drc.drcCut = -1; 1410 } 1411 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 1412 // value is unknown 1413 drc.drcBoost = -1; 1414 } 1415 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 1416 // value is unknown 1417 drc.heavyCompression = -1; 1418 } 1419 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 1420 // value is unknown 1421 drc.targetRefLevel = -1; 1422 } 1423 1424 err = setupAACCodec( 1425 encoder, numChannels, sampleRate, bitRate, aacProfile, 1426 isADTS != 0, sbrMode, maxOutputChannelCount, drc); 1427 } 1428 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 1429 err = 
setupAMRCodec(encoder, false /* isWAMR */, bitRate); 1430 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 1431 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 1432 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 1433 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 1434 // These are PCM-like formats with a fixed sample rate but 1435 // a variable number of channels. 1436 1437 int32_t numChannels; 1438 if (!msg->findInt32("channel-count", &numChannels)) { 1439 err = INVALID_OPERATION; 1440 } else { 1441 err = setupG711Codec(encoder, numChannels); 1442 } 1443 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 1444 int32_t numChannels, sampleRate, compressionLevel = -1; 1445 if (encoder && 1446 (!msg->findInt32("channel-count", &numChannels) 1447 || !msg->findInt32("sample-rate", &sampleRate))) { 1448 ALOGE("missing channel count or sample rate for FLAC encoder"); 1449 err = INVALID_OPERATION; 1450 } else { 1451 if (encoder) { 1452 if (!msg->findInt32( 1453 "complexity", &compressionLevel) && 1454 !msg->findInt32( 1455 "flac-compression-level", &compressionLevel)) { 1456 compressionLevel = 5; // default FLAC compression level 1457 } else if (compressionLevel < 0) { 1458 ALOGW("compression level %d outside [0..8] range, " 1459 "using 0", 1460 compressionLevel); 1461 compressionLevel = 0; 1462 } else if (compressionLevel > 8) { 1463 ALOGW("compression level %d outside [0..8] range, " 1464 "using 8", 1465 compressionLevel); 1466 compressionLevel = 8; 1467 } 1468 } 1469 err = setupFlacCodec( 1470 encoder, numChannels, sampleRate, compressionLevel); 1471 } 1472 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 1473 int32_t numChannels, sampleRate; 1474 if (encoder 1475 || !msg->findInt32("channel-count", &numChannels) 1476 || !msg->findInt32("sample-rate", &sampleRate)) { 1477 err = INVALID_OPERATION; 1478 } else { 1479 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels); 1480 } 1481 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 1482 int32_t numChannels; 1483 int32_t sampleRate; 1484 if (!msg->findInt32("channel-count", &numChannels) 1485 || !msg->findInt32("sample-rate", &sampleRate)) { 1486 err = INVALID_OPERATION; 1487 } else { 1488 err = setupAC3Codec(encoder, numChannels, sampleRate); 1489 } 1490 } 1491 1492 if (err != OK) { 1493 return err; 1494 } 1495 1496 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 1497 mEncoderDelay = 0; 1498 } 1499 1500 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 1501 mEncoderPadding = 0; 1502 } 1503 1504 if (msg->findInt32("channel-mask", &mChannelMask)) { 1505 mChannelMaskPresent = true; 1506 } else { 1507 mChannelMaskPresent = false; 1508 } 1509 1510 int32_t maxInputSize; 1511 if (msg->findInt32("max-input-size", &maxInputSize)) { 1512 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 1513 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 1514 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 1515 } 1516 1517 CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK); 1518 CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); 1519 mInputFormat = inputFormat; 1520 mOutputFormat = outputFormat; 1521 1522 return err; 1523} 1524 1525status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 1526 OMX_PARAM_PORTDEFINITIONTYPE def; 1527 InitOMXParams(&def); 1528 def.nPortIndex = portIndex; 1529 1530 status_t err = mOMX->getParameter( 1531 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1532 1533 if (err != OK) { 1534 return err; 1535 } 1536 1537 if (def.nBufferSize >= size) { 1538 return OK; 1539 } 1540 1541 def.nBufferSize = size; 1542 1543 err = mOMX->setParameter( 1544 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1545 1546 if (err != OK) { 1547 return err; 1548 } 1549 1550 err = mOMX->getParameter( 1551 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1552 1553 if (err != OK) { 1554 
return err; 1555 } 1556 1557 CHECK(def.nBufferSize >= size); 1558 1559 return OK; 1560} 1561 1562status_t ACodec::selectAudioPortFormat( 1563 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 1564 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 1565 InitOMXParams(&format); 1566 1567 format.nPortIndex = portIndex; 1568 for (OMX_U32 index = 0;; ++index) { 1569 format.nIndex = index; 1570 1571 status_t err = mOMX->getParameter( 1572 mNode, OMX_IndexParamAudioPortFormat, 1573 &format, sizeof(format)); 1574 1575 if (err != OK) { 1576 return err; 1577 } 1578 1579 if (format.eEncoding == desiredFormat) { 1580 break; 1581 } 1582 } 1583 1584 return mOMX->setParameter( 1585 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 1586} 1587 1588status_t ACodec::setupAACCodec( 1589 bool encoder, int32_t numChannels, int32_t sampleRate, 1590 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 1591 int32_t maxOutputChannelCount, const drcParams_t& drc) { 1592 if (encoder && isADTS) { 1593 return -EINVAL; 1594 } 1595 1596 status_t err = setupRawAudioFormat( 1597 encoder ? 
kPortIndexInput : kPortIndexOutput, 1598 sampleRate, 1599 numChannels); 1600 1601 if (err != OK) { 1602 return err; 1603 } 1604 1605 if (encoder) { 1606 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 1607 1608 if (err != OK) { 1609 return err; 1610 } 1611 1612 OMX_PARAM_PORTDEFINITIONTYPE def; 1613 InitOMXParams(&def); 1614 def.nPortIndex = kPortIndexOutput; 1615 1616 err = mOMX->getParameter( 1617 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1618 1619 if (err != OK) { 1620 return err; 1621 } 1622 1623 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 1624 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 1625 1626 err = mOMX->setParameter( 1627 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1628 1629 if (err != OK) { 1630 return err; 1631 } 1632 1633 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 1634 InitOMXParams(&profile); 1635 profile.nPortIndex = kPortIndexOutput; 1636 1637 err = mOMX->getParameter( 1638 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1639 1640 if (err != OK) { 1641 return err; 1642 } 1643 1644 profile.nChannels = numChannels; 1645 1646 profile.eChannelMode = 1647 (numChannels == 1) 1648 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 1649 1650 profile.nSampleRate = sampleRate; 1651 profile.nBitRate = bitRate; 1652 profile.nAudioBandWidth = 0; 1653 profile.nFrameLength = 0; 1654 profile.nAACtools = OMX_AUDIO_AACToolAll; 1655 profile.nAACERtools = OMX_AUDIO_AACERNone; 1656 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 1657 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 1658 switch (sbrMode) { 1659 case 0: 1660 // disable sbr 1661 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 1662 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 1663 break; 1664 case 1: 1665 // enable single-rate sbr 1666 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 1667 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 1668 break; 1669 case 2: 1670 // enable dual-rate sbr 1671 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 1672 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 1673 break; 1674 case -1: 1675 // enable both modes -> the codec will decide which mode should be used 1676 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 1677 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 1678 break; 1679 default: 1680 // unsupported sbr mode 1681 return BAD_VALUE; 1682 } 1683 1684 1685 err = mOMX->setParameter( 1686 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1687 1688 if (err != OK) { 1689 return err; 1690 } 1691 1692 return err; 1693 } 1694 1695 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 1696 InitOMXParams(&profile); 1697 profile.nPortIndex = kPortIndexInput; 1698 1699 err = mOMX->getParameter( 1700 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1701 1702 if (err != OK) { 1703 return err; 1704 } 1705 1706 profile.nChannels = numChannels; 1707 profile.nSampleRate = sampleRate; 1708 1709 profile.eAACStreamFormat = 1710 isADTS 1711 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 1712 : OMX_AUDIO_AACStreamFormatMP4FF; 1713 1714 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 1715 presentation.nMaxOutputChannels = maxOutputChannelCount; 1716 presentation.nDrcCut = drc.drcCut; 1717 presentation.nDrcBoost = drc.drcBoost; 1718 presentation.nHeavyCompression = drc.heavyCompression; 1719 presentation.nTargetReferenceLevel = drc.targetRefLevel; 1720 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 1721 1722 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 1723 if (res == OK) { 1724 // optional parameters, will not cause configuration failure 1725 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 1726 &presentation, sizeof(presentation)); 1727 } else { 1728 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 1729 } 1730 return res; 1731} 1732 1733status_t ACodec::setupAC3Codec( 1734 bool encoder, int32_t numChannels, int32_t sampleRate) { 1735 status_t err = setupRawAudioFormat( 1736 encoder ? 
                kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bits/sec) to the nearest AMR band mode at or
// above that rate; rates beyond the highest threshold get the top mode.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Selects the AMR band mode / frame format on the compressed port, then
// configures the PCM side at AMR's fixed rate (8 kHz NB, 16 kHz WB), mono.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// G.711 decode only (encode is unimplemented); fixed 8 kHz sample rate.
status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
    CHECK(!encoder); // XXX TODO

    return setupRawAudioFormat(
            kPortIndexInput, 8000 /* sampleRate */, numChannels);
}

// For the FLAC encoder, applies the compression level (0..8); in both
// directions configures the PCM side of the codec.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Puts |portIndex| into 16-bit signed interleaved linear PCM mode with
// the given sample rate and channel count.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    pcmParams.eNumData = OMX_NumericalDataSigned;
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nBitPerSample = 16;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    // NOTE(review): when no channel mapping exists for numChannels this
    // skips the PCM parameter update entirely and still reports success
    // (OMX_ErrorNone == 0 == OK). Looks like deliberate best-effort
    // behavior — confirm before relying on it.
    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        return OMX_ErrorNone;
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}

// Puts the output port into tunneled (sideband) mode and attaches the
// component-provided sideband handle to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the port's supported (compression, color) format pairs and
// selects the first one matching the request; flexible color formats are
// substituted with the concrete format the codec reports.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    OMX_U32 index = 0;
    for (;;) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // Enumeration exhausted (or failed) without a match.
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused &&
                isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, &flexibleEquivalent) &&
                colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        ++index;
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Accepts whatever uncompressed color format the component lists first on
// its output port.
status_t ACodec::setSupportedOutputFormat() {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;
    format.nIndex = 0;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
    CHECK_EQ(err, (status_t)OK);
    CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// mime <-> OMX video coding type table shared by the two lookup helpers.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
};

// Maps a video mime type to its OMX coding type; sets CodingUnused and
// returns ERROR_UNSUPPORTED for unknown mime types.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}
2064static status_t GetMimeTypeForVideoCoding( 2065 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 2066 for (size_t i = 0; 2067 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 2068 ++i) { 2069 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 2070 *mime = kVideoCodingMapEntry[i].mMime; 2071 return OK; 2072 } 2073 } 2074 2075 mime->clear(); 2076 2077 return ERROR_UNSUPPORTED; 2078} 2079 2080status_t ACodec::setupVideoDecoder( 2081 const char *mime, const sp<AMessage> &msg) { 2082 int32_t width, height; 2083 if (!msg->findInt32("width", &width) 2084 || !msg->findInt32("height", &height)) { 2085 return INVALID_OPERATION; 2086 } 2087 2088 OMX_VIDEO_CODINGTYPE compressionFormat; 2089 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 2090 2091 if (err != OK) { 2092 return err; 2093 } 2094 2095 err = setVideoPortFormatType( 2096 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 2097 2098 if (err != OK) { 2099 return err; 2100 } 2101 2102 int32_t tmp; 2103 if (msg->findInt32("color-format", &tmp)) { 2104 OMX_COLOR_FORMATTYPE colorFormat = 2105 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 2106 err = setVideoPortFormatType( 2107 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat); 2108 if (err != OK) { 2109 ALOGW("[%s] does not support color format %d", 2110 mComponentName.c_str(), colorFormat); 2111 err = setSupportedOutputFormat(); 2112 } 2113 } else { 2114 err = setSupportedOutputFormat(); 2115 } 2116 2117 if (err != OK) { 2118 return err; 2119 } 2120 2121 err = setVideoFormatOnPort( 2122 kPortIndexInput, width, height, compressionFormat); 2123 2124 if (err != OK) { 2125 return err; 2126 } 2127 2128 err = setVideoFormatOnPort( 2129 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 2130 2131 if (err != OK) { 2132 return err; 2133 } 2134 2135 return OK; 2136} 2137 2138status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) { 2139 int32_t tmp; 2140 if (!msg->findInt32("color-format", 
&tmp)) { 2141 return INVALID_OPERATION; 2142 } 2143 2144 OMX_COLOR_FORMATTYPE colorFormat = 2145 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 2146 2147 status_t err = setVideoPortFormatType( 2148 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 2149 2150 if (err != OK) { 2151 ALOGE("[%s] does not support color format %d", 2152 mComponentName.c_str(), colorFormat); 2153 2154 return err; 2155 } 2156 2157 /* Input port configuration */ 2158 2159 OMX_PARAM_PORTDEFINITIONTYPE def; 2160 InitOMXParams(&def); 2161 2162 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2163 2164 def.nPortIndex = kPortIndexInput; 2165 2166 err = mOMX->getParameter( 2167 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2168 2169 if (err != OK) { 2170 return err; 2171 } 2172 2173 int32_t width, height, bitrate; 2174 if (!msg->findInt32("width", &width) 2175 || !msg->findInt32("height", &height) 2176 || !msg->findInt32("bitrate", &bitrate)) { 2177 return INVALID_OPERATION; 2178 } 2179 2180 video_def->nFrameWidth = width; 2181 video_def->nFrameHeight = height; 2182 2183 int32_t stride; 2184 if (!msg->findInt32("stride", &stride)) { 2185 stride = width; 2186 } 2187 2188 video_def->nStride = stride; 2189 2190 int32_t sliceHeight; 2191 if (!msg->findInt32("slice-height", &sliceHeight)) { 2192 sliceHeight = height; 2193 } 2194 2195 video_def->nSliceHeight = sliceHeight; 2196 2197 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 2198 2199 float frameRate; 2200 if (!msg->findFloat("frame-rate", &frameRate)) { 2201 int32_t tmp; 2202 if (!msg->findInt32("frame-rate", &tmp)) { 2203 return INVALID_OPERATION; 2204 } 2205 frameRate = (float)tmp; 2206 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 2207 } 2208 2209 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 2210 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 2211 video_def->eColorFormat = colorFormat; 2212 2213 err = mOMX->setParameter( 2214 mNode, OMX_IndexParamPortDefinition, &def, 
sizeof(def)); 2215 2216 if (err != OK) { 2217 ALOGE("[%s] failed to set input port definition parameters.", 2218 mComponentName.c_str()); 2219 2220 return err; 2221 } 2222 2223 /* Output port configuration */ 2224 2225 OMX_VIDEO_CODINGTYPE compressionFormat; 2226 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 2227 2228 if (err != OK) { 2229 return err; 2230 } 2231 2232 err = setVideoPortFormatType( 2233 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 2234 2235 if (err != OK) { 2236 ALOGE("[%s] does not support compression format %d", 2237 mComponentName.c_str(), compressionFormat); 2238 2239 return err; 2240 } 2241 2242 def.nPortIndex = kPortIndexOutput; 2243 2244 err = mOMX->getParameter( 2245 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2246 2247 if (err != OK) { 2248 return err; 2249 } 2250 2251 video_def->nFrameWidth = width; 2252 video_def->nFrameHeight = height; 2253 video_def->xFramerate = 0; 2254 video_def->nBitrate = bitrate; 2255 video_def->eCompressionFormat = compressionFormat; 2256 video_def->eColorFormat = OMX_COLOR_FormatUnused; 2257 2258 err = mOMX->setParameter( 2259 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2260 2261 if (err != OK) { 2262 ALOGE("[%s] failed to set output port definition parameters.", 2263 mComponentName.c_str()); 2264 2265 return err; 2266 } 2267 2268 switch (compressionFormat) { 2269 case OMX_VIDEO_CodingMPEG4: 2270 err = setupMPEG4EncoderParameters(msg); 2271 break; 2272 2273 case OMX_VIDEO_CodingH263: 2274 err = setupH263EncoderParameters(msg); 2275 break; 2276 2277 case OMX_VIDEO_CodingAVC: 2278 err = setupAVCEncoderParameters(msg); 2279 break; 2280 2281 case OMX_VIDEO_CodingHEVC: 2282 err = setupHEVCEncoderParameters(msg); 2283 break; 2284 2285 case OMX_VIDEO_CodingVP8: 2286 case OMX_VIDEO_CodingVP9: 2287 err = setupVPXEncoderParameters(msg); 2288 break; 2289 2290 default: 2291 break; 2292 } 2293 2294 ALOGI("setupVideoEncoder succeeded"); 2295 2296 return err; 2297} 2298 
2299status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 2300 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 2301 InitOMXParams(¶ms); 2302 params.nPortIndex = kPortIndexOutput; 2303 2304 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 2305 2306 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 2307 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 2308 int32_t mbs; 2309 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 2310 return INVALID_OPERATION; 2311 } 2312 params.nCirMBs = mbs; 2313 } 2314 2315 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 2316 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 2317 int32_t mbs; 2318 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 2319 return INVALID_OPERATION; 2320 } 2321 params.nAirMBs = mbs; 2322 2323 int32_t ref; 2324 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 2325 return INVALID_OPERATION; 2326 } 2327 params.nAirRef = ref; 2328 } 2329 2330 status_t err = mOMX->setParameter( 2331 mNode, OMX_IndexParamVideoIntraRefresh, 2332 ¶ms, sizeof(params)); 2333 return err; 2334} 2335 2336static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 2337 if (iFramesInterval < 0) { 2338 return 0xFFFFFFFF; 2339 } else if (iFramesInterval == 0) { 2340 return 0; 2341 } 2342 OMX_U32 ret = frameRate * iFramesInterval; 2343 return ret; 2344} 2345 2346static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 2347 int32_t tmp; 2348 if (!msg->findInt32("bitrate-mode", &tmp)) { 2349 return OMX_Video_ControlRateVariable; 2350 } 2351 2352 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 2353} 2354 2355status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 2356 int32_t bitrate, iFrameInterval; 2357 if (!msg->findInt32("bitrate", &bitrate) 2358 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2359 return INVALID_OPERATION; 2360 } 2361 2362 OMX_VIDEO_CONTROLRATETYPE bitrateMode = 
getBitrateMode(msg); 2363 2364 float frameRate; 2365 if (!msg->findFloat("frame-rate", &frameRate)) { 2366 int32_t tmp; 2367 if (!msg->findInt32("frame-rate", &tmp)) { 2368 return INVALID_OPERATION; 2369 } 2370 frameRate = (float)tmp; 2371 } 2372 2373 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 2374 InitOMXParams(&mpeg4type); 2375 mpeg4type.nPortIndex = kPortIndexOutput; 2376 2377 status_t err = mOMX->getParameter( 2378 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 2379 2380 if (err != OK) { 2381 return err; 2382 } 2383 2384 mpeg4type.nSliceHeaderSpacing = 0; 2385 mpeg4type.bSVH = OMX_FALSE; 2386 mpeg4type.bGov = OMX_FALSE; 2387 2388 mpeg4type.nAllowedPictureTypes = 2389 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2390 2391 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2392 if (mpeg4type.nPFrames == 0) { 2393 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2394 } 2395 mpeg4type.nBFrames = 0; 2396 mpeg4type.nIDCVLCThreshold = 0; 2397 mpeg4type.bACPred = OMX_TRUE; 2398 mpeg4type.nMaxPacketSize = 256; 2399 mpeg4type.nTimeIncRes = 1000; 2400 mpeg4type.nHeaderExtension = 0; 2401 mpeg4type.bReversibleVLC = OMX_FALSE; 2402 2403 int32_t profile; 2404 if (msg->findInt32("profile", &profile)) { 2405 int32_t level; 2406 if (!msg->findInt32("level", &level)) { 2407 return INVALID_OPERATION; 2408 } 2409 2410 err = verifySupportForProfileAndLevel(profile, level); 2411 2412 if (err != OK) { 2413 return err; 2414 } 2415 2416 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 2417 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 2418 } 2419 2420 err = mOMX->setParameter( 2421 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 2422 2423 if (err != OK) { 2424 return err; 2425 } 2426 2427 err = configureBitrate(bitrate, bitrateMode); 2428 2429 if (err != OK) { 2430 return err; 2431 } 2432 2433 return setupErrorCorrectionParameters(); 2434} 2435 2436status_t 
ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 2437 int32_t bitrate, iFrameInterval; 2438 if (!msg->findInt32("bitrate", &bitrate) 2439 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2440 return INVALID_OPERATION; 2441 } 2442 2443 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2444 2445 float frameRate; 2446 if (!msg->findFloat("frame-rate", &frameRate)) { 2447 int32_t tmp; 2448 if (!msg->findInt32("frame-rate", &tmp)) { 2449 return INVALID_OPERATION; 2450 } 2451 frameRate = (float)tmp; 2452 } 2453 2454 OMX_VIDEO_PARAM_H263TYPE h263type; 2455 InitOMXParams(&h263type); 2456 h263type.nPortIndex = kPortIndexOutput; 2457 2458 status_t err = mOMX->getParameter( 2459 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 2460 2461 if (err != OK) { 2462 return err; 2463 } 2464 2465 h263type.nAllowedPictureTypes = 2466 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2467 2468 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2469 if (h263type.nPFrames == 0) { 2470 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2471 } 2472 h263type.nBFrames = 0; 2473 2474 int32_t profile; 2475 if (msg->findInt32("profile", &profile)) { 2476 int32_t level; 2477 if (!msg->findInt32("level", &level)) { 2478 return INVALID_OPERATION; 2479 } 2480 2481 err = verifySupportForProfileAndLevel(profile, level); 2482 2483 if (err != OK) { 2484 return err; 2485 } 2486 2487 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 2488 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 2489 } 2490 2491 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 2492 h263type.bForceRoundingTypeToZero = OMX_FALSE; 2493 h263type.nPictureHeaderRepetition = 0; 2494 h263type.nGOBHeaderInterval = 0; 2495 2496 err = mOMX->setParameter( 2497 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 2498 2499 if (err != OK) { 2500 return err; 2501 } 2502 2503 err = configureBitrate(bitrate, bitrateMode); 2504 2505 if (err != OK) { 
2506 return err; 2507 } 2508 2509 return setupErrorCorrectionParameters(); 2510} 2511 2512status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 2513 int32_t bitrate, iFrameInterval; 2514 if (!msg->findInt32("bitrate", &bitrate) 2515 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2516 return INVALID_OPERATION; 2517 } 2518 2519 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2520 2521 float frameRate; 2522 if (!msg->findFloat("frame-rate", &frameRate)) { 2523 int32_t tmp; 2524 if (!msg->findInt32("frame-rate", &tmp)) { 2525 return INVALID_OPERATION; 2526 } 2527 frameRate = (float)tmp; 2528 } 2529 2530 status_t err = OK; 2531 int32_t intraRefreshMode = 0; 2532 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 2533 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 2534 if (err != OK) { 2535 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 2536 err, intraRefreshMode); 2537 return err; 2538 } 2539 } 2540 2541 OMX_VIDEO_PARAM_AVCTYPE h264type; 2542 InitOMXParams(&h264type); 2543 h264type.nPortIndex = kPortIndexOutput; 2544 2545 err = mOMX->getParameter( 2546 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2547 2548 if (err != OK) { 2549 return err; 2550 } 2551 2552 h264type.nAllowedPictureTypes = 2553 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 2554 2555 int32_t profile; 2556 if (msg->findInt32("profile", &profile)) { 2557 int32_t level; 2558 if (!msg->findInt32("level", &level)) { 2559 return INVALID_OPERATION; 2560 } 2561 2562 err = verifySupportForProfileAndLevel(profile, level); 2563 2564 if (err != OK) { 2565 return err; 2566 } 2567 2568 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 2569 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 2570 } 2571 2572 // XXX 2573 if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) { 2574 ALOGW("Use baseline profile instead of %d for AVC recording", 2575 h264type.eProfile); 2576 h264type.eProfile = 
OMX_VIDEO_AVCProfileBaseline; 2577 } 2578 2579 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 2580 h264type.nSliceHeaderSpacing = 0; 2581 h264type.bUseHadamard = OMX_TRUE; 2582 h264type.nRefFrames = 1; 2583 h264type.nBFrames = 0; 2584 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 2585 if (h264type.nPFrames == 0) { 2586 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 2587 } 2588 h264type.nRefIdx10ActiveMinus1 = 0; 2589 h264type.nRefIdx11ActiveMinus1 = 0; 2590 h264type.bEntropyCodingCABAC = OMX_FALSE; 2591 h264type.bWeightedPPrediction = OMX_FALSE; 2592 h264type.bconstIpred = OMX_FALSE; 2593 h264type.bDirect8x8Inference = OMX_FALSE; 2594 h264type.bDirectSpatialTemporal = OMX_FALSE; 2595 h264type.nCabacInitIdc = 0; 2596 } 2597 2598 if (h264type.nBFrames != 0) { 2599 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 2600 } 2601 2602 h264type.bEnableUEP = OMX_FALSE; 2603 h264type.bEnableFMO = OMX_FALSE; 2604 h264type.bEnableASO = OMX_FALSE; 2605 h264type.bEnableRS = OMX_FALSE; 2606 h264type.bFrameMBsOnly = OMX_TRUE; 2607 h264type.bMBAFF = OMX_FALSE; 2608 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 2609 2610 err = mOMX->setParameter( 2611 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 2612 2613 if (err != OK) { 2614 return err; 2615 } 2616 2617 return configureBitrate(bitrate, bitrateMode); 2618} 2619 2620status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 2621 int32_t bitrate, iFrameInterval; 2622 if (!msg->findInt32("bitrate", &bitrate) 2623 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 2624 return INVALID_OPERATION; 2625 } 2626 2627 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2628 2629 float frameRate; 2630 if (!msg->findFloat("frame-rate", &frameRate)) { 2631 int32_t tmp; 2632 if (!msg->findInt32("frame-rate", &tmp)) { 2633 return INVALID_OPERATION; 2634 } 2635 frameRate = (float)tmp; 2636 } 2637 2638 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 2639 
InitOMXParams(&hevcType); 2640 hevcType.nPortIndex = kPortIndexOutput; 2641 2642 status_t err = OK; 2643 err = mOMX->getParameter( 2644 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2645 if (err != OK) { 2646 return err; 2647 } 2648 2649 int32_t profile; 2650 if (msg->findInt32("profile", &profile)) { 2651 int32_t level; 2652 if (!msg->findInt32("level", &level)) { 2653 return INVALID_OPERATION; 2654 } 2655 2656 err = verifySupportForProfileAndLevel(profile, level); 2657 if (err != OK) { 2658 return err; 2659 } 2660 2661 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 2662 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 2663 } 2664 2665 // TODO: Need OMX structure definition for setting iFrameInterval 2666 2667 err = mOMX->setParameter( 2668 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 2669 if (err != OK) { 2670 return err; 2671 } 2672 2673 return configureBitrate(bitrate, bitrateMode); 2674} 2675 2676status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 2677 int32_t bitrate; 2678 int32_t iFrameInterval = 0; 2679 size_t tsLayers = 0; 2680 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 2681 OMX_VIDEO_VPXTemporalLayerPatternNone; 2682 static const uint32_t kVp8LayerRateAlloction 2683 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 2684 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 2685 {100, 100, 100}, // 1 layer 2686 { 60, 100, 100}, // 2 layers {60%, 40%} 2687 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 2688 }; 2689 if (!msg->findInt32("bitrate", &bitrate)) { 2690 return INVALID_OPERATION; 2691 } 2692 msg->findInt32("i-frame-interval", &iFrameInterval); 2693 2694 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 2695 2696 float frameRate; 2697 if (!msg->findFloat("frame-rate", &frameRate)) { 2698 int32_t tmp; 2699 if (!msg->findInt32("frame-rate", &tmp)) { 2700 return INVALID_OPERATION; 2701 } 2702 frameRate = (float)tmp; 2703 } 2704 2705 
AString tsSchema; 2706 if (msg->findString("ts-schema", &tsSchema)) { 2707 if (tsSchema == "webrtc.vp8.1-layer") { 2708 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 2709 tsLayers = 1; 2710 } else if (tsSchema == "webrtc.vp8.2-layer") { 2711 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 2712 tsLayers = 2; 2713 } else if (tsSchema == "webrtc.vp8.3-layer") { 2714 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 2715 tsLayers = 3; 2716 } else { 2717 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 2718 } 2719 } 2720 2721 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 2722 InitOMXParams(&vp8type); 2723 vp8type.nPortIndex = kPortIndexOutput; 2724 status_t err = mOMX->getParameter( 2725 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 2726 &vp8type, sizeof(vp8type)); 2727 2728 if (err == OK) { 2729 if (iFrameInterval > 0) { 2730 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 2731 } 2732 vp8type.eTemporalPattern = pattern; 2733 vp8type.nTemporalLayerCount = tsLayers; 2734 if (tsLayers > 0) { 2735 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 2736 vp8type.nTemporalLayerBitrateRatio[i] = 2737 kVp8LayerRateAlloction[tsLayers - 1][i]; 2738 } 2739 } 2740 if (bitrateMode == OMX_Video_ControlRateConstant) { 2741 vp8type.nMinQuantizer = 2; 2742 vp8type.nMaxQuantizer = 63; 2743 } 2744 2745 err = mOMX->setParameter( 2746 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 2747 &vp8type, sizeof(vp8type)); 2748 if (err != OK) { 2749 ALOGW("Extended VP8 parameters set failed: %d", err); 2750 } 2751 } 2752 2753 return configureBitrate(bitrate, bitrateMode); 2754} 2755 2756status_t ACodec::verifySupportForProfileAndLevel( 2757 int32_t profile, int32_t level) { 2758 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 2759 InitOMXParams(¶ms); 2760 params.nPortIndex = kPortIndexOutput; 2761 2762 for (params.nProfileIndex = 0;; ++params.nProfileIndex) { 2763 status_t err = mOMX->getParameter( 2764 mNode, 2765 
OMX_IndexParamVideoProfileLevelQuerySupported, 2766 ¶ms, 2767 sizeof(params)); 2768 2769 if (err != OK) { 2770 return err; 2771 } 2772 2773 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 2774 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 2775 2776 if (profile == supportedProfile && level <= supportedLevel) { 2777 return OK; 2778 } 2779 } 2780} 2781 2782status_t ACodec::configureBitrate( 2783 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 2784 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 2785 InitOMXParams(&bitrateType); 2786 bitrateType.nPortIndex = kPortIndexOutput; 2787 2788 status_t err = mOMX->getParameter( 2789 mNode, OMX_IndexParamVideoBitrate, 2790 &bitrateType, sizeof(bitrateType)); 2791 2792 if (err != OK) { 2793 return err; 2794 } 2795 2796 bitrateType.eControlRate = bitrateMode; 2797 bitrateType.nTargetBitrate = bitrate; 2798 2799 return mOMX->setParameter( 2800 mNode, OMX_IndexParamVideoBitrate, 2801 &bitrateType, sizeof(bitrateType)); 2802} 2803 2804status_t ACodec::setupErrorCorrectionParameters() { 2805 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 2806 InitOMXParams(&errorCorrectionType); 2807 errorCorrectionType.nPortIndex = kPortIndexOutput; 2808 2809 status_t err = mOMX->getParameter( 2810 mNode, OMX_IndexParamVideoErrorCorrection, 2811 &errorCorrectionType, sizeof(errorCorrectionType)); 2812 2813 if (err != OK) { 2814 return OK; // Optional feature. 
Ignore this failure 2815 } 2816 2817 errorCorrectionType.bEnableHEC = OMX_FALSE; 2818 errorCorrectionType.bEnableResync = OMX_TRUE; 2819 errorCorrectionType.nResynchMarkerSpacing = 256; 2820 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 2821 errorCorrectionType.bEnableRVLC = OMX_FALSE; 2822 2823 return mOMX->setParameter( 2824 mNode, OMX_IndexParamVideoErrorCorrection, 2825 &errorCorrectionType, sizeof(errorCorrectionType)); 2826} 2827 2828status_t ACodec::setVideoFormatOnPort( 2829 OMX_U32 portIndex, 2830 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) { 2831 OMX_PARAM_PORTDEFINITIONTYPE def; 2832 InitOMXParams(&def); 2833 def.nPortIndex = portIndex; 2834 2835 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2836 2837 status_t err = mOMX->getParameter( 2838 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2839 2840 CHECK_EQ(err, (status_t)OK); 2841 2842 if (portIndex == kPortIndexInput) { 2843 // XXX Need a (much) better heuristic to compute input buffer sizes. 
2844 const size_t X = 64 * 1024; 2845 if (def.nBufferSize < X) { 2846 def.nBufferSize = X; 2847 } 2848 } 2849 2850 CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); 2851 2852 video_def->nFrameWidth = width; 2853 video_def->nFrameHeight = height; 2854 2855 if (portIndex == kPortIndexInput) { 2856 video_def->eCompressionFormat = compressionFormat; 2857 video_def->eColorFormat = OMX_COLOR_FormatUnused; 2858 } 2859 2860 err = mOMX->setParameter( 2861 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2862 2863 return err; 2864} 2865 2866status_t ACodec::initNativeWindow() { 2867 if (mNativeWindow != NULL) { 2868 return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE); 2869 } 2870 2871 mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); 2872 return OK; 2873} 2874 2875size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 2876 size_t n = 0; 2877 2878 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 2879 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 2880 2881 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 2882 ++n; 2883 } 2884 } 2885 2886 return n; 2887} 2888 2889size_t ACodec::countBuffersOwnedByNativeWindow() const { 2890 size_t n = 0; 2891 2892 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 2893 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 2894 2895 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 2896 ++n; 2897 } 2898 } 2899 2900 return n; 2901} 2902 2903void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 2904 if (mNativeWindow == NULL) { 2905 return; 2906 } 2907 2908 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 2909 && dequeueBufferFromNativeWindow() != NULL) { 2910 // these buffers will be submitted as regular buffers; account for this 2911 if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) { 2912 --mMetaDataBuffersToSubmit; 2913 } 2914 } 2915} 2916 2917bool 
ACodec::allYourBuffersAreBelongToUs( 2918 OMX_U32 portIndex) { 2919 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 2920 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 2921 2922 if (info->mStatus != BufferInfo::OWNED_BY_US 2923 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 2924 ALOGV("[%s] Buffer %u on port %u still has status %d", 2925 mComponentName.c_str(), 2926 info->mBufferID, portIndex, info->mStatus); 2927 return false; 2928 } 2929 } 2930 2931 return true; 2932} 2933 2934bool ACodec::allYourBuffersAreBelongToUs() { 2935 return allYourBuffersAreBelongToUs(kPortIndexInput) 2936 && allYourBuffersAreBelongToUs(kPortIndexOutput); 2937} 2938 2939void ACodec::deferMessage(const sp<AMessage> &msg) { 2940 bool wasEmptyBefore = mDeferredQueue.empty(); 2941 mDeferredQueue.push_back(msg); 2942} 2943 2944void ACodec::processDeferredMessages() { 2945 List<sp<AMessage> > queue = mDeferredQueue; 2946 mDeferredQueue.clear(); 2947 2948 List<sp<AMessage> >::iterator it = queue.begin(); 2949 while (it != queue.end()) { 2950 onMessageReceived(*it++); 2951 } 2952} 2953 2954// static 2955bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams ¶ms) { 2956 MediaImage &image = params.sMediaImage; 2957 memset(&image, 0, sizeof(image)); 2958 2959 image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN; 2960 image.mNumPlanes = 0; 2961 2962 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 2963 // we need stride and slice-height to be non-zero 2964 if (params.nStride == 0 || params.nSliceHeight == 0) { 2965 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 2966 fmt, fmt, params.nStride, params.nSliceHeight); 2967 return false; 2968 } 2969 2970 image.mWidth = params.nFrameWidth; 2971 image.mHeight = params.nFrameHeight; 2972 2973 // only supporting YUV420 2974 if (fmt != OMX_COLOR_FormatYUV420Planar && 2975 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 2976 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 2977 fmt != 
OMX_COLOR_FormatYUV420PackedSemiPlanar) { 2978 ALOGW("do not know color format 0x%x = %d", fmt, fmt); 2979 return false; 2980 } 2981 2982 // set-up YUV format 2983 image.mType = MediaImage::MEDIA_IMAGE_TYPE_YUV; 2984 image.mNumPlanes = 3; 2985 image.mBitDepth = 8; 2986 image.mPlane[image.Y].mOffset = 0; 2987 image.mPlane[image.Y].mColInc = 1; 2988 image.mPlane[image.Y].mRowInc = params.nStride; 2989 image.mPlane[image.Y].mHorizSubsampling = 1; 2990 image.mPlane[image.Y].mVertSubsampling = 1; 2991 2992 switch (fmt) { 2993 case OMX_COLOR_FormatYUV420Planar: // used for YV12 2994 case OMX_COLOR_FormatYUV420PackedPlanar: 2995 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 2996 image.mPlane[image.U].mColInc = 1; 2997 image.mPlane[image.U].mRowInc = params.nStride / 2; 2998 image.mPlane[image.U].mHorizSubsampling = 2; 2999 image.mPlane[image.U].mVertSubsampling = 2; 3000 3001 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 3002 + (params.nStride * params.nSliceHeight / 4); 3003 image.mPlane[image.V].mColInc = 1; 3004 image.mPlane[image.V].mRowInc = params.nStride / 2; 3005 image.mPlane[image.V].mHorizSubsampling = 2; 3006 image.mPlane[image.V].mVertSubsampling = 2; 3007 break; 3008 3009 case OMX_COLOR_FormatYUV420SemiPlanar: 3010 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 3011 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 3012 // NV12 3013 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 3014 image.mPlane[image.U].mColInc = 2; 3015 image.mPlane[image.U].mRowInc = params.nStride; 3016 image.mPlane[image.U].mHorizSubsampling = 2; 3017 image.mPlane[image.U].mVertSubsampling = 2; 3018 3019 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 3020 image.mPlane[image.V].mColInc = 2; 3021 image.mPlane[image.V].mRowInc = params.nStride; 3022 image.mPlane[image.V].mHorizSubsampling = 2; 3023 image.mPlane[image.V].mVertSubsampling = 2; 3024 break; 3025 3026 default: 3027 TRESPASS(); 3028 } 
3029 return true; 3030} 3031 3032// static 3033bool ACodec::describeColorFormat( 3034 const sp<IOMX> &omx, IOMX::node_id node, 3035 DescribeColorFormatParams &describeParams) 3036{ 3037 OMX_INDEXTYPE describeColorFormatIndex; 3038 if (omx->getExtensionIndex( 3039 node, "OMX.google.android.index.describeColorFormat", 3040 &describeColorFormatIndex) != OK || 3041 omx->getParameter( 3042 node, describeColorFormatIndex, 3043 &describeParams, sizeof(describeParams)) != OK) { 3044 return describeDefaultColorFormat(describeParams); 3045 } 3046 return describeParams.sMediaImage.mType != 3047 MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN; 3048} 3049 3050// static 3051bool ACodec::isFlexibleColorFormat( 3052 const sp<IOMX> &omx, IOMX::node_id node, 3053 uint32_t colorFormat, OMX_U32 *flexibleEquivalent) { 3054 DescribeColorFormatParams describeParams; 3055 InitOMXParams(&describeParams); 3056 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 3057 // reasonable dummy values 3058 describeParams.nFrameWidth = 128; 3059 describeParams.nFrameHeight = 128; 3060 describeParams.nStride = 128; 3061 describeParams.nSliceHeight = 128; 3062 3063 CHECK(flexibleEquivalent != NULL); 3064 3065 if (!describeColorFormat(omx, node, describeParams)) { 3066 return false; 3067 } 3068 3069 const MediaImage &img = describeParams.sMediaImage; 3070 if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) { 3071 if (img.mNumPlanes != 3 || 3072 img.mPlane[img.Y].mHorizSubsampling != 1 || 3073 img.mPlane[img.Y].mVertSubsampling != 1) { 3074 return false; 3075 } 3076 3077 // YUV 420 3078 if (img.mPlane[img.U].mHorizSubsampling == 2 3079 && img.mPlane[img.U].mVertSubsampling == 2 3080 && img.mPlane[img.V].mHorizSubsampling == 2 3081 && img.mPlane[img.V].mVertSubsampling == 2) { 3082 // possible flexible YUV420 format 3083 if (img.mBitDepth <= 8) { 3084 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 3085 return true; 3086 } 3087 } 3088 } 3089 return false; 3090} 3091 3092status_t 
ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 3093 // TODO: catch errors an return them instead of using CHECK 3094 OMX_PARAM_PORTDEFINITIONTYPE def; 3095 InitOMXParams(&def); 3096 def.nPortIndex = portIndex; 3097 3098 CHECK_EQ(mOMX->getParameter( 3099 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)), 3100 (status_t)OK); 3101 3102 CHECK_EQ((int)def.eDir, 3103 (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)); 3104 3105 switch (def.eDomain) { 3106 case OMX_PortDomainVideo: 3107 { 3108 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 3109 switch ((int)videoDef->eCompressionFormat) { 3110 case OMX_VIDEO_CodingUnused: 3111 { 3112 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 3113 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 3114 3115 notify->setInt32("stride", videoDef->nStride); 3116 notify->setInt32("slice-height", videoDef->nSliceHeight); 3117 notify->setInt32("color-format", videoDef->eColorFormat); 3118 3119 DescribeColorFormatParams describeParams; 3120 InitOMXParams(&describeParams); 3121 describeParams.eColorFormat = videoDef->eColorFormat; 3122 describeParams.nFrameWidth = videoDef->nFrameWidth; 3123 describeParams.nFrameHeight = videoDef->nFrameHeight; 3124 describeParams.nStride = videoDef->nStride; 3125 describeParams.nSliceHeight = videoDef->nSliceHeight; 3126 3127 if (describeColorFormat(mOMX, mNode, describeParams)) { 3128 notify->setBuffer( 3129 "image-data", 3130 ABuffer::CreateAsCopy( 3131 &describeParams.sMediaImage, 3132 sizeof(describeParams.sMediaImage))); 3133 } 3134 3135 OMX_CONFIG_RECTTYPE rect; 3136 InitOMXParams(&rect); 3137 rect.nPortIndex = kPortIndexOutput; 3138 3139 if (mOMX->getConfig( 3140 mNode, OMX_IndexConfigCommonOutputCrop, 3141 &rect, sizeof(rect)) != OK) { 3142 rect.nLeft = 0; 3143 rect.nTop = 0; 3144 rect.nWidth = videoDef->nFrameWidth; 3145 rect.nHeight = videoDef->nFrameHeight; 3146 } 3147 3148 CHECK_GE(rect.nLeft, 0); 3149 CHECK_GE(rect.nTop, 0); 3150 
CHECK_GE(rect.nWidth, 0u); 3151 CHECK_GE(rect.nHeight, 0u); 3152 CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); 3153 CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); 3154 3155 notify->setRect( 3156 "crop", 3157 rect.nLeft, 3158 rect.nTop, 3159 rect.nLeft + rect.nWidth - 1, 3160 rect.nTop + rect.nHeight - 1); 3161 3162 break; 3163 } 3164 3165 case OMX_VIDEO_CodingVP8: 3166 case OMX_VIDEO_CodingVP9: 3167 { 3168 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 3169 InitOMXParams(&vp8type); 3170 vp8type.nPortIndex = kPortIndexOutput; 3171 status_t err = mOMX->getParameter( 3172 mNode, 3173 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 3174 &vp8type, 3175 sizeof(vp8type)); 3176 3177 if (err == OK) { 3178 AString tsSchema = "none"; 3179 if (vp8type.eTemporalPattern 3180 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 3181 switch (vp8type.nTemporalLayerCount) { 3182 case 1: 3183 { 3184 tsSchema = "webrtc.vp8.1-layer"; 3185 break; 3186 } 3187 case 2: 3188 { 3189 tsSchema = "webrtc.vp8.2-layer"; 3190 break; 3191 } 3192 case 3: 3193 { 3194 tsSchema = "webrtc.vp8.3-layer"; 3195 break; 3196 } 3197 default: 3198 { 3199 break; 3200 } 3201 } 3202 } 3203 notify->setString("ts-schema", tsSchema); 3204 } 3205 // Fall through to set up mime. 
3206 } 3207 3208 default: 3209 { 3210 CHECK(mIsEncoder ^ (portIndex == kPortIndexInput)); 3211 AString mime; 3212 if (GetMimeTypeForVideoCoding( 3213 videoDef->eCompressionFormat, &mime) != OK) { 3214 notify->setString("mime", "application/octet-stream"); 3215 } else { 3216 notify->setString("mime", mime.c_str()); 3217 } 3218 break; 3219 } 3220 } 3221 3222 notify->setInt32("width", videoDef->nFrameWidth); 3223 notify->setInt32("height", videoDef->nFrameHeight); 3224 break; 3225 } 3226 3227 case OMX_PortDomainAudio: 3228 { 3229 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 3230 3231 switch ((int)audioDef->eEncoding) { 3232 case OMX_AUDIO_CodingPCM: 3233 { 3234 OMX_AUDIO_PARAM_PCMMODETYPE params; 3235 InitOMXParams(¶ms); 3236 params.nPortIndex = portIndex; 3237 3238 CHECK_EQ(mOMX->getParameter( 3239 mNode, OMX_IndexParamAudioPcm, 3240 ¶ms, sizeof(params)), 3241 (status_t)OK); 3242 3243 CHECK_GT(params.nChannels, 0); 3244 CHECK(params.nChannels == 1 || params.bInterleaved); 3245 CHECK_EQ(params.nBitPerSample, 16u); 3246 3247 CHECK_EQ((int)params.eNumData, 3248 (int)OMX_NumericalDataSigned); 3249 3250 CHECK_EQ((int)params.ePCMMode, 3251 (int)OMX_AUDIO_PCMModeLinear); 3252 3253 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 3254 notify->setInt32("channel-count", params.nChannels); 3255 notify->setInt32("sample-rate", params.nSamplingRate); 3256 3257 if (mChannelMaskPresent) { 3258 notify->setInt32("channel-mask", mChannelMask); 3259 } 3260 break; 3261 } 3262 3263 case OMX_AUDIO_CodingAAC: 3264 { 3265 OMX_AUDIO_PARAM_AACPROFILETYPE params; 3266 InitOMXParams(¶ms); 3267 params.nPortIndex = portIndex; 3268 3269 CHECK_EQ(mOMX->getParameter( 3270 mNode, OMX_IndexParamAudioAac, 3271 ¶ms, sizeof(params)), 3272 (status_t)OK); 3273 3274 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 3275 notify->setInt32("channel-count", params.nChannels); 3276 notify->setInt32("sample-rate", params.nSampleRate); 3277 break; 3278 } 3279 3280 case OMX_AUDIO_CodingAMR: 
3281 { 3282 OMX_AUDIO_PARAM_AMRTYPE params; 3283 InitOMXParams(¶ms); 3284 params.nPortIndex = portIndex; 3285 3286 CHECK_EQ(mOMX->getParameter( 3287 mNode, OMX_IndexParamAudioAmr, 3288 ¶ms, sizeof(params)), 3289 (status_t)OK); 3290 3291 notify->setInt32("channel-count", 1); 3292 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 3293 notify->setString( 3294 "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 3295 3296 notify->setInt32("sample-rate", 16000); 3297 } else { 3298 notify->setString( 3299 "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 3300 3301 notify->setInt32("sample-rate", 8000); 3302 } 3303 break; 3304 } 3305 3306 case OMX_AUDIO_CodingFLAC: 3307 { 3308 OMX_AUDIO_PARAM_FLACTYPE params; 3309 InitOMXParams(¶ms); 3310 params.nPortIndex = portIndex; 3311 3312 CHECK_EQ(mOMX->getParameter( 3313 mNode, OMX_IndexParamAudioFlac, 3314 ¶ms, sizeof(params)), 3315 (status_t)OK); 3316 3317 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 3318 notify->setInt32("channel-count", params.nChannels); 3319 notify->setInt32("sample-rate", params.nSampleRate); 3320 break; 3321 } 3322 3323 case OMX_AUDIO_CodingMP3: 3324 { 3325 OMX_AUDIO_PARAM_MP3TYPE params; 3326 InitOMXParams(¶ms); 3327 params.nPortIndex = portIndex; 3328 3329 CHECK_EQ(mOMX->getParameter( 3330 mNode, OMX_IndexParamAudioMp3, 3331 ¶ms, sizeof(params)), 3332 (status_t)OK); 3333 3334 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 3335 notify->setInt32("channel-count", params.nChannels); 3336 notify->setInt32("sample-rate", params.nSampleRate); 3337 break; 3338 } 3339 3340 case OMX_AUDIO_CodingVORBIS: 3341 { 3342 OMX_AUDIO_PARAM_VORBISTYPE params; 3343 InitOMXParams(¶ms); 3344 params.nPortIndex = portIndex; 3345 3346 CHECK_EQ(mOMX->getParameter( 3347 mNode, OMX_IndexParamAudioVorbis, 3348 ¶ms, sizeof(params)), 3349 (status_t)OK); 3350 3351 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 3352 notify->setInt32("channel-count", params.nChannels); 3353 notify->setInt32("sample-rate", params.nSampleRate); 3354 
break; 3355 } 3356 3357 case OMX_AUDIO_CodingAndroidAC3: 3358 { 3359 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 3360 InitOMXParams(¶ms); 3361 params.nPortIndex = portIndex; 3362 3363 CHECK_EQ((status_t)OK, mOMX->getParameter( 3364 mNode, 3365 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 3366 ¶ms, 3367 sizeof(params))); 3368 3369 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3); 3370 notify->setInt32("channel-count", params.nChannels); 3371 notify->setInt32("sample-rate", params.nSampleRate); 3372 break; 3373 } 3374 3375 case OMX_AUDIO_CodingAndroidOPUS: 3376 { 3377 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 3378 InitOMXParams(¶ms); 3379 params.nPortIndex = portIndex; 3380 3381 CHECK_EQ((status_t)OK, mOMX->getParameter( 3382 mNode, 3383 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 3384 ¶ms, 3385 sizeof(params))); 3386 3387 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 3388 notify->setInt32("channel-count", params.nChannels); 3389 notify->setInt32("sample-rate", params.nSampleRate); 3390 break; 3391 } 3392 3393 default: 3394 ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding); 3395 TRESPASS(); 3396 } 3397 break; 3398 } 3399 3400 default: 3401 TRESPASS(); 3402 } 3403 3404 return OK; 3405} 3406 3407void ACodec::sendFormatChange(const sp<AMessage> &reply) { 3408 sp<AMessage> notify = mNotify->dup(); 3409 notify->setInt32("what", kWhatOutputFormatChanged); 3410 3411 CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK); 3412 3413 AString mime; 3414 CHECK(notify->findString("mime", &mime)); 3415 3416 int32_t left, top, right, bottom; 3417 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && 3418 mNativeWindow != NULL && 3419 notify->findRect("crop", &left, &top, &right, &bottom)) { 3420 // notify renderer of the crop change 3421 // NOTE: native window uses extended right-bottom coordinate 3422 reply->setRect("crop", left, top, right + 1, bottom + 1); 3423 } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW && 3424 (mEncoderDelay || mEncoderPadding)) { 3425 int32_t 
channelCount; 3426 CHECK(notify->findInt32("channel-count", &channelCount)); 3427 size_t frameSize = channelCount * sizeof(int16_t); 3428 if (mSkipCutBuffer != NULL) { 3429 size_t prevbufsize = mSkipCutBuffer->size(); 3430 if (prevbufsize != 0) { 3431 ALOGW("Replacing SkipCutBuffer holding %d " 3432 "bytes", 3433 prevbufsize); 3434 } 3435 } 3436 mSkipCutBuffer = new SkipCutBuffer( 3437 mEncoderDelay * frameSize, 3438 mEncoderPadding * frameSize); 3439 } 3440 3441 notify->post(); 3442 3443 mSentFormat = true; 3444} 3445 3446void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 3447 sp<AMessage> notify = mNotify->dup(); 3448 notify->setInt32("what", CodecBase::kWhatError); 3449 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 3450 3451 if (internalError == UNKNOWN_ERROR) { // find better error code 3452 const status_t omxStatus = statusFromOMXError(error); 3453 if (omxStatus != 0) { 3454 internalError = omxStatus; 3455 } else { 3456 ALOGW("Invalid OMX error %#x", error); 3457 } 3458 } 3459 notify->setInt32("err", internalError); 3460 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 3461 notify->post(); 3462} 3463 3464status_t ACodec::pushBlankBuffersToNativeWindow() { 3465 status_t err = NO_ERROR; 3466 ANativeWindowBuffer* anb = NULL; 3467 int numBufs = 0; 3468 int minUndequeuedBufs = 0; 3469 3470 // We need to reconnect to the ANativeWindow as a CPU client to ensure that 3471 // no frames get dropped by SurfaceFlinger assuming that these are video 3472 // frames. 
3473 err = native_window_api_disconnect(mNativeWindow.get(), 3474 NATIVE_WINDOW_API_MEDIA); 3475 if (err != NO_ERROR) { 3476 ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", 3477 strerror(-err), -err); 3478 return err; 3479 } 3480 3481 err = native_window_api_connect(mNativeWindow.get(), 3482 NATIVE_WINDOW_API_CPU); 3483 if (err != NO_ERROR) { 3484 ALOGE("error pushing blank frames: api_connect failed: %s (%d)", 3485 strerror(-err), -err); 3486 return err; 3487 } 3488 3489 err = native_window_set_buffers_geometry(mNativeWindow.get(), 1, 1, 3490 HAL_PIXEL_FORMAT_RGBX_8888); 3491 if (err != NO_ERROR) { 3492 ALOGE("error pushing blank frames: set_buffers_geometry failed: %s (%d)", 3493 strerror(-err), -err); 3494 goto error; 3495 } 3496 3497 err = native_window_set_scaling_mode(mNativeWindow.get(), 3498 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 3499 if (err != NO_ERROR) { 3500 ALOGE("error pushing blank_frames: set_scaling_mode failed: %s (%d)", 3501 strerror(-err), -err); 3502 goto error; 3503 } 3504 3505 err = native_window_set_usage(mNativeWindow.get(), 3506 GRALLOC_USAGE_SW_WRITE_OFTEN); 3507 if (err != NO_ERROR) { 3508 ALOGE("error pushing blank frames: set_usage failed: %s (%d)", 3509 strerror(-err), -err); 3510 goto error; 3511 } 3512 3513 err = mNativeWindow->query(mNativeWindow.get(), 3514 NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs); 3515 if (err != NO_ERROR) { 3516 ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query " 3517 "failed: %s (%d)", strerror(-err), -err); 3518 goto error; 3519 } 3520 3521 numBufs = minUndequeuedBufs + 1; 3522 err = native_window_set_buffer_count(mNativeWindow.get(), numBufs); 3523 if (err != NO_ERROR) { 3524 ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", 3525 strerror(-err), -err); 3526 goto error; 3527 } 3528 3529 // We push numBufs + 1 buffers to ensure that we've drawn into the same 3530 // buffer twice. 
This should guarantee that the buffer has been displayed 3531 // on the screen and then been replaced, so an previous video frames are 3532 // guaranteed NOT to be currently displayed. 3533 for (int i = 0; i < numBufs + 1; i++) { 3534 int fenceFd = -1; 3535 err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb); 3536 if (err != NO_ERROR) { 3537 ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)", 3538 strerror(-err), -err); 3539 goto error; 3540 } 3541 3542 sp<GraphicBuffer> buf(new GraphicBuffer(anb, false)); 3543 3544 // Fill the buffer with the a 1x1 checkerboard pattern ;) 3545 uint32_t* img = NULL; 3546 err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img)); 3547 if (err != NO_ERROR) { 3548 ALOGE("error pushing blank frames: lock failed: %s (%d)", 3549 strerror(-err), -err); 3550 goto error; 3551 } 3552 3553 *img = 0; 3554 3555 err = buf->unlock(); 3556 if (err != NO_ERROR) { 3557 ALOGE("error pushing blank frames: unlock failed: %s (%d)", 3558 strerror(-err), -err); 3559 goto error; 3560 } 3561 3562 err = mNativeWindow->queueBuffer(mNativeWindow.get(), 3563 buf->getNativeBuffer(), -1); 3564 if (err != NO_ERROR) { 3565 ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", 3566 strerror(-err), -err); 3567 goto error; 3568 } 3569 3570 anb = NULL; 3571 } 3572 3573error: 3574 3575 if (err != NO_ERROR) { 3576 // Clean up after an error. 3577 if (anb != NULL) { 3578 mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1); 3579 } 3580 3581 native_window_api_disconnect(mNativeWindow.get(), 3582 NATIVE_WINDOW_API_CPU); 3583 native_window_api_connect(mNativeWindow.get(), 3584 NATIVE_WINDOW_API_MEDIA); 3585 3586 return err; 3587 } else { 3588 // Clean up after success. 
3589 err = native_window_api_disconnect(mNativeWindow.get(), 3590 NATIVE_WINDOW_API_CPU); 3591 if (err != NO_ERROR) { 3592 ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", 3593 strerror(-err), -err); 3594 return err; 3595 } 3596 3597 err = native_window_api_connect(mNativeWindow.get(), 3598 NATIVE_WINDOW_API_MEDIA); 3599 if (err != NO_ERROR) { 3600 ALOGE("error pushing blank frames: api_connect failed: %s (%d)", 3601 strerror(-err), -err); 3602 return err; 3603 } 3604 3605 return NO_ERROR; 3606 } 3607} 3608 3609//////////////////////////////////////////////////////////////////////////////// 3610 3611ACodec::PortDescription::PortDescription() { 3612} 3613 3614status_t ACodec::requestIDRFrame() { 3615 if (!mIsEncoder) { 3616 return ERROR_UNSUPPORTED; 3617 } 3618 3619 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 3620 InitOMXParams(¶ms); 3621 3622 params.nPortIndex = kPortIndexOutput; 3623 params.IntraRefreshVOP = OMX_TRUE; 3624 3625 return mOMX->setConfig( 3626 mNode, 3627 OMX_IndexConfigVideoIntraVOPRefresh, 3628 ¶ms, 3629 sizeof(params)); 3630} 3631 3632void ACodec::PortDescription::addBuffer( 3633 IOMX::buffer_id id, const sp<ABuffer> &buffer) { 3634 mBufferIDs.push_back(id); 3635 mBuffers.push_back(buffer); 3636} 3637 3638size_t ACodec::PortDescription::countBuffers() { 3639 return mBufferIDs.size(); 3640} 3641 3642IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 3643 return mBufferIDs.itemAt(index); 3644} 3645 3646sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 3647 return mBuffers.itemAt(index); 3648} 3649 3650//////////////////////////////////////////////////////////////////////////////// 3651 3652ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 3653 : AState(parentState), 3654 mCodec(codec) { 3655} 3656 3657ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 3658 OMX_U32 /* portIndex */) { 3659 return KEEP_BUFFERS; 3660} 3661 3662bool 
ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 3663 switch (msg->what()) { 3664 case kWhatInputBufferFilled: 3665 { 3666 onInputBufferFilled(msg); 3667 break; 3668 } 3669 3670 case kWhatOutputBufferDrained: 3671 { 3672 onOutputBufferDrained(msg); 3673 break; 3674 } 3675 3676 case ACodec::kWhatOMXMessage: 3677 { 3678 return onOMXMessage(msg); 3679 } 3680 3681 case ACodec::kWhatCreateInputSurface: 3682 case ACodec::kWhatSignalEndOfInputStream: 3683 { 3684 // This may result in an app illegal state exception. 3685 ALOGE("Message 0x%x was not handled", msg->what()); 3686 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 3687 return true; 3688 } 3689 3690 case ACodec::kWhatOMXDied: 3691 { 3692 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 3693 ALOGE("OMX/mediaserver died, signalling error!"); 3694 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 3695 break; 3696 } 3697 3698 default: 3699 return false; 3700 } 3701 3702 return true; 3703} 3704 3705bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 3706 int32_t type; 3707 CHECK(msg->findInt32("type", &type)); 3708 3709 // there is a possibility that this is an outstanding message for a 3710 // codec that we have already destroyed 3711 if (mCodec->mNode == NULL) { 3712 ALOGI("ignoring message as already freed component: %s", 3713 msg->debugString().c_str()); 3714 return true; 3715 } 3716 3717 IOMX::node_id nodeID; 3718 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 3719 CHECK_EQ(nodeID, mCodec->mNode); 3720 3721 switch (type) { 3722 case omx_message::EVENT: 3723 { 3724 int32_t event, data1, data2; 3725 CHECK(msg->findInt32("event", &event)); 3726 CHECK(msg->findInt32("data1", &data1)); 3727 CHECK(msg->findInt32("data2", &data2)); 3728 3729 if (event == OMX_EventCmdComplete 3730 && data1 == OMX_CommandFlush 3731 && data2 == (int32_t)OMX_ALL) { 3732 // Use of this notification is not consistent across 3733 // implementations. 
We'll drop this notification and rely 3734 // on flush-complete notifications on the individual port 3735 // indices instead. 3736 3737 return true; 3738 } 3739 3740 return onOMXEvent( 3741 static_cast<OMX_EVENTTYPE>(event), 3742 static_cast<OMX_U32>(data1), 3743 static_cast<OMX_U32>(data2)); 3744 } 3745 3746 case omx_message::EMPTY_BUFFER_DONE: 3747 { 3748 IOMX::buffer_id bufferID; 3749 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 3750 3751 return onOMXEmptyBufferDone(bufferID); 3752 } 3753 3754 case omx_message::FILL_BUFFER_DONE: 3755 { 3756 IOMX::buffer_id bufferID; 3757 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 3758 3759 int32_t rangeOffset, rangeLength, flags; 3760 int64_t timeUs; 3761 3762 CHECK(msg->findInt32("range_offset", &rangeOffset)); 3763 CHECK(msg->findInt32("range_length", &rangeLength)); 3764 CHECK(msg->findInt32("flags", &flags)); 3765 CHECK(msg->findInt64("timestamp", &timeUs)); 3766 3767 return onOMXFillBufferDone( 3768 bufferID, 3769 (size_t)rangeOffset, (size_t)rangeLength, 3770 (OMX_U32)flags, 3771 timeUs); 3772 } 3773 3774 default: 3775 TRESPASS(); 3776 break; 3777 } 3778} 3779 3780bool ACodec::BaseState::onOMXEvent( 3781 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 3782 if (event != OMX_EventError) { 3783 ALOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)", 3784 mCodec->mComponentName.c_str(), event, data1, data2); 3785 3786 return false; 3787 } 3788 3789 ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1); 3790 3791 // verify OMX component sends back an error we expect. 
3792 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 3793 if (!isOMXError(omxError)) { 3794 ALOGW("Invalid OMX error %#x", omxError); 3795 omxError = OMX_ErrorUndefined; 3796 } 3797 mCodec->signalError(omxError); 3798 3799 return true; 3800} 3801 3802bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) { 3803 ALOGV("[%s] onOMXEmptyBufferDone %p", 3804 mCodec->mComponentName.c_str(), bufferID); 3805 3806 BufferInfo *info = 3807 mCodec->findBufferByID(kPortIndexInput, bufferID); 3808 3809 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); 3810 info->mStatus = BufferInfo::OWNED_BY_US; 3811 3812 const sp<AMessage> &bufferMeta = info->mData->meta(); 3813 void *mediaBuffer; 3814 if (bufferMeta->findPointer("mediaBuffer", &mediaBuffer) 3815 && mediaBuffer != NULL) { 3816 // We're in "store-metadata-in-buffers" mode, the underlying 3817 // OMX component had access to data that's implicitly refcounted 3818 // by this "mediaBuffer" object. Now that the OMX component has 3819 // told us that it's done with the input buffer, we can decrement 3820 // the mediaBuffer's reference count. 
3821 3822 ALOGV("releasing mbuf %p", mediaBuffer); 3823 3824 ((MediaBuffer *)mediaBuffer)->release(); 3825 mediaBuffer = NULL; 3826 3827 bufferMeta->setPointer("mediaBuffer", NULL); 3828 } 3829 3830 PortMode mode = getPortMode(kPortIndexInput); 3831 3832 switch (mode) { 3833 case KEEP_BUFFERS: 3834 break; 3835 3836 case RESUBMIT_BUFFERS: 3837 postFillThisBuffer(info); 3838 break; 3839 3840 default: 3841 { 3842 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 3843 TRESPASS(); // Not currently used 3844 break; 3845 } 3846 } 3847 3848 return true; 3849} 3850 3851void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 3852 if (mCodec->mPortEOS[kPortIndexInput]) { 3853 return; 3854 } 3855 3856 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 3857 3858 sp<AMessage> notify = mCodec->mNotify->dup(); 3859 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 3860 notify->setInt32("buffer-id", info->mBufferID); 3861 3862 info->mData->meta()->clear(); 3863 notify->setBuffer("buffer", info->mData); 3864 3865 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id()); 3866 reply->setInt32("buffer-id", info->mBufferID); 3867 3868 notify->setMessage("reply", reply); 3869 3870 notify->post(); 3871 3872 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 3873} 3874 3875void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 3876 IOMX::buffer_id bufferID; 3877 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 3878 sp<ABuffer> buffer; 3879 int32_t err = OK; 3880 bool eos = false; 3881 PortMode mode = getPortMode(kPortIndexInput); 3882 3883 if (!msg->findBuffer("buffer", &buffer)) { 3884 /* these are unfilled buffers returned by client */ 3885 CHECK(msg->findInt32("err", &err)); 3886 3887 if (err == OK) { 3888 /* buffers with no errors are returned on MediaCodec.flush */ 3889 mode = KEEP_BUFFERS; 3890 } else { 3891 ALOGV("[%s] saw error %d instead of an input buffer", 3892 mCodec->mComponentName.c_str(), err); 3893 eos = true; 3894 } 3895 
3896 buffer.clear(); 3897 } 3898 3899 int32_t tmp; 3900 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 3901 eos = true; 3902 err = ERROR_END_OF_STREAM; 3903 } 3904 3905 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 3906 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM); 3907 3908 info->mStatus = BufferInfo::OWNED_BY_US; 3909 3910 switch (mode) { 3911 case KEEP_BUFFERS: 3912 { 3913 if (eos) { 3914 if (!mCodec->mPortEOS[kPortIndexInput]) { 3915 mCodec->mPortEOS[kPortIndexInput] = true; 3916 mCodec->mInputEOSResult = err; 3917 } 3918 } 3919 break; 3920 } 3921 3922 case RESUBMIT_BUFFERS: 3923 { 3924 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 3925 int64_t timeUs; 3926 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 3927 3928 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 3929 3930 int32_t isCSD; 3931 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 3932 flags |= OMX_BUFFERFLAG_CODECCONFIG; 3933 } 3934 3935 if (eos) { 3936 flags |= OMX_BUFFERFLAG_EOS; 3937 } 3938 3939 if (buffer != info->mData) { 3940 ALOGV("[%s] Needs to copy input data for buffer %p. 
(%p != %p)", 3941 mCodec->mComponentName.c_str(), 3942 bufferID, 3943 buffer.get(), info->mData.get()); 3944 3945 CHECK_LE(buffer->size(), info->mData->capacity()); 3946 memcpy(info->mData->data(), buffer->data(), buffer->size()); 3947 } 3948 3949 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 3950 ALOGV("[%s] calling emptyBuffer %p w/ codec specific data", 3951 mCodec->mComponentName.c_str(), bufferID); 3952 } else if (flags & OMX_BUFFERFLAG_EOS) { 3953 ALOGV("[%s] calling emptyBuffer %p w/ EOS", 3954 mCodec->mComponentName.c_str(), bufferID); 3955 } else { 3956#if TRACK_BUFFER_TIMING 3957 ALOGI("[%s] calling emptyBuffer %p w/ time %lld us", 3958 mCodec->mComponentName.c_str(), bufferID, timeUs); 3959#else 3960 ALOGV("[%s] calling emptyBuffer %p w/ time %lld us", 3961 mCodec->mComponentName.c_str(), bufferID, timeUs); 3962#endif 3963 } 3964 3965#if TRACK_BUFFER_TIMING 3966 ACodec::BufferStats stats; 3967 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 3968 stats.mFillBufferDoneTimeUs = -1ll; 3969 mCodec->mBufferStats.add(timeUs, stats); 3970#endif 3971 3972 if (mCodec->mStoreMetaDataInOutputBuffers) { 3973 // try to submit an output buffer for each input buffer 3974 PortMode outputMode = getPortMode(kPortIndexOutput); 3975 3976 ALOGV("MetaDataBuffersToSubmit=%u portMode=%s", 3977 mCodec->mMetaDataBuffersToSubmit, 3978 (outputMode == FREE_BUFFERS ? "FREE" : 3979 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 3980 if (outputMode == RESUBMIT_BUFFERS) { 3981 mCodec->submitOutputMetaDataBuffer(); 3982 } 3983 } 3984 3985 CHECK_EQ(mCodec->mOMX->emptyBuffer( 3986 mCodec->mNode, 3987 bufferID, 3988 0, 3989 buffer->size(), 3990 flags, 3991 timeUs), 3992 (status_t)OK); 3993 3994 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 3995 3996 if (!eos) { 3997 getMoreInputDataIfPossible(); 3998 } else { 3999 ALOGV("[%s] Signalled EOS on the input port", 4000 mCodec->mComponentName.c_str()); 4001 4002 mCodec->mPortEOS[kPortIndexInput] = true; 4003 mCodec->mInputEOSResult = err; 4004 } 4005 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 4006 if (err != ERROR_END_OF_STREAM) { 4007 ALOGV("[%s] Signalling EOS on the input port " 4008 "due to error %d", 4009 mCodec->mComponentName.c_str(), err); 4010 } else { 4011 ALOGV("[%s] Signalling EOS on the input port", 4012 mCodec->mComponentName.c_str()); 4013 } 4014 4015 ALOGV("[%s] calling emptyBuffer %p signalling EOS", 4016 mCodec->mComponentName.c_str(), bufferID); 4017 4018 CHECK_EQ(mCodec->mOMX->emptyBuffer( 4019 mCodec->mNode, 4020 bufferID, 4021 0, 4022 0, 4023 OMX_BUFFERFLAG_EOS, 4024 0), 4025 (status_t)OK); 4026 4027 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4028 4029 mCodec->mPortEOS[kPortIndexInput] = true; 4030 mCodec->mInputEOSResult = err; 4031 } 4032 break; 4033 } 4034 4035 default: 4036 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4037 break; 4038 } 4039} 4040 4041void ACodec::BaseState::getMoreInputDataIfPossible() { 4042 if (mCodec->mPortEOS[kPortIndexInput]) { 4043 return; 4044 } 4045 4046 BufferInfo *eligible = NULL; 4047 4048 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 4049 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 4050 4051#if 0 4052 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 4053 // There's already a "read" pending. 
4054 return; 4055 } 4056#endif 4057 4058 if (info->mStatus == BufferInfo::OWNED_BY_US) { 4059 eligible = info; 4060 } 4061 } 4062 4063 if (eligible == NULL) { 4064 return; 4065 } 4066 4067 postFillThisBuffer(eligible); 4068} 4069 4070bool ACodec::BaseState::onOMXFillBufferDone( 4071 IOMX::buffer_id bufferID, 4072 size_t rangeOffset, size_t rangeLength, 4073 OMX_U32 flags, 4074 int64_t timeUs) { 4075 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 4076 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 4077 4078 ssize_t index; 4079 4080#if TRACK_BUFFER_TIMING 4081 index = mCodec->mBufferStats.indexOfKey(timeUs); 4082 if (index >= 0) { 4083 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 4084 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 4085 4086 ALOGI("frame PTS %lld: %lld", 4087 timeUs, 4088 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 4089 4090 mCodec->mBufferStats.removeItemsAt(index); 4091 stats = NULL; 4092 } 4093#endif 4094 4095 BufferInfo *info = 4096 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 4097 4098 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); 4099 4100 info->mDequeuedAt = ++mCodec->mDequeueCounter; 4101 info->mStatus = BufferInfo::OWNED_BY_US; 4102 4103 PortMode mode = getPortMode(kPortIndexOutput); 4104 4105 switch (mode) { 4106 case KEEP_BUFFERS: 4107 break; 4108 4109 case RESUBMIT_BUFFERS: 4110 { 4111 if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) { 4112 ALOGV("[%s] calling fillBuffer %u", 4113 mCodec->mComponentName.c_str(), info->mBufferID); 4114 4115 CHECK_EQ(mCodec->mOMX->fillBuffer( 4116 mCodec->mNode, info->mBufferID), 4117 (status_t)OK); 4118 4119 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4120 break; 4121 } 4122 4123 sp<AMessage> reply = 4124 new AMessage(kWhatOutputBufferDrained, mCodec->id()); 4125 4126 if (!mCodec->mSentFormat && rangeLength > 0) { 4127 mCodec->sendFormatChange(reply); 4128 } 4129 4130 if 
(mCodec->mUseMetadataOnEncoderOutput) { 4131 native_handle_t* handle = 4132 *(native_handle_t**)(info->mData->data() + 4); 4133 info->mData->meta()->setPointer("handle", handle); 4134 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 4135 info->mData->meta()->setInt32("rangeLength", rangeLength); 4136 } else { 4137 info->mData->setRange(rangeOffset, rangeLength); 4138 } 4139#if 0 4140 if (mCodec->mNativeWindow == NULL) { 4141 if (IsIDR(info->mData)) { 4142 ALOGI("IDR frame"); 4143 } 4144 } 4145#endif 4146 4147 if (mCodec->mSkipCutBuffer != NULL) { 4148 mCodec->mSkipCutBuffer->submit(info->mData); 4149 } 4150 info->mData->meta()->setInt64("timeUs", timeUs); 4151 4152 sp<AMessage> notify = mCodec->mNotify->dup(); 4153 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 4154 notify->setInt32("buffer-id", info->mBufferID); 4155 notify->setBuffer("buffer", info->mData); 4156 notify->setInt32("flags", flags); 4157 4158 reply->setInt32("buffer-id", info->mBufferID); 4159 4160 notify->setMessage("reply", reply); 4161 4162 notify->post(); 4163 4164 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 4165 4166 if (flags & OMX_BUFFERFLAG_EOS) { 4167 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 4168 4169 sp<AMessage> notify = mCodec->mNotify->dup(); 4170 notify->setInt32("what", CodecBase::kWhatEOS); 4171 notify->setInt32("err", mCodec->mInputEOSResult); 4172 notify->post(); 4173 4174 mCodec->mPortEOS[kPortIndexOutput] = true; 4175 } 4176 break; 4177 } 4178 4179 default: 4180 { 4181 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4182 4183 CHECK_EQ((status_t)OK, 4184 mCodec->freeBuffer(kPortIndexOutput, index)); 4185 break; 4186 } 4187 } 4188 4189 return true; 4190} 4191 4192void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 4193 IOMX::buffer_id bufferID; 4194 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 4195 ssize_t index; 4196 BufferInfo *info = 4197 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 4198 
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); 4199 4200 android_native_rect_t crop; 4201 if (msg->findRect("crop", 4202 &crop.left, &crop.top, &crop.right, &crop.bottom)) { 4203 CHECK_EQ(0, native_window_set_crop( 4204 mCodec->mNativeWindow.get(), &crop)); 4205 } 4206 4207 int32_t render; 4208 if (mCodec->mNativeWindow != NULL 4209 && msg->findInt32("render", &render) && render != 0 4210 && info->mData != NULL && info->mData->size() != 0) { 4211 ATRACE_NAME("render"); 4212 // The client wants this buffer to be rendered. 4213 4214 int64_t timestampNs = 0; 4215 if (!msg->findInt64("timestampNs", ×tampNs)) { 4216 // TODO: it seems like we should use the timestamp 4217 // in the (media)buffer as it potentially came from 4218 // an input surface, but we did not propagate it prior to 4219 // API 20. Perhaps check for target SDK version. 4220#if 0 4221 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 4222 ALOGV("using buffer PTS of %" PRId64, timestampNs); 4223 timestampNs *= 1000; 4224 } 4225#endif 4226 } 4227 4228 status_t err; 4229 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 4230 if (err != OK) { 4231 ALOGW("failed to set buffer timestamp: %d", err); 4232 } 4233 4234 if ((err = mCodec->mNativeWindow->queueBuffer( 4235 mCodec->mNativeWindow.get(), 4236 info->mGraphicBuffer.get(), -1)) == OK) { 4237 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 4238 } else { 4239 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 4240 info->mStatus = BufferInfo::OWNED_BY_US; 4241 } 4242 } else { 4243 if (mCodec->mNativeWindow != NULL && 4244 (info->mData == NULL || info->mData->size() != 0)) { 4245 ATRACE_NAME("frame-drop"); 4246 } 4247 info->mStatus = BufferInfo::OWNED_BY_US; 4248 } 4249 4250 PortMode mode = getPortMode(kPortIndexOutput); 4251 4252 switch (mode) { 4253 case KEEP_BUFFERS: 4254 { 4255 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 
4256 4257 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4258 // We cannot resubmit the buffer we just rendered, dequeue 4259 // the spare instead. 4260 4261 info = mCodec->dequeueBufferFromNativeWindow(); 4262 } 4263 break; 4264 } 4265 4266 case RESUBMIT_BUFFERS: 4267 { 4268 if (!mCodec->mPortEOS[kPortIndexOutput]) { 4269 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4270 // We cannot resubmit the buffer we just rendered, dequeue 4271 // the spare instead. 4272 4273 info = mCodec->dequeueBufferFromNativeWindow(); 4274 } 4275 4276 if (info != NULL) { 4277 ALOGV("[%s] calling fillBuffer %u", 4278 mCodec->mComponentName.c_str(), info->mBufferID); 4279 4280 CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), 4281 (status_t)OK); 4282 4283 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4284 } 4285 } 4286 break; 4287 } 4288 4289 default: 4290 { 4291 CHECK_EQ((int)mode, (int)FREE_BUFFERS); 4292 4293 CHECK_EQ((status_t)OK, 4294 mCodec->freeBuffer(kPortIndexOutput, index)); 4295 break; 4296 } 4297 } 4298} 4299 4300//////////////////////////////////////////////////////////////////////////////// 4301 4302ACodec::UninitializedState::UninitializedState(ACodec *codec) 4303 : BaseState(codec) { 4304} 4305 4306void ACodec::UninitializedState::stateEntered() { 4307 ALOGV("Now uninitialized"); 4308 4309 if (mDeathNotifier != NULL) { 4310 mCodec->mOMX->asBinder()->unlinkToDeath(mDeathNotifier); 4311 mDeathNotifier.clear(); 4312 } 4313 4314 mCodec->mNativeWindow.clear(); 4315 mCodec->mNode = NULL; 4316 mCodec->mOMX.clear(); 4317 mCodec->mQuirks = 0; 4318 mCodec->mFlags = 0; 4319 mCodec->mUseMetadataOnEncoderOutput = 0; 4320 mCodec->mComponentName.clear(); 4321} 4322 4323bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 4324 bool handled = false; 4325 4326 switch (msg->what()) { 4327 case ACodec::kWhatSetup: 4328 { 4329 onSetup(msg); 4330 4331 handled = true; 4332 break; 4333 } 4334 4335 case 
ACodec::kWhatAllocateComponent: 4336 { 4337 onAllocateComponent(msg); 4338 handled = true; 4339 break; 4340 } 4341 4342 case ACodec::kWhatShutdown: 4343 { 4344 int32_t keepComponentAllocated; 4345 CHECK(msg->findInt32( 4346 "keepComponentAllocated", &keepComponentAllocated)); 4347 ALOGW_IF(keepComponentAllocated, 4348 "cannot keep component allocated on shutdown in Uninitialized state"); 4349 4350 sp<AMessage> notify = mCodec->mNotify->dup(); 4351 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4352 notify->post(); 4353 4354 handled = true; 4355 break; 4356 } 4357 4358 case ACodec::kWhatFlush: 4359 { 4360 sp<AMessage> notify = mCodec->mNotify->dup(); 4361 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 4362 notify->post(); 4363 4364 handled = true; 4365 break; 4366 } 4367 4368 default: 4369 return BaseState::onMessageReceived(msg); 4370 } 4371 4372 return handled; 4373} 4374 4375void ACodec::UninitializedState::onSetup( 4376 const sp<AMessage> &msg) { 4377 if (onAllocateComponent(msg) 4378 && mCodec->mLoadedState->onConfigureComponent(msg)) { 4379 mCodec->mLoadedState->onStart(); 4380 } 4381} 4382 4383bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 4384 ALOGV("onAllocateComponent"); 4385 4386 CHECK(mCodec->mNode == NULL); 4387 4388 OMXClient client; 4389 CHECK_EQ(client.connect(), (status_t)OK); 4390 4391 sp<IOMX> omx = client.interface(); 4392 4393 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id()); 4394 4395 mDeathNotifier = new DeathNotifier(notify); 4396 if (omx->asBinder()->linkToDeath(mDeathNotifier) != OK) { 4397 // This was a local binder, if it dies so do we, we won't care 4398 // about any notifications in the afterlife. 
4399 mDeathNotifier.clear(); 4400 } 4401 4402 Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs; 4403 4404 AString mime; 4405 4406 AString componentName; 4407 uint32_t quirks = 0; 4408 int32_t encoder = false; 4409 if (msg->findString("componentName", &componentName)) { 4410 ssize_t index = matchingCodecs.add(); 4411 OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index); 4412 entry->mName = String8(componentName.c_str()); 4413 4414 if (!OMXCodec::findCodecQuirks( 4415 componentName.c_str(), &entry->mQuirks)) { 4416 entry->mQuirks = 0; 4417 } 4418 } else { 4419 CHECK(msg->findString("mime", &mime)); 4420 4421 if (!msg->findInt32("encoder", &encoder)) { 4422 encoder = false; 4423 } 4424 4425 OMXCodec::findMatchingCodecs( 4426 mime.c_str(), 4427 encoder, // createEncoder 4428 NULL, // matchComponentName 4429 0, // flags 4430 &matchingCodecs); 4431 } 4432 4433 sp<CodecObserver> observer = new CodecObserver; 4434 IOMX::node_id node = NULL; 4435 4436 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 4437 ++matchIndex) { 4438 componentName = matchingCodecs.itemAt(matchIndex).mName.string(); 4439 quirks = matchingCodecs.itemAt(matchIndex).mQuirks; 4440 4441 pid_t tid = androidGetTid(); 4442 int prevPriority = androidGetThreadPriority(tid); 4443 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 4444 status_t err = omx->allocateNode(componentName.c_str(), observer, &node); 4445 androidSetThreadPriority(tid, prevPriority); 4446 4447 if (err == OK) { 4448 break; 4449 } else { 4450 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 4451 } 4452 4453 node = NULL; 4454 } 4455 4456 if (node == NULL) { 4457 if (!mime.empty()) { 4458 ALOGE("Unable to instantiate a %scoder for type '%s'.", 4459 encoder ? 
"en" : "de", mime.c_str()); 4460 } else { 4461 ALOGE("Unable to instantiate codec '%s'.", componentName.c_str()); 4462 } 4463 4464 mCodec->signalError(OMX_ErrorComponentNotFound); 4465 return false; 4466 } 4467 4468 notify = new AMessage(kWhatOMXMessage, mCodec->id()); 4469 observer->setNotificationMessage(notify); 4470 4471 mCodec->mComponentName = componentName; 4472 mCodec->mFlags = 0; 4473 4474 if (componentName.endsWith(".secure")) { 4475 mCodec->mFlags |= kFlagIsSecure; 4476 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 4477 } 4478 4479 mCodec->mQuirks = quirks; 4480 mCodec->mOMX = omx; 4481 mCodec->mNode = node; 4482 4483 { 4484 sp<AMessage> notify = mCodec->mNotify->dup(); 4485 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 4486 notify->setString("componentName", mCodec->mComponentName.c_str()); 4487 notify->post(); 4488 } 4489 4490 mCodec->changeState(mCodec->mLoadedState); 4491 4492 return true; 4493} 4494 4495//////////////////////////////////////////////////////////////////////////////// 4496 4497ACodec::LoadedState::LoadedState(ACodec *codec) 4498 : BaseState(codec) { 4499} 4500 4501void ACodec::LoadedState::stateEntered() { 4502 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 4503 4504 mCodec->mPortEOS[kPortIndexInput] = 4505 mCodec->mPortEOS[kPortIndexOutput] = false; 4506 4507 mCodec->mInputEOSResult = OK; 4508 4509 mCodec->mDequeueCounter = 0; 4510 mCodec->mMetaDataBuffersToSubmit = 0; 4511 mCodec->mRepeatFrameDelayUs = -1ll; 4512 mCodec->mInputFormat.clear(); 4513 mCodec->mOutputFormat.clear(); 4514 4515 if (mCodec->mShutdownInProgress) { 4516 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 4517 4518 mCodec->mShutdownInProgress = false; 4519 mCodec->mKeepComponentAllocated = false; 4520 4521 onShutdown(keepComponentAllocated); 4522 } 4523 mCodec->mExplicitShutdown = false; 4524} 4525 4526void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 4527 if (!keepComponentAllocated) { 
4528 CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); 4529 4530 mCodec->changeState(mCodec->mUninitializedState); 4531 } 4532 4533 if (mCodec->mExplicitShutdown) { 4534 sp<AMessage> notify = mCodec->mNotify->dup(); 4535 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 4536 notify->post(); 4537 mCodec->mExplicitShutdown = false; 4538 } 4539} 4540 4541bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 4542 bool handled = false; 4543 4544 switch (msg->what()) { 4545 case ACodec::kWhatConfigureComponent: 4546 { 4547 onConfigureComponent(msg); 4548 handled = true; 4549 break; 4550 } 4551 4552 case ACodec::kWhatCreateInputSurface: 4553 { 4554 onCreateInputSurface(msg); 4555 handled = true; 4556 break; 4557 } 4558 4559 case ACodec::kWhatStart: 4560 { 4561 onStart(); 4562 handled = true; 4563 break; 4564 } 4565 4566 case ACodec::kWhatShutdown: 4567 { 4568 int32_t keepComponentAllocated; 4569 CHECK(msg->findInt32( 4570 "keepComponentAllocated", &keepComponentAllocated)); 4571 4572 mCodec->mExplicitShutdown = true; 4573 onShutdown(keepComponentAllocated); 4574 4575 handled = true; 4576 break; 4577 } 4578 4579 case ACodec::kWhatFlush: 4580 { 4581 sp<AMessage> notify = mCodec->mNotify->dup(); 4582 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 4583 notify->post(); 4584 4585 handled = true; 4586 break; 4587 } 4588 4589 default: 4590 return BaseState::onMessageReceived(msg); 4591 } 4592 4593 return handled; 4594} 4595 4596bool ACodec::LoadedState::onConfigureComponent( 4597 const sp<AMessage> &msg) { 4598 ALOGV("onConfigureComponent"); 4599 4600 CHECK(mCodec->mNode != NULL); 4601 4602 AString mime; 4603 CHECK(msg->findString("mime", &mime)); 4604 4605 status_t err = mCodec->configureCodec(mime.c_str(), msg); 4606 4607 if (err != OK) { 4608 ALOGE("[%s] configureCodec returning error %d", 4609 mCodec->mComponentName.c_str(), err); 4610 4611 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 4612 return 
false; 4613 } 4614 4615 sp<RefBase> obj; 4616 if (msg->findObject("native-window", &obj) 4617 && strncmp("OMX.google.", mCodec->mComponentName.c_str(), 11)) { 4618 sp<NativeWindowWrapper> nativeWindow( 4619 static_cast<NativeWindowWrapper *>(obj.get())); 4620 CHECK(nativeWindow != NULL); 4621 mCodec->mNativeWindow = nativeWindow->getNativeWindow(); 4622 4623 native_window_set_scaling_mode( 4624 mCodec->mNativeWindow.get(), 4625 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); 4626 } 4627 CHECK_EQ((status_t)OK, mCodec->initNativeWindow()); 4628 4629 { 4630 sp<AMessage> notify = mCodec->mNotify->dup(); 4631 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 4632 notify->setMessage("input-format", mCodec->mInputFormat); 4633 notify->setMessage("output-format", mCodec->mOutputFormat); 4634 notify->post(); 4635 } 4636 4637 return true; 4638} 4639 4640void ACodec::LoadedState::onCreateInputSurface( 4641 const sp<AMessage> & /* msg */) { 4642 ALOGV("onCreateInputSurface"); 4643 4644 sp<AMessage> notify = mCodec->mNotify->dup(); 4645 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 4646 4647 sp<IGraphicBufferProducer> bufferProducer; 4648 status_t err; 4649 4650 err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, 4651 &bufferProducer); 4652 4653 if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) { 4654 err = mCodec->mOMX->setInternalOption( 4655 mCodec->mNode, 4656 kPortIndexInput, 4657 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 4658 &mCodec->mRepeatFrameDelayUs, 4659 sizeof(mCodec->mRepeatFrameDelayUs)); 4660 4661 if (err != OK) { 4662 ALOGE("[%s] Unable to configure option to repeat previous " 4663 "frames (err %d)", 4664 mCodec->mComponentName.c_str(), 4665 err); 4666 } 4667 } 4668 4669 if (err == OK && mCodec->mMaxPtsGapUs > 0ll) { 4670 err = mCodec->mOMX->setInternalOption( 4671 mCodec->mNode, 4672 kPortIndexInput, 4673 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 4674 &mCodec->mMaxPtsGapUs, 4675 
sizeof(mCodec->mMaxPtsGapUs)); 4676 4677 if (err != OK) { 4678 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 4679 mCodec->mComponentName.c_str(), 4680 err); 4681 } 4682 } 4683 4684 if (err == OK && mCodec->mTimePerCaptureUs > 0ll 4685 && mCodec->mTimePerFrameUs > 0ll) { 4686 int64_t timeLapse[2]; 4687 timeLapse[0] = mCodec->mTimePerFrameUs; 4688 timeLapse[1] = mCodec->mTimePerCaptureUs; 4689 err = mCodec->mOMX->setInternalOption( 4690 mCodec->mNode, 4691 kPortIndexInput, 4692 IOMX::INTERNAL_OPTION_TIME_LAPSE, 4693 &timeLapse[0], 4694 sizeof(timeLapse)); 4695 4696 if (err != OK) { 4697 ALOGE("[%s] Unable to configure time lapse (err %d)", 4698 mCodec->mComponentName.c_str(), 4699 err); 4700 } 4701 } 4702 4703 if (err == OK && mCodec->mCreateInputBuffersSuspended) { 4704 bool suspend = true; 4705 err = mCodec->mOMX->setInternalOption( 4706 mCodec->mNode, 4707 kPortIndexInput, 4708 IOMX::INTERNAL_OPTION_SUSPEND, 4709 &suspend, 4710 sizeof(suspend)); 4711 4712 if (err != OK) { 4713 ALOGE("[%s] Unable to configure option to suspend (err %d)", 4714 mCodec->mComponentName.c_str(), 4715 err); 4716 } 4717 } 4718 4719 if (err == OK) { 4720 notify->setObject("input-surface", 4721 new BufferProducerWrapper(bufferProducer)); 4722 } else { 4723 // Can't use mCodec->signalError() here -- MediaCodec won't forward 4724 // the error through because it's in the "configured" state. We 4725 // send a kWhatInputSurfaceCreated with an error value instead. 
4726 ALOGE("[%s] onCreateInputSurface returning error %d", 4727 mCodec->mComponentName.c_str(), err); 4728 notify->setInt32("err", err); 4729 } 4730 notify->post(); 4731} 4732 4733void ACodec::LoadedState::onStart() { 4734 ALOGV("onStart"); 4735 4736 CHECK_EQ(mCodec->mOMX->sendCommand( 4737 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), 4738 (status_t)OK); 4739 4740 mCodec->changeState(mCodec->mLoadedToIdleState); 4741} 4742 4743//////////////////////////////////////////////////////////////////////////////// 4744 4745ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 4746 : BaseState(codec) { 4747} 4748 4749void ACodec::LoadedToIdleState::stateEntered() { 4750 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 4751 4752 status_t err; 4753 if ((err = allocateBuffers()) != OK) { 4754 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 4755 "(error 0x%08x)", 4756 err); 4757 4758 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 4759 4760 mCodec->changeState(mCodec->mLoadedState); 4761 } 4762} 4763 4764status_t ACodec::LoadedToIdleState::allocateBuffers() { 4765 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 4766 4767 if (err != OK) { 4768 return err; 4769 } 4770 4771 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 4772} 4773 4774bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 4775 switch (msg->what()) { 4776 case kWhatSetParameters: 4777 case kWhatShutdown: 4778 { 4779 mCodec->deferMessage(msg); 4780 return true; 4781 } 4782 4783 case kWhatSignalEndOfInputStream: 4784 { 4785 mCodec->onSignalEndOfInputStream(); 4786 return true; 4787 } 4788 4789 case kWhatResume: 4790 { 4791 // We'll be active soon enough. 4792 return true; 4793 } 4794 4795 case kWhatFlush: 4796 { 4797 // We haven't even started yet, so we're flushed alright... 
4798 sp<AMessage> notify = mCodec->mNotify->dup(); 4799 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 4800 notify->post(); 4801 return true; 4802 } 4803 4804 default: 4805 return BaseState::onMessageReceived(msg); 4806 } 4807} 4808 4809bool ACodec::LoadedToIdleState::onOMXEvent( 4810 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 4811 switch (event) { 4812 case OMX_EventCmdComplete: 4813 { 4814 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 4815 CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); 4816 4817 CHECK_EQ(mCodec->mOMX->sendCommand( 4818 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting), 4819 (status_t)OK); 4820 4821 mCodec->changeState(mCodec->mIdleToExecutingState); 4822 4823 return true; 4824 } 4825 4826 default: 4827 return BaseState::onOMXEvent(event, data1, data2); 4828 } 4829} 4830 4831//////////////////////////////////////////////////////////////////////////////// 4832 4833ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 4834 : BaseState(codec) { 4835} 4836 4837void ACodec::IdleToExecutingState::stateEntered() { 4838 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 4839} 4840 4841bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 4842 switch (msg->what()) { 4843 case kWhatSetParameters: 4844 case kWhatShutdown: 4845 { 4846 mCodec->deferMessage(msg); 4847 return true; 4848 } 4849 4850 case kWhatResume: 4851 { 4852 // We'll be active soon enough. 4853 return true; 4854 } 4855 4856 case kWhatFlush: 4857 { 4858 // We haven't even started yet, so we're flushed alright... 
4859 sp<AMessage> notify = mCodec->mNotify->dup(); 4860 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 4861 notify->post(); 4862 4863 return true; 4864 } 4865 4866 case kWhatSignalEndOfInputStream: 4867 { 4868 mCodec->onSignalEndOfInputStream(); 4869 return true; 4870 } 4871 4872 default: 4873 return BaseState::onMessageReceived(msg); 4874 } 4875} 4876 4877bool ACodec::IdleToExecutingState::onOMXEvent( 4878 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 4879 switch (event) { 4880 case OMX_EventCmdComplete: 4881 { 4882 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 4883 CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting); 4884 4885 mCodec->mExecutingState->resume(); 4886 mCodec->changeState(mCodec->mExecutingState); 4887 4888 return true; 4889 } 4890 4891 default: 4892 return BaseState::onOMXEvent(event, data1, data2); 4893 } 4894} 4895 4896//////////////////////////////////////////////////////////////////////////////// 4897 4898ACodec::ExecutingState::ExecutingState(ACodec *codec) 4899 : BaseState(codec), 4900 mActive(false) { 4901} 4902 4903ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 4904 OMX_U32 /* portIndex */) { 4905 return RESUBMIT_BUFFERS; 4906} 4907 4908void ACodec::ExecutingState::submitOutputMetaBuffers() { 4909 // submit as many buffers as there are input buffers with the codec 4910 // in case we are in port reconfiguring 4911 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 4912 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 4913 4914 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4915 if (mCodec->submitOutputMetaDataBuffer() != OK) 4916 break; 4917 } 4918 } 4919 4920 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 4921 mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); 4922} 4923 4924void ACodec::ExecutingState::submitRegularOutputBuffers() { 4925 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 4926 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 4927 4928 if (mCodec->mNativeWindow != NULL) { 4929 CHECK(info->mStatus == BufferInfo::OWNED_BY_US 4930 || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW); 4931 4932 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4933 continue; 4934 } 4935 } else { 4936 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 4937 } 4938 4939 ALOGV("[%s] calling fillBuffer %p", 4940 mCodec->mComponentName.c_str(), info->mBufferID); 4941 4942 CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), 4943 (status_t)OK); 4944 4945 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 4946 } 4947} 4948 4949void ACodec::ExecutingState::submitOutputBuffers() { 4950 submitRegularOutputBuffers(); 4951 if (mCodec->mStoreMetaDataInOutputBuffers) { 4952 submitOutputMetaBuffers(); 4953 } 4954} 4955 4956void ACodec::ExecutingState::resume() { 4957 if (mActive) { 4958 ALOGV("[%s] We're already active, no need to resume.", 4959 mCodec->mComponentName.c_str()); 4960 4961 return; 4962 } 4963 4964 submitOutputBuffers(); 4965 4966 // Post all available input buffers 4967 CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u); 4968 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 4969 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 4970 if (info->mStatus == BufferInfo::OWNED_BY_US) { 4971 postFillThisBuffer(info); 4972 } 4973 } 4974 4975 mActive = true; 4976} 4977 4978void ACodec::ExecutingState::stateEntered() { 4979 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 4980 4981 mCodec->processDeferredMessages(); 4982} 4983 4984bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 4985 bool handled = false; 4986 4987 switch (msg->what()) { 4988 case kWhatShutdown: 4989 { 4990 int32_t keepComponentAllocated; 4991 CHECK(msg->findInt32( 4992 "keepComponentAllocated", &keepComponentAllocated)); 4993 4994 mCodec->mShutdownInProgress = true; 4995 
mCodec->mExplicitShutdown = true; 4996 mCodec->mKeepComponentAllocated = keepComponentAllocated; 4997 4998 mActive = false; 4999 5000 CHECK_EQ(mCodec->mOMX->sendCommand( 5001 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), 5002 (status_t)OK); 5003 5004 mCodec->changeState(mCodec->mExecutingToIdleState); 5005 5006 handled = true; 5007 break; 5008 } 5009 5010 case kWhatFlush: 5011 { 5012 ALOGV("[%s] ExecutingState flushing now " 5013 "(codec owns %d/%d input, %d/%d output).", 5014 mCodec->mComponentName.c_str(), 5015 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 5016 mCodec->mBuffers[kPortIndexInput].size(), 5017 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 5018 mCodec->mBuffers[kPortIndexOutput].size()); 5019 5020 mActive = false; 5021 5022 CHECK_EQ(mCodec->mOMX->sendCommand( 5023 mCodec->mNode, OMX_CommandFlush, OMX_ALL), 5024 (status_t)OK); 5025 5026 mCodec->changeState(mCodec->mFlushingState); 5027 handled = true; 5028 break; 5029 } 5030 5031 case kWhatResume: 5032 { 5033 resume(); 5034 5035 handled = true; 5036 break; 5037 } 5038 5039 case kWhatRequestIDRFrame: 5040 { 5041 status_t err = mCodec->requestIDRFrame(); 5042 if (err != OK) { 5043 ALOGW("Requesting an IDR frame failed."); 5044 } 5045 5046 handled = true; 5047 break; 5048 } 5049 5050 case kWhatSetParameters: 5051 { 5052 sp<AMessage> params; 5053 CHECK(msg->findMessage("params", ¶ms)); 5054 5055 status_t err = mCodec->setParameters(params); 5056 5057 sp<AMessage> reply; 5058 if (msg->findMessage("reply", &reply)) { 5059 reply->setInt32("err", err); 5060 reply->post(); 5061 } 5062 5063 handled = true; 5064 break; 5065 } 5066 5067 case ACodec::kWhatSignalEndOfInputStream: 5068 { 5069 mCodec->onSignalEndOfInputStream(); 5070 handled = true; 5071 break; 5072 } 5073 5074 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 5075 case kWhatSubmitOutputMetaDataBufferIfEOS: 5076 { 5077 if (mCodec->mPortEOS[kPortIndexInput] && 5078 !mCodec->mPortEOS[kPortIndexOutput]) { 5079 status_t 
err = mCodec->submitOutputMetaDataBuffer(); 5080 if (err == OK) { 5081 mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); 5082 } 5083 } 5084 return true; 5085 } 5086 5087 default: 5088 handled = BaseState::onMessageReceived(msg); 5089 break; 5090 } 5091 5092 return handled; 5093} 5094 5095status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 5096 int32_t videoBitrate; 5097 if (params->findInt32("video-bitrate", &videoBitrate)) { 5098 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 5099 InitOMXParams(&configParams); 5100 configParams.nPortIndex = kPortIndexOutput; 5101 configParams.nEncodeBitrate = videoBitrate; 5102 5103 status_t err = mOMX->setConfig( 5104 mNode, 5105 OMX_IndexConfigVideoBitrate, 5106 &configParams, 5107 sizeof(configParams)); 5108 5109 if (err != OK) { 5110 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 5111 videoBitrate, err); 5112 5113 return err; 5114 } 5115 } 5116 5117 int64_t skipFramesBeforeUs; 5118 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 5119 status_t err = 5120 mOMX->setInternalOption( 5121 mNode, 5122 kPortIndexInput, 5123 IOMX::INTERNAL_OPTION_START_TIME, 5124 &skipFramesBeforeUs, 5125 sizeof(skipFramesBeforeUs)); 5126 5127 if (err != OK) { 5128 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 5129 return err; 5130 } 5131 } 5132 5133 int32_t dropInputFrames; 5134 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 5135 bool suspend = dropInputFrames != 0; 5136 5137 status_t err = 5138 mOMX->setInternalOption( 5139 mNode, 5140 kPortIndexInput, 5141 IOMX::INTERNAL_OPTION_SUSPEND, 5142 &suspend, 5143 sizeof(suspend)); 5144 5145 if (err != OK) { 5146 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 5147 return err; 5148 } 5149 } 5150 5151 int32_t dummy; 5152 if (params->findInt32("request-sync", &dummy)) { 5153 status_t err = requestIDRFrame(); 5154 5155 if (err != OK) { 5156 ALOGE("Requesting a sync frame failed w/ err %d", err); 
5157 return err; 5158 } 5159 } 5160 5161 return OK; 5162} 5163 5164void ACodec::onSignalEndOfInputStream() { 5165 sp<AMessage> notify = mNotify->dup(); 5166 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 5167 5168 status_t err = mOMX->signalEndOfInputStream(mNode); 5169 if (err != OK) { 5170 notify->setInt32("err", err); 5171 } 5172 notify->post(); 5173} 5174 5175bool ACodec::ExecutingState::onOMXEvent( 5176 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5177 switch (event) { 5178 case OMX_EventPortSettingsChanged: 5179 { 5180 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 5181 5182 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 5183 mCodec->mMetaDataBuffersToSubmit = 0; 5184 CHECK_EQ(mCodec->mOMX->sendCommand( 5185 mCodec->mNode, 5186 OMX_CommandPortDisable, kPortIndexOutput), 5187 (status_t)OK); 5188 5189 mCodec->freeOutputBuffersNotOwnedByComponent(); 5190 5191 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 5192 } else if (data2 == OMX_IndexConfigCommonOutputCrop) { 5193 mCodec->mSentFormat = false; 5194 } else { 5195 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx", 5196 mCodec->mComponentName.c_str(), data2); 5197 } 5198 5199 return true; 5200 } 5201 5202 case OMX_EventBufferFlag: 5203 { 5204 return true; 5205 } 5206 5207 default: 5208 return BaseState::onOMXEvent(event, data1, data2); 5209 } 5210} 5211 5212//////////////////////////////////////////////////////////////////////////////// 5213 5214ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 5215 ACodec *codec) 5216 : BaseState(codec) { 5217} 5218 5219ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 5220 OMX_U32 portIndex) { 5221 if (portIndex == kPortIndexOutput) { 5222 return FREE_BUFFERS; 5223 } 5224 5225 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 5226 5227 return RESUBMIT_BUFFERS; 5228} 5229 5230bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 5231 const sp<AMessage> &msg) { 5232 bool 
handled = false; 5233 5234 switch (msg->what()) { 5235 case kWhatFlush: 5236 case kWhatShutdown: 5237 case kWhatResume: 5238 { 5239 if (msg->what() == kWhatResume) { 5240 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 5241 } 5242 5243 mCodec->deferMessage(msg); 5244 handled = true; 5245 break; 5246 } 5247 5248 default: 5249 handled = BaseState::onMessageReceived(msg); 5250 break; 5251 } 5252 5253 return handled; 5254} 5255 5256void ACodec::OutputPortSettingsChangedState::stateEntered() { 5257 ALOGV("[%s] Now handling output port settings change", 5258 mCodec->mComponentName.c_str()); 5259} 5260 5261bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 5262 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5263 switch (event) { 5264 case OMX_EventCmdComplete: 5265 { 5266 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 5267 CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); 5268 5269 ALOGV("[%s] Output port now disabled.", 5270 mCodec->mComponentName.c_str()); 5271 5272 CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty()); 5273 mCodec->mDealer[kPortIndexOutput].clear(); 5274 5275 CHECK_EQ(mCodec->mOMX->sendCommand( 5276 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput), 5277 (status_t)OK); 5278 5279 status_t err; 5280 if ((err = mCodec->allocateBuffersOnPort( 5281 kPortIndexOutput)) != OK) { 5282 ALOGE("Failed to allocate output port buffers after " 5283 "port reconfiguration (error 0x%08x)", 5284 err); 5285 5286 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5287 5288 // This is technically not correct, but appears to be 5289 // the only way to free the component instance. 5290 // Controlled transitioning from excecuting->idle 5291 // and idle->loaded seem impossible probably because 5292 // the output port never finishes re-enabling. 
5293 mCodec->mShutdownInProgress = true; 5294 mCodec->mKeepComponentAllocated = false; 5295 mCodec->changeState(mCodec->mLoadedState); 5296 } 5297 5298 return true; 5299 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 5300 CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); 5301 5302 mCodec->mSentFormat = false; 5303 5304 ALOGV("[%s] Output port now reenabled.", 5305 mCodec->mComponentName.c_str()); 5306 5307 if (mCodec->mExecutingState->active()) { 5308 mCodec->mExecutingState->submitOutputBuffers(); 5309 } 5310 5311 mCodec->changeState(mCodec->mExecutingState); 5312 5313 return true; 5314 } 5315 5316 return false; 5317 } 5318 5319 default: 5320 return false; 5321 } 5322} 5323 5324//////////////////////////////////////////////////////////////////////////////// 5325 5326ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 5327 : BaseState(codec), 5328 mComponentNowIdle(false) { 5329} 5330 5331bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 5332 bool handled = false; 5333 5334 switch (msg->what()) { 5335 case kWhatFlush: 5336 { 5337 // Don't send me a flush request if you previously wanted me 5338 // to shutdown. 5339 TRESPASS(); 5340 break; 5341 } 5342 5343 case kWhatShutdown: 5344 { 5345 // We're already doing that... 
5346 5347 handled = true; 5348 break; 5349 } 5350 5351 default: 5352 handled = BaseState::onMessageReceived(msg); 5353 break; 5354 } 5355 5356 return handled; 5357} 5358 5359void ACodec::ExecutingToIdleState::stateEntered() { 5360 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 5361 5362 mComponentNowIdle = false; 5363 mCodec->mSentFormat = false; 5364} 5365 5366bool ACodec::ExecutingToIdleState::onOMXEvent( 5367 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5368 switch (event) { 5369 case OMX_EventCmdComplete: 5370 { 5371 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5372 CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); 5373 5374 mComponentNowIdle = true; 5375 5376 changeStateIfWeOwnAllBuffers(); 5377 5378 return true; 5379 } 5380 5381 case OMX_EventPortSettingsChanged: 5382 case OMX_EventBufferFlag: 5383 { 5384 // We're shutting down and don't care about this anymore. 5385 return true; 5386 } 5387 5388 default: 5389 return BaseState::onOMXEvent(event, data1, data2); 5390 } 5391} 5392 5393void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 5394 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 5395 CHECK_EQ(mCodec->mOMX->sendCommand( 5396 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded), 5397 (status_t)OK); 5398 5399 CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK); 5400 CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK); 5401 5402 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 5403 && mCodec->mNativeWindow != NULL) { 5404 // We push enough 1x1 blank buffers to ensure that one of 5405 // them has made it to the display. This allows the OMX 5406 // component teardown to zero out any protected buffers 5407 // without the risk of scanning out one of those buffers. 
5408 mCodec->pushBlankBuffersToNativeWindow(); 5409 } 5410 5411 mCodec->changeState(mCodec->mIdleToLoadedState); 5412 } 5413} 5414 5415void ACodec::ExecutingToIdleState::onInputBufferFilled( 5416 const sp<AMessage> &msg) { 5417 BaseState::onInputBufferFilled(msg); 5418 5419 changeStateIfWeOwnAllBuffers(); 5420} 5421 5422void ACodec::ExecutingToIdleState::onOutputBufferDrained( 5423 const sp<AMessage> &msg) { 5424 BaseState::onOutputBufferDrained(msg); 5425 5426 changeStateIfWeOwnAllBuffers(); 5427} 5428 5429//////////////////////////////////////////////////////////////////////////////// 5430 5431ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 5432 : BaseState(codec) { 5433} 5434 5435bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 5436 bool handled = false; 5437 5438 switch (msg->what()) { 5439 case kWhatShutdown: 5440 { 5441 // We're already doing that... 5442 5443 handled = true; 5444 break; 5445 } 5446 5447 case kWhatFlush: 5448 { 5449 // Don't send me a flush request if you previously wanted me 5450 // to shutdown. 
5451 TRESPASS(); 5452 break; 5453 } 5454 5455 default: 5456 handled = BaseState::onMessageReceived(msg); 5457 break; 5458 } 5459 5460 return handled; 5461} 5462 5463void ACodec::IdleToLoadedState::stateEntered() { 5464 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 5465} 5466 5467bool ACodec::IdleToLoadedState::onOMXEvent( 5468 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5469 switch (event) { 5470 case OMX_EventCmdComplete: 5471 { 5472 CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); 5473 CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded); 5474 5475 mCodec->changeState(mCodec->mLoadedState); 5476 5477 return true; 5478 } 5479 5480 default: 5481 return BaseState::onOMXEvent(event, data1, data2); 5482 } 5483} 5484 5485//////////////////////////////////////////////////////////////////////////////// 5486 5487ACodec::FlushingState::FlushingState(ACodec *codec) 5488 : BaseState(codec) { 5489} 5490 5491void ACodec::FlushingState::stateEntered() { 5492 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 5493 5494 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 5495} 5496 5497bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 5498 bool handled = false; 5499 5500 switch (msg->what()) { 5501 case kWhatShutdown: 5502 { 5503 mCodec->deferMessage(msg); 5504 break; 5505 } 5506 5507 case kWhatFlush: 5508 { 5509 // We're already doing this right now. 
5510 handled = true; 5511 break; 5512 } 5513 5514 default: 5515 handled = BaseState::onMessageReceived(msg); 5516 break; 5517 } 5518 5519 return handled; 5520} 5521 5522bool ACodec::FlushingState::onOMXEvent( 5523 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5524 ALOGV("[%s] FlushingState onOMXEvent(%d,%ld)", 5525 mCodec->mComponentName.c_str(), event, data1); 5526 5527 switch (event) { 5528 case OMX_EventCmdComplete: 5529 { 5530 CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush); 5531 5532 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 5533 CHECK(!mFlushComplete[data2]); 5534 mFlushComplete[data2] = true; 5535 5536 if (mFlushComplete[kPortIndexInput] 5537 && mFlushComplete[kPortIndexOutput]) { 5538 changeStateIfWeOwnAllBuffers(); 5539 } 5540 } else { 5541 CHECK_EQ(data2, OMX_ALL); 5542 CHECK(mFlushComplete[kPortIndexInput]); 5543 CHECK(mFlushComplete[kPortIndexOutput]); 5544 5545 changeStateIfWeOwnAllBuffers(); 5546 } 5547 5548 return true; 5549 } 5550 5551 case OMX_EventPortSettingsChanged: 5552 { 5553 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id()); 5554 msg->setInt32("type", omx_message::EVENT); 5555 msg->setInt32("node", mCodec->mNode); 5556 msg->setInt32("event", event); 5557 msg->setInt32("data1", data1); 5558 msg->setInt32("data2", data2); 5559 5560 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 5561 mCodec->mComponentName.c_str()); 5562 5563 mCodec->deferMessage(msg); 5564 5565 return true; 5566 } 5567 5568 default: 5569 return BaseState::onOMXEvent(event, data1, data2); 5570 } 5571 5572 return true; 5573} 5574 5575void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 5576 BaseState::onOutputBufferDrained(msg); 5577 5578 changeStateIfWeOwnAllBuffers(); 5579} 5580 5581void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 5582 BaseState::onInputBufferFilled(msg); 5583 5584 changeStateIfWeOwnAllBuffers(); 5585} 5586 5587void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 
5588 if (mFlushComplete[kPortIndexInput] 5589 && mFlushComplete[kPortIndexOutput] 5590 && mCodec->allYourBuffersAreBelongToUs()) { 5591 // We now own all buffers except possibly those still queued with 5592 // the native window for rendering. Let's get those back as well. 5593 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 5594 5595 sp<AMessage> notify = mCodec->mNotify->dup(); 5596 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5597 notify->post(); 5598 5599 mCodec->mPortEOS[kPortIndexInput] = 5600 mCodec->mPortEOS[kPortIndexOutput] = false; 5601 5602 mCodec->mInputEOSResult = OK; 5603 5604 if (mCodec->mSkipCutBuffer != NULL) { 5605 mCodec->mSkipCutBuffer->clear(); 5606 } 5607 5608 mCodec->changeState(mCodec->mExecutingState); 5609 } 5610} 5611 5612} // namespace android 5613