ACodec.cpp revision 5b4401b28990107bb797dc0d5f3904cf27417e98
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void 
onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430protected: 431 
virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mIsVideo(false), 502 mIsEncoder(false), 503 mFatalError(false), 504 mShutdownInProgress(false), 505 mExplicitShutdown(false), 506 mEncoderDelay(0), 507 mEncoderPadding(0), 508 mRotationDegrees(0), 509 mChannelMaskPresent(false), 510 mChannelMask(0), 511 mDequeueCounter(0), 512 mInputMetadataType(kMetadataBufferTypeInvalid), 513 mOutputMetadataType(kMetadataBufferTypeInvalid), 514 mLegacyAdaptiveExperiment(false), 515 mMetadataBuffersToSubmit(0), 516 mNumUndequeuedBuffers(0), 517 mRepeatFrameDelayUs(-1ll), 518 mMaxPtsGapUs(-1ll), 519 mMaxFps(-1), 520 mTimePerFrameUs(-1ll), 521 mTimePerCaptureUs(-1ll), 522 mCreateInputBuffersSuspended(false), 523 mTunneled(false), 524 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) { 525 mUninitializedState = new UninitializedState(this); 526 mLoadedState = new LoadedState(this); 527 mLoadedToIdleState = new LoadedToIdleState(this); 528 mIdleToExecutingState = new IdleToExecutingState(this); 529 mExecutingState = new ExecutingState(this); 530 531 mOutputPortSettingsChangedState = 532 new OutputPortSettingsChangedState(this); 533 534 mExecutingToIdleState = new ExecutingToIdleState(this); 535 mIdleToLoadedState = new IdleToLoadedState(this); 536 mFlushingState = new FlushingState(this); 537 538 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = 
false; 539 mInputEOSResult = OK; 540 541 changeState(mUninitializedState); 542} 543 544ACodec::~ACodec() { 545} 546 547void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 548 mNotify = msg; 549} 550 551void ACodec::initiateSetup(const sp<AMessage> &msg) { 552 msg->setWhat(kWhatSetup); 553 msg->setTarget(this); 554 msg->post(); 555} 556 557void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 558 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 559 msg->setMessage("params", params); 560 msg->post(); 561} 562 563void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 564 msg->setWhat(kWhatAllocateComponent); 565 msg->setTarget(this); 566 msg->post(); 567} 568 569void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatConfigureComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575status_t ACodec::setSurface(const sp<Surface> &surface) { 576 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 577 msg->setObject("surface", surface); 578 579 sp<AMessage> response; 580 status_t err = msg->postAndAwaitResponse(&response); 581 582 if (err == OK) { 583 (void)response->findInt32("err", &err); 584 } 585 return err; 586} 587 588void ACodec::initiateCreateInputSurface() { 589 (new AMessage(kWhatCreateInputSurface, this))->post(); 590} 591 592void ACodec::initiateSetInputSurface( 593 const sp<PersistentSurface> &surface) { 594 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 595 msg->setObject("input-surface", surface); 596 msg->post(); 597} 598 599void ACodec::signalEndOfInputStream() { 600 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 601} 602 603void ACodec::initiateStart() { 604 (new AMessage(kWhatStart, this))->post(); 605} 606 607void ACodec::signalFlush() { 608 ALOGV("[%s] signalFlush", mComponentName.c_str()); 609 (new AMessage(kWhatFlush, this))->post(); 610} 611 612void ACodec::signalResume() { 613 (new AMessage(kWhatResume, this))->post(); 614} 615 616void 
ACodec::initiateShutdown(bool keepComponentAllocated) { 617 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 618 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 619 msg->post(); 620 if (!keepComponentAllocated) { 621 // ensure shutdown completes in 3 seconds 622 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 623 } 624} 625 626void ACodec::signalRequestIDRFrame() { 627 (new AMessage(kWhatRequestIDRFrame, this))->post(); 628} 629 630// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 631// Some codecs may return input buffers before having them processed. 632// This causes a halt if we already signaled an EOS on the input 633// port. For now keep submitting an output buffer if there was an 634// EOS on the input port, but not yet on the output port. 635void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 636 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 637 mMetadataBuffersToSubmit > 0) { 638 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 639 } 640} 641 642status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 643 // allow keeping unset surface 644 if (surface == NULL) { 645 if (mNativeWindow != NULL) { 646 ALOGW("cannot unset a surface"); 647 return INVALID_OPERATION; 648 } 649 return OK; 650 } 651 652 // cannot switch from bytebuffers to surface 653 if (mNativeWindow == NULL) { 654 ALOGW("component was not configured with a surface"); 655 return INVALID_OPERATION; 656 } 657 658 ANativeWindow *nativeWindow = surface.get(); 659 // if we have not yet started the codec, we can simply set the native window 660 if (mBuffers[kPortIndexInput].size() == 0) { 661 mNativeWindow = surface; 662 return OK; 663 } 664 665 // we do not support changing a tunneled surface after start 666 if (mTunneled) { 667 ALOGW("cannot change tunneled surface"); 668 return INVALID_OPERATION; 669 } 670 671 int usageBits = 0; 672 status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, 
&usageBits); 673 if (err != OK) { 674 return err; 675 } 676 677 int ignoredFlags = kVideoGrallocUsage; 678 // New output surface is not allowed to add new usage flag except ignored ones. 679 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 680 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 681 return BAD_VALUE; 682 } 683 684 // get min undequeued count. We cannot switch to a surface that has a higher 685 // undequeued count than we allocated. 686 int minUndequeuedBuffers = 0; 687 err = nativeWindow->query( 688 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 689 &minUndequeuedBuffers); 690 if (err != 0) { 691 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 692 strerror(-err), -err); 693 return err; 694 } 695 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 696 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 697 minUndequeuedBuffers, mNumUndequeuedBuffers); 698 return BAD_VALUE; 699 } 700 701 // we cannot change the number of output buffers while OMX is running 702 // set up surface to the same count 703 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 704 ALOGV("setting up surface for %zu buffers", buffers.size()); 705 706 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 707 if (err != 0) { 708 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 709 -err); 710 return err; 711 } 712 713 // need to enable allocation when attaching 714 surface->getIGraphicBufferProducer()->allowAllocation(true); 715 716 // for meta data mode, we move dequeud buffers to the new surface. 
717 // for non-meta mode, we must move all registered buffers 718 for (size_t i = 0; i < buffers.size(); ++i) { 719 const BufferInfo &info = buffers[i]; 720 // skip undequeued buffers for meta data mode 721 if (storingMetadataInDecodedBuffers() 722 && !mLegacyAdaptiveExperiment 723 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 724 ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer()); 725 continue; 726 } 727 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 728 729 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 730 if (err != OK) { 731 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 732 info.mGraphicBuffer->getNativeBuffer(), 733 strerror(-err), -err); 734 return err; 735 } 736 } 737 738 // cancel undequeued buffers to new surface 739 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 740 for (size_t i = 0; i < buffers.size(); ++i) { 741 BufferInfo &info = buffers.editItemAt(i); 742 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 743 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 744 err = nativeWindow->cancelBuffer( 745 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 746 info.mFenceFd = -1; 747 if (err != OK) { 748 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 749 info.mGraphicBuffer->getNativeBuffer(), 750 strerror(-err), -err); 751 return err; 752 } 753 } 754 } 755 // disallow further allocation 756 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 757 } 758 759 // push blank buffers to previous window if requested 760 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 761 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 762 } 763 764 mNativeWindow = nativeWindow; 765 mNativeWindowUsageBits = usageBits; 766 return OK; 767} 768 769status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 770 CHECK(portIndex == kPortIndexInput || portIndex == 
kPortIndexOutput); 771 772 CHECK(mDealer[portIndex] == NULL); 773 CHECK(mBuffers[portIndex].isEmpty()); 774 775 status_t err; 776 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 777 if (storingMetadataInDecodedBuffers()) { 778 err = allocateOutputMetadataBuffers(); 779 } else { 780 err = allocateOutputBuffersFromNativeWindow(); 781 } 782 } else { 783 OMX_PARAM_PORTDEFINITIONTYPE def; 784 InitOMXParams(&def); 785 def.nPortIndex = portIndex; 786 787 err = mOMX->getParameter( 788 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 789 790 if (err == OK) { 791 MetadataBufferType type = 792 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 793 size_t bufSize = def.nBufferSize; 794 if (type == kMetadataBufferTypeGrallocSource) { 795 bufSize = sizeof(VideoGrallocMetadata); 796 } else if (type == kMetadataBufferTypeANWBuffer) { 797 bufSize = sizeof(VideoNativeMetadata); 798 } 799 800 // If using gralloc or native source input metadata buffers, allocate largest 801 // metadata size as we prefer to generate native source metadata, but component 802 // may require gralloc source. For camera source, allocate at least enough 803 // size for native metadata buffers. 
804 size_t allottedSize = bufSize; 805 if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) { 806 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 807 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 808 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 809 } 810 811 size_t conversionBufferSize = 0; 812 813 sp<DataConverter> converter = mConverter[portIndex]; 814 if (converter != NULL) { 815 // here we assume sane conversions of max 4:1, so result fits in int32 816 if (portIndex == kPortIndexInput) { 817 conversionBufferSize = converter->sourceSize(bufSize); 818 } else { 819 conversionBufferSize = converter->targetSize(bufSize); 820 } 821 } 822 823 size_t alignment = MemoryDealer::getAllocationAlignment(); 824 825 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 826 mComponentName.c_str(), 827 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 828 portIndex == kPortIndexInput ? 
"input" : "output"); 829 830 // verify buffer sizes to avoid overflow in align() 831 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 832 ALOGE("b/22885421"); 833 return NO_MEMORY; 834 } 835 836 // don't modify bufSize as OMX may not expect it to increase after negotiation 837 size_t alignedSize = align(bufSize, alignment); 838 size_t alignedConvSize = align(conversionBufferSize, alignment); 839 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 840 ALOGE("b/22885421"); 841 return NO_MEMORY; 842 } 843 844 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 845 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 846 847 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 848 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 849 if (mem == NULL || mem->pointer() == NULL) { 850 return NO_MEMORY; 851 } 852 853 BufferInfo info; 854 info.mStatus = BufferInfo::OWNED_BY_US; 855 info.mFenceFd = -1; 856 info.mRenderInfo = NULL; 857 info.mNativeHandle = NULL; 858 859 uint32_t requiresAllocateBufferBit = 860 (portIndex == kPortIndexInput) 861 ? kRequiresAllocateBufferOnInputPorts 862 : kRequiresAllocateBufferOnOutputPorts; 863 864 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 865 mem.clear(); 866 867 void *ptr = NULL; 868 native_handle_t *native_handle = NULL; 869 err = mOMX->allocateSecureBuffer( 870 mNode, portIndex, bufSize, &info.mBufferID, 871 &ptr, &native_handle); 872 873 // TRICKY: this representation is unorthodox, but ACodec requires 874 // an ABuffer with a proper size to validate range offsets and lengths. 875 // Since mData is never referenced for secure input, it is used to store 876 // either the pointer to the secure buffer, or the opaque handle as on 877 // some devices ptr is actually an opaque handle, not a pointer. 
878 879 // TRICKY2: use native handle as the base of the ABuffer if received one, 880 // because Widevine source only receives these base addresses. 881 info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize); 882 info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */); 883 info.mCodecData = info.mData; 884 } else if (mQuirks & requiresAllocateBufferBit) { 885 err = mOMX->allocateBufferWithBackup( 886 mNode, portIndex, mem, &info.mBufferID, allottedSize); 887 } else { 888 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 889 } 890 891 if (mem != NULL) { 892 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 893 info.mCodecRef = mem; 894 895 if (type == kMetadataBufferTypeANWBuffer) { 896 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 897 } 898 899 // if we require conversion, allocate conversion buffer for client use; 900 // otherwise, reuse codec buffer 901 if (mConverter[portIndex] != NULL) { 902 CHECK_GT(conversionBufferSize, (size_t)0); 903 mem = mDealer[portIndex]->allocate(conversionBufferSize); 904 if (mem == NULL|| mem->pointer() == NULL) { 905 return NO_MEMORY; 906 } 907 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 908 info.mMemRef = mem; 909 } else { 910 info.mData = info.mCodecData; 911 info.mMemRef = info.mCodecRef; 912 } 913 } 914 915 mBuffers[portIndex].push(info); 916 } 917 } 918 } 919 920 if (err != OK) { 921 return err; 922 } 923 924 sp<AMessage> notify = mNotify->dup(); 925 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 926 927 notify->setInt32("portIndex", portIndex); 928 929 sp<PortDescription> desc = new PortDescription; 930 931 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 932 const BufferInfo &info = mBuffers[portIndex][i]; 933 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 934 } 935 936 notify->setObject("portDesc", desc); 937 notify->post(); 938 939 return OK; 940} 941 942status_t 
ACodec::setupNativeWindowSizeFormatAndUsage( 943 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) { 944 OMX_PARAM_PORTDEFINITIONTYPE def; 945 InitOMXParams(&def); 946 def.nPortIndex = kPortIndexOutput; 947 948 status_t err = mOMX->getParameter( 949 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 950 951 if (err != OK) { 952 return err; 953 } 954 955 OMX_U32 usage = 0; 956 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 957 if (err != 0) { 958 ALOGW("querying usage flags from OMX IL component failed: %d", err); 959 // XXX: Currently this error is logged, but not fatal. 960 usage = 0; 961 } 962 int omxUsage = usage; 963 964 if (mFlags & kFlagIsGrallocUsageProtected) { 965 usage |= GRALLOC_USAGE_PROTECTED; 966 } 967 968 usage |= kVideoGrallocUsage; 969 *finalUsage = usage; 970 971 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 972 return setNativeWindowSizeFormatAndUsage( 973 nativeWindow, 974 def.format.video.nFrameWidth, 975 def.format.video.nFrameHeight, 976 def.format.video.eColorFormat, 977 mRotationDegrees, 978 usage); 979} 980 981status_t ACodec::configureOutputBuffersFromNativeWindow( 982 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 983 OMX_U32 *minUndequeuedBuffers) { 984 OMX_PARAM_PORTDEFINITIONTYPE def; 985 InitOMXParams(&def); 986 def.nPortIndex = kPortIndexOutput; 987 988 status_t err = mOMX->getParameter( 989 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 990 991 if (err == OK) { 992 err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits); 993 } 994 if (err != OK) { 995 mNativeWindowUsageBits = 0; 996 return err; 997 } 998 999 // Exits here for tunneled video playback codecs -- i.e. skips native window 1000 // buffer allocation step as this is managed by the tunneled OMX omponent 1001 // itself and explicitly sets def.nBufferCountActual to 0. 
1002 if (mTunneled) { 1003 ALOGV("Tunneled Playback: skipping native window buffer allocation."); 1004 def.nBufferCountActual = 0; 1005 err = mOMX->setParameter( 1006 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1007 1008 *minUndequeuedBuffers = 0; 1009 *bufferCount = 0; 1010 *bufferSize = 0; 1011 return err; 1012 } 1013 1014 *minUndequeuedBuffers = 0; 1015 err = mNativeWindow->query( 1016 mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 1017 (int *)minUndequeuedBuffers); 1018 1019 if (err != 0) { 1020 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 1021 strerror(-err), -err); 1022 return err; 1023 } 1024 1025 // FIXME: assume that surface is controlled by app (native window 1026 // returns the number for the case when surface is not controlled by app) 1027 // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported 1028 // For now, try to allocate 1 more buffer, but don't fail if unsuccessful 1029 1030 // Use conservative allocation while also trying to reduce starvation 1031 // 1032 // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the 1033 // minimum needed for the consumer to be able to work 1034 // 2. 
try to allocate two (2) additional buffers to reduce starvation from 1035 // the consumer 1036 // plus an extra buffer to account for incorrect minUndequeuedBufs 1037 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { 1038 OMX_U32 newBufferCount = 1039 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; 1040 def.nBufferCountActual = newBufferCount; 1041 err = mOMX->setParameter( 1042 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1043 1044 if (err == OK) { 1045 *minUndequeuedBuffers += extraBuffers; 1046 break; 1047 } 1048 1049 ALOGW("[%s] setting nBufferCountActual to %u failed: %d", 1050 mComponentName.c_str(), newBufferCount, err); 1051 /* exit condition */ 1052 if (extraBuffers == 0) { 1053 return err; 1054 } 1055 } 1056 1057 err = native_window_set_buffer_count( 1058 mNativeWindow.get(), def.nBufferCountActual); 1059 1060 if (err != 0) { 1061 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 1062 -err); 1063 return err; 1064 } 1065 1066 *bufferCount = def.nBufferCountActual; 1067 *bufferSize = def.nBufferSize; 1068 return err; 1069} 1070 1071status_t ACodec::allocateOutputBuffersFromNativeWindow() { 1072 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1073 status_t err = configureOutputBuffersFromNativeWindow( 1074 &bufferCount, &bufferSize, &minUndequeuedBuffers); 1075 if (err != 0) 1076 return err; 1077 mNumUndequeuedBuffers = minUndequeuedBuffers; 1078 1079 if (!storingMetadataInDecodedBuffers()) { 1080 static_cast<Surface*>(mNativeWindow.get()) 1081 ->getIGraphicBufferProducer()->allowAllocation(true); 1082 } 1083 1084 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1085 "output port", 1086 mComponentName.c_str(), bufferCount, bufferSize); 1087 1088 // Dequeue buffers and send them to OMX 1089 for (OMX_U32 i = 0; i < bufferCount; i++) { 1090 ANativeWindowBuffer *buf; 1091 int fenceFd; 1092 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, 
&fenceFd); 1093 if (err != 0) { 1094 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1095 break; 1096 } 1097 1098 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1099 BufferInfo info; 1100 info.mStatus = BufferInfo::OWNED_BY_US; 1101 info.mFenceFd = fenceFd; 1102 info.mIsReadFence = false; 1103 info.mRenderInfo = NULL; 1104 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1105 info.mCodecData = info.mData; 1106 info.mGraphicBuffer = graphicBuffer; 1107 mBuffers[kPortIndexOutput].push(info); 1108 1109 IOMX::buffer_id bufferId; 1110 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1111 &bufferId); 1112 if (err != 0) { 1113 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1114 "%d", i, err); 1115 break; 1116 } 1117 1118 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1119 1120 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1121 mComponentName.c_str(), 1122 bufferId, graphicBuffer.get()); 1123 } 1124 1125 OMX_U32 cancelStart; 1126 OMX_U32 cancelEnd; 1127 1128 if (err != 0) { 1129 // If an error occurred while dequeuing we need to cancel any buffers 1130 // that were dequeued. 1131 cancelStart = 0; 1132 cancelEnd = mBuffers[kPortIndexOutput].size(); 1133 } else { 1134 // Return the required minimum undequeued buffers to the native window. 
1135 cancelStart = bufferCount - minUndequeuedBuffers; 1136 cancelEnd = bufferCount; 1137 } 1138 1139 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1140 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1141 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1142 status_t error = cancelBufferToNativeWindow(info); 1143 if (err == 0) { 1144 err = error; 1145 } 1146 } 1147 } 1148 1149 if (!storingMetadataInDecodedBuffers()) { 1150 static_cast<Surface*>(mNativeWindow.get()) 1151 ->getIGraphicBufferProducer()->allowAllocation(false); 1152 } 1153 1154 return err; 1155} 1156 1157status_t ACodec::allocateOutputMetadataBuffers() { 1158 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1159 status_t err = configureOutputBuffersFromNativeWindow( 1160 &bufferCount, &bufferSize, &minUndequeuedBuffers); 1161 if (err != 0) 1162 return err; 1163 mNumUndequeuedBuffers = minUndequeuedBuffers; 1164 1165 ALOGV("[%s] Allocating %u meta buffers on output port", 1166 mComponentName.c_str(), bufferCount); 1167 1168 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1169 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1170 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1171 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1172 1173 // Dequeue buffers and send them to OMX 1174 for (OMX_U32 i = 0; i < bufferCount; i++) { 1175 BufferInfo info; 1176 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1177 info.mFenceFd = -1; 1178 info.mRenderInfo = NULL; 1179 info.mGraphicBuffer = NULL; 1180 info.mDequeuedAt = mDequeueCounter; 1181 1182 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1183 if (mem == NULL || mem->pointer() == NULL) { 1184 return NO_MEMORY; 1185 } 1186 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1187 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1188 } 1189 info.mData = new ABuffer(mem->pointer(), mem->size()); 1190 info.mMemRef = mem; 1191 info.mCodecData = info.mData; 1192 info.mCodecRef = mem; 1193 1194 // we use useBuffer for metadata regardless of quirks 1195 err = mOMX->useBuffer( 1196 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1197 mBuffers[kPortIndexOutput].push(info); 1198 1199 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1200 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1201 } 1202 1203 if (mLegacyAdaptiveExperiment) { 1204 // preallocate and preregister buffers 1205 static_cast<Surface *>(mNativeWindow.get()) 1206 ->getIGraphicBufferProducer()->allowAllocation(true); 1207 1208 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1209 "output port", 1210 mComponentName.c_str(), bufferCount, bufferSize); 1211 1212 // Dequeue buffers then cancel them all 1213 for (OMX_U32 i = 0; i < bufferCount; i++) { 1214 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1215 1216 ANativeWindowBuffer *buf; 1217 int fenceFd; 1218 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1219 if (err != 0) { 
1220 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1221 break; 1222 } 1223 1224 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1225 mOMX->updateGraphicBufferInMeta( 1226 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1227 info->mStatus = BufferInfo::OWNED_BY_US; 1228 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1229 info->mGraphicBuffer = graphicBuffer; 1230 } 1231 1232 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1233 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1234 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1235 status_t error = cancelBufferToNativeWindow(info); 1236 if (err == OK) { 1237 err = error; 1238 } 1239 } 1240 } 1241 1242 static_cast<Surface*>(mNativeWindow.get()) 1243 ->getIGraphicBufferProducer()->allowAllocation(false); 1244 } 1245 1246 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1247 return err; 1248} 1249 1250status_t ACodec::submitOutputMetadataBuffer() { 1251 CHECK(storingMetadataInDecodedBuffers()); 1252 if (mMetadataBuffersToSubmit == 0) 1253 return OK; 1254 1255 BufferInfo *info = dequeueBufferFromNativeWindow(); 1256 if (info == NULL) { 1257 return ERROR_IO; 1258 } 1259 1260 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1261 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1262 1263 --mMetadataBuffersToSubmit; 1264 info->checkWriteFence("submitOutputMetadataBuffer"); 1265 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1266 info->mFenceFd = -1; 1267 if (err == OK) { 1268 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1269 } 1270 1271 return err; 1272} 1273 1274status_t ACodec::waitForFence(int fd, const char *dbg ) { 1275 status_t res = OK; 1276 if (fd >= 0) { 1277 sp<Fence> fence = new Fence(fd); 1278 res = fence->wait(IOMX::kFenceTimeoutMs); 1279 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1280 } 1281 return res; 1282} 
1283 1284// static 1285const char *ACodec::_asString(BufferInfo::Status s) { 1286 switch (s) { 1287 case BufferInfo::OWNED_BY_US: return "OUR"; 1288 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1289 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1290 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1291 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1292 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1293 default: return "?"; 1294 } 1295} 1296 1297void ACodec::dumpBuffers(OMX_U32 portIndex) { 1298 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1299 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1300 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1301 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1302 const BufferInfo &info = mBuffers[portIndex][i]; 1303 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1304 i, info.mBufferID, info.mGraphicBuffer.get(), 1305 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1306 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1307 } 1308} 1309 1310status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1311 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1312 1313 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1314 mComponentName.c_str(), info->mBufferID); 1315 1316 info->checkWriteFence("cancelBufferToNativeWindow"); 1317 int err = mNativeWindow->cancelBuffer( 1318 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1319 info->mFenceFd = -1; 1320 1321 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1322 mComponentName.c_str(), info->mBufferID); 1323 // change ownership even if cancelBuffer fails 1324 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1325 1326 return err; 1327} 1328 1329void ACodec::updateRenderInfoForDequeuedBuffer( 1330 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1331 1332 info->mRenderInfo = 1333 mRenderTracker.updateInfoForDequeuedBuffer( 1334 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1335 1336 // check for any fences already signaled 1337 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1338} 1339 1340void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1341 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1342 mRenderTracker.dumpRenderQueue(); 1343 } 1344} 1345 1346void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1347 sp<AMessage> msg = mNotify->dup(); 1348 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1349 std::list<FrameRenderTracker::Info> done = 1350 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1351 1352 // unlink untracked frames 1353 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1354 it != done.cend(); ++it) { 1355 ssize_t index = it->getIndex(); 1356 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1357 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1358 } else if (index >= 0) { 1359 // THIS SHOULD NEVER HAPPEN 1360 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1361 } 1362 } 1363 1364 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1365 msg->post(); 1366 } 1367} 1368 1369ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1370 ANativeWindowBuffer *buf; 1371 CHECK(mNativeWindow.get() != NULL); 1372 1373 if (mTunneled) { 1374 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1375 " video playback mode mode!"); 1376 return NULL; 1377 } 1378 1379 if (mFatalError) { 1380 ALOGW("not dequeuing from native window due to fatal error"); 1381 return NULL; 1382 } 1383 1384 int fenceFd = -1; 1385 do { 1386 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1387 if (err != 0) { 1388 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1389 return NULL; 1390 } 1391 1392 bool stale = false; 1393 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1394 i--; 1395 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1396 1397 if (info->mGraphicBuffer != NULL && 1398 info->mGraphicBuffer->handle == buf->handle) { 1399 // Since consumers can attach buffers to BufferQueues, it is possible 1400 // that a known yet stale buffer can return from a surface that we 1401 // once used. We can simply ignore this as we have already dequeued 1402 // this buffer properly. NOTE: this does not eliminate all cases, 1403 // e.g. it is possible that we have queued the valid buffer to the 1404 // NW, and a stale copy of the same buffer gets dequeued - which will 1405 // be treated as the valid buffer by ACodec. 1406 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1407 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1408 stale = true; 1409 break; 1410 } 1411 1412 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1413 info->mStatus = BufferInfo::OWNED_BY_US; 1414 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1415 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1416 return info; 1417 } 1418 } 1419 1420 // It is also possible to receive a previously unregistered buffer 1421 // in non-meta mode. These should be treated as stale buffers. The 1422 // same is possible in meta mode, in which case, it will be treated 1423 // as a normal buffer, which is not desirable. 1424 // TODO: fix this. 1425 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1426 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1427 stale = true; 1428 } 1429 if (stale) { 1430 // TODO: detach stale buffer, but there is no API yet to do it. 1431 buf = NULL; 1432 } 1433 } while (buf == NULL); 1434 1435 // get oldest undequeued buffer 1436 BufferInfo *oldest = NULL; 1437 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1438 i--; 1439 BufferInfo *info = 1440 &mBuffers[kPortIndexOutput].editItemAt(i); 1441 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1442 (oldest == NULL || 1443 // avoid potential issues from counter rolling over 1444 mDequeueCounter - info->mDequeuedAt > 1445 mDequeueCounter - oldest->mDequeuedAt)) { 1446 oldest = info; 1447 } 1448 } 1449 1450 // it is impossible dequeue a buffer when there are no buffers with ANW 1451 CHECK(oldest != NULL); 1452 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1453 // while loop above does not complete 1454 CHECK(storingMetadataInDecodedBuffers()); 1455 1456 // discard buffer in LRU info and replace with new buffer 1457 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1458 oldest->mStatus = BufferInfo::OWNED_BY_US; 1459 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1460 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1461 oldest->mRenderInfo = NULL; 1462 1463 mOMX->updateGraphicBufferInMeta( 1464 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1465 oldest->mBufferID); 1466 1467 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1468 VideoGrallocMetadata *grallocMeta = 1469 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1470 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1471 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1472 mDequeueCounter - oldest->mDequeuedAt, 1473 (void *)(uintptr_t)grallocMeta->pHandle, 1474 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1475 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1476 VideoNativeMetadata *nativeMeta = 1477 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1478 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1479 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1480 mDequeueCounter - oldest->mDequeuedAt, 1481 (void *)(uintptr_t)nativeMeta->pBuffer, 1482 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1483 } 1484 1485 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1486 return oldest; 1487} 1488 1489status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1490 status_t err = OK; 1491 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1492 i--; 1493 status_t err2 = freeBuffer(portIndex, i); 1494 if (err == OK) { 1495 err = err2; 1496 } 1497 } 1498 1499 // clear mDealer even on an error 1500 mDealer[portIndex].clear(); 1501 return err; 1502} 1503 1504status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1505 status_t err = OK; 1506 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1507 i--; 1508 BufferInfo *info = 1509 &mBuffers[kPortIndexOutput].editItemAt(i); 1510 1511 // At this time some buffers may still be with the component 1512 // or being drained. 
1513 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1514 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1515 status_t err2 = freeBuffer(kPortIndexOutput, i); 1516 if (err == OK) { 1517 err = err2; 1518 } 1519 } 1520 } 1521 1522 return err; 1523} 1524 1525status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1526 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1527 status_t err = OK; 1528 1529 // there should not be any fences in the metadata 1530 MetadataBufferType type = 1531 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1532 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1533 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1534 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1535 if (fenceFd >= 0) { 1536 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1537 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1538 } 1539 } 1540 1541 switch (info->mStatus) { 1542 case BufferInfo::OWNED_BY_US: 1543 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1544 (void)cancelBufferToNativeWindow(info); 1545 } 1546 // fall through 1547 1548 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1549 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1550 break; 1551 1552 default: 1553 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1554 err = FAILED_TRANSACTION; 1555 break; 1556 } 1557 1558 if (info->mFenceFd >= 0) { 1559 ::close(info->mFenceFd); 1560 } 1561 1562 if (portIndex == kPortIndexOutput) { 1563 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1564 info->mRenderInfo = NULL; 1565 } 1566 1567 // remove buffer even if mOMX->freeBuffer fails 1568 mBuffers[portIndex].removeAt(i); 1569 return err; 1570} 1571 1572ACodec::BufferInfo *ACodec::findBufferByID( 1573 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1574 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1575 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1576 1577 if (info->mBufferID == bufferID) { 1578 if (index != NULL) { 1579 *index = i; 1580 } 1581 return info; 1582 } 1583 } 1584 1585 ALOGE("Could not find buffer with ID %u", bufferID); 1586 return NULL; 1587} 1588 1589status_t ACodec::setComponentRole( 1590 bool isEncoder, const char *mime) { 1591 const char *role = getComponentRole(isEncoder, mime); 1592 if (role == NULL) { 1593 return BAD_VALUE; 1594 } 1595 status_t err = setComponentRole(mOMX, mNode, role); 1596 if (err != OK) { 1597 ALOGW("[%s] Failed to set standard component role '%s'.", 1598 mComponentName.c_str(), role); 1599 } 1600 return err; 1601} 1602 1603//static 1604const char *ACodec::getComponentRole( 1605 bool isEncoder, const char *mime) { 1606 struct MimeToRole { 1607 const char *mime; 1608 const char *decoderRole; 1609 const char *encoderRole; 1610 }; 1611 1612 static const MimeToRole kMimeToRole[] = { 1613 { MEDIA_MIMETYPE_AUDIO_MPEG, 1614 "audio_decoder.mp3", "audio_encoder.mp3" }, 1615 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1616 "audio_decoder.mp1", "audio_encoder.mp1" }, 1617 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1618 "audio_decoder.mp2", "audio_encoder.mp2" }, 1619 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1620 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1621 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1622 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1623 { MEDIA_MIMETYPE_AUDIO_AAC, 1624 "audio_decoder.aac", "audio_encoder.aac" }, 1625 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1626 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1627 { MEDIA_MIMETYPE_AUDIO_OPUS, 1628 "audio_decoder.opus", "audio_encoder.opus" }, 1629 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1630 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1631 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1632 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1633 { MEDIA_MIMETYPE_VIDEO_AVC, 1634 "video_decoder.avc", "video_encoder.avc" }, 1635 { MEDIA_MIMETYPE_VIDEO_HEVC, 1636 "video_decoder.hevc", "video_encoder.hevc" }, 1637 { 
MEDIA_MIMETYPE_VIDEO_MPEG4, 1638 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1639 { MEDIA_MIMETYPE_VIDEO_H263, 1640 "video_decoder.h263", "video_encoder.h263" }, 1641 { MEDIA_MIMETYPE_VIDEO_VP8, 1642 "video_decoder.vp8", "video_encoder.vp8" }, 1643 { MEDIA_MIMETYPE_VIDEO_VP9, 1644 "video_decoder.vp9", "video_encoder.vp9" }, 1645 { MEDIA_MIMETYPE_AUDIO_RAW, 1646 "audio_decoder.raw", "audio_encoder.raw" }, 1647 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1648 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1649 { MEDIA_MIMETYPE_AUDIO_FLAC, 1650 "audio_decoder.flac", "audio_encoder.flac" }, 1651 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1652 "audio_decoder.gsm", "audio_encoder.gsm" }, 1653 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1654 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1655 { MEDIA_MIMETYPE_AUDIO_AC3, 1656 "audio_decoder.ac3", "audio_encoder.ac3" }, 1657 { MEDIA_MIMETYPE_AUDIO_EAC3, 1658 "audio_decoder.eac3", "audio_encoder.eac3" }, 1659 }; 1660 1661 static const size_t kNumMimeToRole = 1662 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1663 1664 size_t i; 1665 for (i = 0; i < kNumMimeToRole; ++i) { 1666 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1667 break; 1668 } 1669 } 1670 1671 if (i == kNumMimeToRole) { 1672 return NULL; 1673 } 1674 1675 return isEncoder ? 
kMimeToRole[i].encoderRole 1676 : kMimeToRole[i].decoderRole; 1677} 1678 1679//static 1680status_t ACodec::setComponentRole( 1681 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1682 OMX_PARAM_COMPONENTROLETYPE roleParams; 1683 InitOMXParams(&roleParams); 1684 1685 strncpy((char *)roleParams.cRole, 1686 role, OMX_MAX_STRINGNAME_SIZE - 1); 1687 1688 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1689 1690 return omx->setParameter( 1691 node, OMX_IndexParamStandardComponentRole, 1692 &roleParams, sizeof(roleParams)); 1693} 1694 1695status_t ACodec::configureCodec( 1696 const char *mime, const sp<AMessage> &msg) { 1697 int32_t encoder; 1698 if (!msg->findInt32("encoder", &encoder)) { 1699 encoder = false; 1700 } 1701 1702 sp<AMessage> inputFormat = new AMessage; 1703 sp<AMessage> outputFormat = new AMessage; 1704 mConfigFormat = msg; 1705 1706 mIsEncoder = encoder; 1707 1708 mInputMetadataType = kMetadataBufferTypeInvalid; 1709 mOutputMetadataType = kMetadataBufferTypeInvalid; 1710 1711 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1712 1713 if (err != OK) { 1714 return err; 1715 } 1716 1717 int32_t bitRate = 0; 1718 // FLAC encoder doesn't need a bitrate, other encoders do 1719 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1720 && !msg->findInt32("bitrate", &bitRate)) { 1721 return INVALID_OPERATION; 1722 } 1723 1724 int32_t storeMeta; 1725 if (encoder 1726 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1727 && storeMeta != 0) { 1728 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1729 if (err != OK) { 1730 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1731 mComponentName.c_str(), err); 1732 1733 return err; 1734 } 1735 // For this specific case we could be using camera source even if storeMetaDataInBuffers 1736 // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. 
1737 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1738 mInputMetadataType = kMetadataBufferTypeCameraSource; 1739 } 1740 1741 uint32_t usageBits; 1742 if (mOMX->getParameter( 1743 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1744 &usageBits, sizeof(usageBits)) == OK) { 1745 inputFormat->setInt32( 1746 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1747 } 1748 } 1749 1750 int32_t prependSPSPPS = 0; 1751 if (encoder 1752 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1753 && prependSPSPPS != 0) { 1754 OMX_INDEXTYPE index; 1755 err = mOMX->getExtensionIndex( 1756 mNode, 1757 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1758 &index); 1759 1760 if (err == OK) { 1761 PrependSPSPPSToIDRFramesParams params; 1762 InitOMXParams(¶ms); 1763 params.bEnable = OMX_TRUE; 1764 1765 err = mOMX->setParameter( 1766 mNode, index, ¶ms, sizeof(params)); 1767 } 1768 1769 if (err != OK) { 1770 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1771 "IDR frames. (err %d)", err); 1772 1773 return err; 1774 } 1775 } 1776 1777 // Only enable metadata mode on encoder output if encoder can prepend 1778 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1779 // opaque handle, to which we don't have access. 
1780 int32_t video = !strncasecmp(mime, "video/", 6); 1781 mIsVideo = video; 1782 if (encoder && video) { 1783 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1784 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1785 && storeMeta != 0); 1786 1787 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1788 if (err != OK) { 1789 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1790 mComponentName.c_str(), err); 1791 } 1792 1793 if (!msg->findInt64( 1794 "repeat-previous-frame-after", 1795 &mRepeatFrameDelayUs)) { 1796 mRepeatFrameDelayUs = -1ll; 1797 } 1798 1799 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1800 mMaxPtsGapUs = -1ll; 1801 } 1802 1803 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1804 mMaxFps = -1; 1805 } 1806 1807 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1808 mTimePerCaptureUs = -1ll; 1809 } 1810 1811 if (!msg->findInt32( 1812 "create-input-buffers-suspended", 1813 (int32_t*)&mCreateInputBuffersSuspended)) { 1814 mCreateInputBuffersSuspended = false; 1815 } 1816 } 1817 1818 // NOTE: we only use native window for video decoders 1819 sp<RefBase> obj; 1820 bool haveNativeWindow = msg->findObject("native-window", &obj) 1821 && obj != NULL && video && !encoder; 1822 mUsingNativeWindow = haveNativeWindow; 1823 mLegacyAdaptiveExperiment = false; 1824 if (video && !encoder) { 1825 inputFormat->setInt32("adaptive-playback", false); 1826 1827 int32_t usageProtected; 1828 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1829 if (!haveNativeWindow) { 1830 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1831 return PERMISSION_DENIED; 1832 } 1833 mFlags |= kFlagIsGrallocUsageProtected; 1834 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1835 } 1836 1837 if (mFlags & kFlagIsSecure) { 1838 // use native_handles for secure input buffers 1839 err = mOMX->enableNativeBuffers( 1840 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1841 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1842 err = OK; // ignore error for now 1843 } 1844 } 1845 if (haveNativeWindow) { 1846 sp<ANativeWindow> nativeWindow = 1847 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1848 1849 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1850 int32_t autoFrc; 1851 if (msg->findInt32("auto-frc", &autoFrc)) { 1852 bool enabled = autoFrc; 1853 OMX_CONFIG_BOOLEANTYPE config; 1854 InitOMXParams(&config); 1855 config.bEnabled = (OMX_BOOL)enabled; 1856 status_t temp = mOMX->setConfig( 1857 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1858 &config, sizeof(config)); 1859 if (temp == OK) { 1860 outputFormat->setInt32("auto-frc", enabled); 1861 } else if (enabled) { 1862 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1863 } 1864 } 1865 // END of temporary support for automatic FRC 1866 1867 int32_t tunneled; 1868 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1869 tunneled != 0) { 1870 ALOGI("Configuring TUNNELED video playback."); 1871 mTunneled = true; 1872 1873 int32_t audioHwSync = 0; 1874 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1875 ALOGW("No Audio HW Sync provided for video tunnel"); 1876 } 1877 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1878 if (err != OK) { 1879 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1880 audioHwSync, nativeWindow.get()); 1881 return err; 1882 } 1883 1884 int32_t maxWidth = 0, maxHeight = 0; 1885 if (msg->findInt32("max-width", &maxWidth) && 1886 msg->findInt32("max-height", &maxHeight)) { 1887 1888 err = mOMX->prepareForAdaptivePlayback( 1889 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1890 if (err != OK) { 1891 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1892 mComponentName.c_str(), err); 1893 // allow failure 1894 err = OK; 1895 } else { 1896 inputFormat->setInt32("max-width", maxWidth); 1897 
inputFormat->setInt32("max-height", maxHeight); 1898 inputFormat->setInt32("adaptive-playback", true); 1899 } 1900 } 1901 } else { 1902 ALOGV("Configuring CPU controlled video playback."); 1903 mTunneled = false; 1904 1905 // Explicity reset the sideband handle of the window for 1906 // non-tunneled video in case the window was previously used 1907 // for a tunneled video playback. 1908 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1909 if (err != OK) { 1910 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1911 return err; 1912 } 1913 1914 // Always try to enable dynamic output buffers on native surface 1915 err = mOMX->storeMetaDataInBuffers( 1916 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1917 if (err != OK) { 1918 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1919 mComponentName.c_str(), err); 1920 1921 // if adaptive playback has been requested, try JB fallback 1922 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1923 // LARGE MEMORY REQUIREMENT 1924 1925 // we will not do adaptive playback on software accessed 1926 // surfaces as they never had to respond to changes in the 1927 // crop window, and we don't trust that they will be able to. 
1928 int usageBits = 0; 1929 bool canDoAdaptivePlayback; 1930 1931 if (nativeWindow->query( 1932 nativeWindow.get(), 1933 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1934 &usageBits) != OK) { 1935 canDoAdaptivePlayback = false; 1936 } else { 1937 canDoAdaptivePlayback = 1938 (usageBits & 1939 (GRALLOC_USAGE_SW_READ_MASK | 1940 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1941 } 1942 1943 int32_t maxWidth = 0, maxHeight = 0; 1944 if (canDoAdaptivePlayback && 1945 msg->findInt32("max-width", &maxWidth) && 1946 msg->findInt32("max-height", &maxHeight)) { 1947 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1948 mComponentName.c_str(), maxWidth, maxHeight); 1949 1950 err = mOMX->prepareForAdaptivePlayback( 1951 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1952 maxHeight); 1953 ALOGW_IF(err != OK, 1954 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1955 mComponentName.c_str(), err); 1956 1957 if (err == OK) { 1958 inputFormat->setInt32("max-width", maxWidth); 1959 inputFormat->setInt32("max-height", maxHeight); 1960 inputFormat->setInt32("adaptive-playback", true); 1961 } 1962 } 1963 // allow failure 1964 err = OK; 1965 } else { 1966 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1967 mComponentName.c_str()); 1968 CHECK(storingMetadataInDecodedBuffers()); 1969 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1970 "legacy-adaptive", !msg->contains("no-experiments")); 1971 1972 inputFormat->setInt32("adaptive-playback", true); 1973 } 1974 1975 int32_t push; 1976 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1977 && push != 0) { 1978 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1979 } 1980 } 1981 1982 int32_t rotationDegrees; 1983 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1984 mRotationDegrees = rotationDegrees; 1985 } else { 1986 mRotationDegrees = 0; 1987 } 1988 } 1989 1990 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1991 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1992 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1993 1994 if (video) { 1995 // determine need for software renderer 1996 bool usingSwRenderer = false; 1997 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1998 usingSwRenderer = true; 1999 haveNativeWindow = false; 2000 } 2001 2002 if (encoder) { 2003 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2004 } else { 2005 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2006 } 2007 2008 if (err != OK) { 2009 return err; 2010 } 2011 2012 if (haveNativeWindow) { 2013 mNativeWindow = static_cast<Surface *>(obj.get()); 2014 } 2015 2016 // initialize native window now to get actual output format 2017 // TODO: this is needed for some encoders even though they don't use native window 2018 err = initNativeWindow(); 2019 if (err != OK) { 2020 return err; 2021 } 2022 2023 // fallback for devices that do not handle flex-YUV for native buffers 2024 if (haveNativeWindow) { 2025 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2026 if (msg->findInt32("color-format", &requestedColorFormat) && 2027 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2028 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2029 if (err != OK) { 2030 return err; 2031 } 2032 int32_t colorFormat = OMX_COLOR_FormatUnused; 2033 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2034 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2035 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2036 return BAD_VALUE; 2037 } 2038 ALOGD("[%s] Requested output format %#x and got %#x.", 2039 mComponentName.c_str(), requestedColorFormat, colorFormat); 2040 if (!isFlexibleColorFormat( 2041 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2042 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2043 // device did not handle flex-YUV request for native window, fall back 2044 // to SW renderer 2045 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2046 mNativeWindow.clear(); 2047 mNativeWindowUsageBits = 0; 2048 haveNativeWindow = false; 2049 usingSwRenderer = true; 2050 if (storingMetadataInDecodedBuffers()) { 2051 err = mOMX->storeMetaDataInBuffers( 2052 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2053 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2054 // TODO: implement adaptive-playback support for bytebuffer mode. 2055 // This is done by SW codecs, but most HW codecs don't support it. 2056 inputFormat->setInt32("adaptive-playback", false); 2057 } 2058 if (err == OK) { 2059 err = mOMX->enableNativeBuffers( 2060 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2061 } 2062 if (mFlags & kFlagIsGrallocUsageProtected) { 2063 // fallback is not supported for protected playback 2064 err = PERMISSION_DENIED; 2065 } else if (err == OK) { 2066 err = setupVideoDecoder( 2067 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2068 } 2069 } 2070 } 2071 } 2072 2073 if (usingSwRenderer) { 2074 outputFormat->setInt32("using-sw-renderer", 1); 2075 } 2076 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2077 int32_t numChannels, sampleRate; 2078 if (!msg->findInt32("channel-count", &numChannels) 2079 || !msg->findInt32("sample-rate", &sampleRate)) { 2080 // Since we did not always check for these, leave them optional 2081 // and have the decoder figure it all out. 2082 err = OK; 2083 } else { 2084 err = setupRawAudioFormat( 2085 encoder ? 
kPortIndexInput : kPortIndexOutput, 2086 sampleRate, 2087 numChannels); 2088 } 2089 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2090 int32_t numChannels, sampleRate; 2091 if (!msg->findInt32("channel-count", &numChannels) 2092 || !msg->findInt32("sample-rate", &sampleRate)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t isADTS, aacProfile; 2096 int32_t sbrMode; 2097 int32_t maxOutputChannelCount; 2098 int32_t pcmLimiterEnable; 2099 drcParams_t drc; 2100 if (!msg->findInt32("is-adts", &isADTS)) { 2101 isADTS = 0; 2102 } 2103 if (!msg->findInt32("aac-profile", &aacProfile)) { 2104 aacProfile = OMX_AUDIO_AACObjectNull; 2105 } 2106 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2107 sbrMode = -1; 2108 } 2109 2110 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2111 maxOutputChannelCount = -1; 2112 } 2113 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2114 // value is unknown 2115 pcmLimiterEnable = -1; 2116 } 2117 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2118 // value is unknown 2119 drc.encodedTargetLevel = -1; 2120 } 2121 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2122 // value is unknown 2123 drc.drcCut = -1; 2124 } 2125 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2126 // value is unknown 2127 drc.drcBoost = -1; 2128 } 2129 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2130 // value is unknown 2131 drc.heavyCompression = -1; 2132 } 2133 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2134 // value is unknown 2135 drc.targetRefLevel = -1; 2136 } 2137 2138 err = setupAACCodec( 2139 encoder, numChannels, sampleRate, bitRate, aacProfile, 2140 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2141 pcmLimiterEnable); 2142 } 2143 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2144 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2145 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2146 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2147 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2148 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2149 // These are PCM-like formats with a fixed sample rate but 2150 // a variable number of channels. 2151 2152 int32_t numChannels; 2153 if (!msg->findInt32("channel-count", &numChannels)) { 2154 err = INVALID_OPERATION; 2155 } else { 2156 int32_t sampleRate; 2157 if (!msg->findInt32("sample-rate", &sampleRate)) { 2158 sampleRate = 8000; 2159 } 2160 err = setupG711Codec(encoder, sampleRate, numChannels); 2161 } 2162 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2163 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2164 if (encoder && 2165 (!msg->findInt32("channel-count", &numChannels) 2166 || !msg->findInt32("sample-rate", &sampleRate))) { 2167 ALOGE("missing channel count or sample rate for FLAC encoder"); 2168 err = INVALID_OPERATION; 2169 } else { 2170 if (encoder) { 2171 if (!msg->findInt32( 2172 "complexity", &compressionLevel) && 2173 !msg->findInt32( 2174 "flac-compression-level", &compressionLevel)) { 2175 compressionLevel = 5; // default FLAC compression level 2176 } else if (compressionLevel < 0) { 2177 ALOGW("compression level %d outside [0..8] range, " 2178 "using 0", 2179 compressionLevel); 2180 compressionLevel = 0; 2181 } else if (compressionLevel > 8) { 2182 ALOGW("compression level %d outside [0..8] range, " 2183 "using 8", 2184 compressionLevel); 2185 compressionLevel = 8; 2186 } 2187 } 2188 err = setupFlacCodec( 2189 encoder, numChannels, sampleRate, compressionLevel); 2190 } 2191 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2192 int32_t numChannels, sampleRate; 2193 if (encoder 2194 || !msg->findInt32("channel-count", &numChannels) 2195 || !msg->findInt32("sample-rate", &sampleRate)) { 2196 err = INVALID_OPERATION; 2197 } else { 2198 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2199 } 2200 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2201 int32_t numChannels; 2202 int32_t sampleRate; 2203 if (!msg->findInt32("channel-count", &numChannels) 2204 || !msg->findInt32("sample-rate", &sampleRate)) { 2205 err = INVALID_OPERATION; 2206 } else { 2207 err = setupAC3Codec(encoder, numChannels, sampleRate); 2208 } 2209 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2210 int32_t numChannels; 2211 int32_t sampleRate; 2212 if (!msg->findInt32("channel-count", &numChannels) 2213 || !msg->findInt32("sample-rate", &sampleRate)) { 2214 err = INVALID_OPERATION; 2215 } else { 2216 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2217 } 2218 } 2219 2220 if (err != OK) { 2221 return err; 2222 } 2223 2224 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2225 mEncoderDelay = 0; 2226 } 2227 2228 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2229 mEncoderPadding = 0; 2230 } 2231 2232 if (msg->findInt32("channel-mask", &mChannelMask)) { 2233 mChannelMaskPresent = true; 2234 } else { 2235 mChannelMaskPresent = false; 2236 } 2237 2238 int32_t maxInputSize; 2239 if (msg->findInt32("max-input-size", &maxInputSize)) { 2240 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2241 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2242 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2243 } 2244 2245 int32_t priority; 2246 if (msg->findInt32("priority", &priority)) { 2247 err = setPriority(priority); 2248 } 2249 2250 int32_t rateInt = -1; 2251 float rateFloat = -1; 2252 if (!msg->findFloat("operating-rate", &rateFloat)) { 2253 msg->findInt32("operating-rate", &rateInt); 2254 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2255 } 2256 if (rateFloat > 0) { 2257 err = setOperatingRate(rateFloat, video); 2258 } 2259 2260 mBaseOutputFormat = outputFormat; 2261 // trigger a kWhatOutputFormatChanged msg on first buffer 2262 mLastOutputFormat.clear(); 2263 2264 err = getPortFormat(kPortIndexInput, inputFormat); 2265 if (err == OK) { 2266 err = getPortFormat(kPortIndexOutput, outputFormat); 2267 if (err == OK) { 2268 mInputFormat = inputFormat; 2269 mOutputFormat = outputFormat; 2270 } 2271 } 2272 2273 // create data converters if needed 2274 if (!video && err == OK) { 2275 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2276 if (encoder) { 2277 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2278 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2279 if (mConverter[kPortIndexInput] != NULL) { 2280 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2281 } 2282 } else { 2283 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2284 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2285 if (mConverter[kPortIndexOutput] != NULL) { 2286 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2287 } 2288 } 2289 } 2290 2291 return err; 2292} 2293 2294status_t ACodec::setPriority(int32_t priority) { 2295 if (priority < 0) { 2296 return BAD_VALUE; 2297 } 2298 OMX_PARAM_U32TYPE config; 2299 InitOMXParams(&config); 2300 config.nU32 = (OMX_U32)priority; 2301 status_t temp = mOMX->setConfig( 2302 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2303 &config, sizeof(config)); 2304 if (temp != OK) { 2305 ALOGI("codec does not support config priority (err %d)", temp); 2306 } 2307 return OK; 2308} 2309 2310status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2311 if (rateFloat < 0) { 2312 return BAD_VALUE; 2313 } 2314 OMX_U32 rate; 2315 if (isVideo) { 2316 if (rateFloat > 65535) { 2317 return BAD_VALUE; 2318 } 2319 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2320 
} else { 2321 if (rateFloat > UINT_MAX) { 2322 return BAD_VALUE; 2323 } 2324 rate = (OMX_U32)(rateFloat); 2325 } 2326 OMX_PARAM_U32TYPE config; 2327 InitOMXParams(&config); 2328 config.nU32 = rate; 2329 status_t err = mOMX->setConfig( 2330 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2331 &config, sizeof(config)); 2332 if (err != OK) { 2333 ALOGI("codec does not support config operating rate (err %d)", err); 2334 } 2335 return OK; 2336} 2337 2338status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2339 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2340 InitOMXParams(¶ms); 2341 params.nPortIndex = kPortIndexOutput; 2342 status_t err = mOMX->getConfig( 2343 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2344 if (err == OK) { 2345 *intraRefreshPeriod = params.nRefreshPeriod; 2346 return OK; 2347 } 2348 2349 // Fallback to query through standard OMX index. 2350 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2351 InitOMXParams(&refreshParams); 2352 refreshParams.nPortIndex = kPortIndexOutput; 2353 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2354 err = mOMX->getParameter( 2355 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2356 if (err != OK || refreshParams.nCirMBs == 0) { 2357 *intraRefreshPeriod = 0; 2358 return OK; 2359 } 2360 2361 // Calculate period based on width and height 2362 uint32_t width, height; 2363 OMX_PARAM_PORTDEFINITIONTYPE def; 2364 InitOMXParams(&def); 2365 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2366 def.nPortIndex = kPortIndexOutput; 2367 err = mOMX->getParameter( 2368 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2369 if (err != OK) { 2370 *intraRefreshPeriod = 0; 2371 return err; 2372 } 2373 width = video_def->nFrameWidth; 2374 height = video_def->nFrameHeight; 2375 // Use H.264/AVC MacroBlock size 16x16 2376 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2377 
2378 return OK; 2379} 2380 2381status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2382 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2383 InitOMXParams(¶ms); 2384 params.nPortIndex = kPortIndexOutput; 2385 params.nRefreshPeriod = intraRefreshPeriod; 2386 status_t err = mOMX->setConfig( 2387 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2388 if (err == OK) { 2389 return OK; 2390 } 2391 2392 // Only in configure state, a component could invoke setParameter. 2393 if (!inConfigure) { 2394 return INVALID_OPERATION; 2395 } else { 2396 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2397 } 2398 2399 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2400 InitOMXParams(&refreshParams); 2401 refreshParams.nPortIndex = kPortIndexOutput; 2402 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2403 2404 if (intraRefreshPeriod == 0) { 2405 // 0 means disable intra refresh. 2406 refreshParams.nCirMBs = 0; 2407 } else { 2408 // Calculate macroblocks that need to be intra coded base on width and height 2409 uint32_t width, height; 2410 OMX_PARAM_PORTDEFINITIONTYPE def; 2411 InitOMXParams(&def); 2412 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2413 def.nPortIndex = kPortIndexOutput; 2414 err = mOMX->getParameter( 2415 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2416 if (err != OK) { 2417 return err; 2418 } 2419 width = video_def->nFrameWidth; 2420 height = video_def->nFrameHeight; 2421 // Use H.264/AVC MacroBlock size 16x16 2422 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2423 } 2424 2425 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2426 &refreshParams, sizeof(refreshParams)); 2427 if (err != OK) { 2428 return err; 2429 } 2430 2431 return OK; 2432} 2433 2434status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2435 OMX_PARAM_PORTDEFINITIONTYPE def; 2436 InitOMXParams(&def); 
2437 def.nPortIndex = portIndex; 2438 2439 status_t err = mOMX->getParameter( 2440 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2441 2442 if (err != OK) { 2443 return err; 2444 } 2445 2446 if (def.nBufferSize >= size) { 2447 return OK; 2448 } 2449 2450 def.nBufferSize = size; 2451 2452 err = mOMX->setParameter( 2453 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2454 2455 if (err != OK) { 2456 return err; 2457 } 2458 2459 err = mOMX->getParameter( 2460 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2461 2462 if (err != OK) { 2463 return err; 2464 } 2465 2466 if (def.nBufferSize < size) { 2467 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2468 return FAILED_TRANSACTION; 2469 } 2470 2471 return OK; 2472} 2473 2474status_t ACodec::selectAudioPortFormat( 2475 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2476 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2477 InitOMXParams(&format); 2478 2479 format.nPortIndex = portIndex; 2480 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2481 format.nIndex = index; 2482 status_t err = mOMX->getParameter( 2483 mNode, OMX_IndexParamAudioPortFormat, 2484 &format, sizeof(format)); 2485 2486 if (err != OK) { 2487 return err; 2488 } 2489 2490 if (format.eEncoding == desiredFormat) { 2491 break; 2492 } 2493 2494 if (index == kMaxIndicesToCheck) { 2495 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2496 mComponentName.c_str(), index, 2497 asString(format.eEncoding), format.eEncoding); 2498 return ERROR_UNSUPPORTED; 2499 } 2500 } 2501 2502 return mOMX->setParameter( 2503 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2504} 2505 2506status_t ACodec::setupAACCodec( 2507 bool encoder, int32_t numChannels, int32_t sampleRate, 2508 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2509 int32_t maxOutputChannelCount, const drcParams_t& drc, 2510 int32_t pcmLimiterEnable) { 2511 if (encoder && isADTS) { 2512 return 
-EINVAL; 2513 } 2514 2515 status_t err = setupRawAudioFormat( 2516 encoder ? kPortIndexInput : kPortIndexOutput, 2517 sampleRate, 2518 numChannels); 2519 2520 if (err != OK) { 2521 return err; 2522 } 2523 2524 if (encoder) { 2525 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2526 2527 if (err != OK) { 2528 return err; 2529 } 2530 2531 OMX_PARAM_PORTDEFINITIONTYPE def; 2532 InitOMXParams(&def); 2533 def.nPortIndex = kPortIndexOutput; 2534 2535 err = mOMX->getParameter( 2536 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2537 2538 if (err != OK) { 2539 return err; 2540 } 2541 2542 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2543 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2544 2545 err = mOMX->setParameter( 2546 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2547 2548 if (err != OK) { 2549 return err; 2550 } 2551 2552 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2553 InitOMXParams(&profile); 2554 profile.nPortIndex = kPortIndexOutput; 2555 2556 err = mOMX->getParameter( 2557 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2558 2559 if (err != OK) { 2560 return err; 2561 } 2562 2563 profile.nChannels = numChannels; 2564 2565 profile.eChannelMode = 2566 (numChannels == 1) 2567 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2568 2569 profile.nSampleRate = sampleRate; 2570 profile.nBitRate = bitRate; 2571 profile.nAudioBandWidth = 0; 2572 profile.nFrameLength = 0; 2573 profile.nAACtools = OMX_AUDIO_AACToolAll; 2574 profile.nAACERtools = OMX_AUDIO_AACERNone; 2575 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2576 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2577 switch (sbrMode) { 2578 case 0: 2579 // disable sbr 2580 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2581 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2582 break; 2583 case 1: 2584 // enable single-rate sbr 2585 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2586 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2587 break; 2588 case 2: 2589 // enable dual-rate sbr 2590 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2591 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2592 break; 2593 case -1: 2594 // enable both modes -> the codec will decide which mode should be used 2595 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2596 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2597 break; 2598 default: 2599 // unsupported sbr mode 2600 return BAD_VALUE; 2601 } 2602 2603 2604 err = mOMX->setParameter( 2605 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2606 2607 if (err != OK) { 2608 return err; 2609 } 2610 2611 return err; 2612 } 2613 2614 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2615 InitOMXParams(&profile); 2616 profile.nPortIndex = kPortIndexInput; 2617 2618 err = mOMX->getParameter( 2619 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2620 2621 if (err != OK) { 2622 return err; 2623 } 2624 2625 profile.nChannels = numChannels; 2626 profile.nSampleRate = sampleRate; 2627 2628 profile.eAACStreamFormat = 2629 isADTS 2630 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2631 : OMX_AUDIO_AACStreamFormatMP4FF; 2632 2633 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2634 InitOMXParams(&presentation); 2635 presentation.nMaxOutputChannels = maxOutputChannelCount; 2636 presentation.nDrcCut = drc.drcCut; 2637 presentation.nDrcBoost = drc.drcBoost; 2638 presentation.nHeavyCompression = drc.heavyCompression; 2639 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2640 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2641 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2642 2643 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2644 if (res == OK) { 2645 // optional parameters, will not cause configuration failure 2646 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2647 &presentation, sizeof(presentation)); 2648 } else { 2649 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2650 } 2651 return res; 2652} 2653 2654status_t ACodec::setupAC3Codec( 2655 bool encoder, int32_t numChannels, int32_t sampleRate) { 2656 status_t err = setupRawAudioFormat( 2657 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2658 2659 if (err != OK) { 2660 return err; 2661 } 2662 2663 if (encoder) { 2664 ALOGW("AC3 encoding is not supported."); 2665 return INVALID_OPERATION; 2666 } 2667 2668 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2669 InitOMXParams(&def); 2670 def.nPortIndex = kPortIndexInput; 2671 2672 err = mOMX->getParameter( 2673 mNode, 2674 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2675 &def, 2676 sizeof(def)); 2677 2678 if (err != OK) { 2679 return err; 2680 } 2681 2682 def.nChannels = numChannels; 2683 def.nSampleRate = sampleRate; 2684 2685 return mOMX->setParameter( 2686 mNode, 2687 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2688 &def, 2689 sizeof(def)); 2690} 2691 2692status_t ACodec::setupEAC3Codec( 2693 bool encoder, int32_t numChannels, int32_t sampleRate) { 2694 status_t err = setupRawAudioFormat( 2695 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2696 2697 if (err != OK) { 2698 return err; 2699 } 2700 2701 if (encoder) { 2702 ALOGW("EAC3 encoding is not supported."); 2703 return INVALID_OPERATION; 2704 } 2705 2706 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2707 InitOMXParams(&def); 2708 def.nPortIndex = kPortIndexInput; 2709 2710 err = mOMX->getParameter( 2711 mNode, 2712 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2713 &def, 2714 sizeof(def)); 2715 2716 if (err != OK) { 2717 return err; 2718 } 2719 2720 def.nChannels = numChannels; 2721 def.nSampleRate = sampleRate; 2722 2723 return mOMX->setParameter( 2724 mNode, 2725 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2726 &def, 2727 sizeof(def)); 2728} 2729 2730static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2731 bool isAMRWB, int32_t bps) { 2732 if (isAMRWB) { 2733 if (bps <= 6600) { 2734 return OMX_AUDIO_AMRBandModeWB0; 2735 } else if (bps <= 8850) { 2736 return OMX_AUDIO_AMRBandModeWB1; 2737 } else if (bps <= 12650) { 2738 return OMX_AUDIO_AMRBandModeWB2; 2739 } else if (bps <= 14250) { 2740 return OMX_AUDIO_AMRBandModeWB3; 
2741 } else if (bps <= 15850) { 2742 return OMX_AUDIO_AMRBandModeWB4; 2743 } else if (bps <= 18250) { 2744 return OMX_AUDIO_AMRBandModeWB5; 2745 } else if (bps <= 19850) { 2746 return OMX_AUDIO_AMRBandModeWB6; 2747 } else if (bps <= 23050) { 2748 return OMX_AUDIO_AMRBandModeWB7; 2749 } 2750 2751 // 23850 bps 2752 return OMX_AUDIO_AMRBandModeWB8; 2753 } else { // AMRNB 2754 if (bps <= 4750) { 2755 return OMX_AUDIO_AMRBandModeNB0; 2756 } else if (bps <= 5150) { 2757 return OMX_AUDIO_AMRBandModeNB1; 2758 } else if (bps <= 5900) { 2759 return OMX_AUDIO_AMRBandModeNB2; 2760 } else if (bps <= 6700) { 2761 return OMX_AUDIO_AMRBandModeNB3; 2762 } else if (bps <= 7400) { 2763 return OMX_AUDIO_AMRBandModeNB4; 2764 } else if (bps <= 7950) { 2765 return OMX_AUDIO_AMRBandModeNB5; 2766 } else if (bps <= 10200) { 2767 return OMX_AUDIO_AMRBandModeNB6; 2768 } 2769 2770 // 12200 bps 2771 return OMX_AUDIO_AMRBandModeNB7; 2772 } 2773} 2774 2775status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2776 OMX_AUDIO_PARAM_AMRTYPE def; 2777 InitOMXParams(&def); 2778 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2779 2780 status_t err = 2781 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2782 2783 if (err != OK) { 2784 return err; 2785 } 2786 2787 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2788 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2789 2790 err = mOMX->setParameter( 2791 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2792 2793 if (err != OK) { 2794 return err; 2795 } 2796 2797 return setupRawAudioFormat( 2798 encoder ? kPortIndexInput : kPortIndexOutput, 2799 isWAMR ? 
16000 : 8000 /* sampleRate */, 2800 1 /* numChannels */); 2801} 2802 2803status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2804 if (encoder) { 2805 return INVALID_OPERATION; 2806 } 2807 2808 return setupRawAudioFormat( 2809 kPortIndexInput, sampleRate, numChannels); 2810} 2811 2812status_t ACodec::setupFlacCodec( 2813 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2814 2815 if (encoder) { 2816 OMX_AUDIO_PARAM_FLACTYPE def; 2817 InitOMXParams(&def); 2818 def.nPortIndex = kPortIndexOutput; 2819 2820 // configure compression level 2821 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2822 if (err != OK) { 2823 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2824 return err; 2825 } 2826 def.nCompressionLevel = compressionLevel; 2827 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2828 if (err != OK) { 2829 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2830 return err; 2831 } 2832 } 2833 2834 return setupRawAudioFormat( 2835 encoder ? 
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures |portIndex| for raw (linear) PCM with the given sample rate,
// channel count and sample encoding. If the component rejects an 8-bit or
// float request, falls back to 16-bit signed PCM; the caller also verifies
// the result via readback in case the codec ignores these fields.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    // First switch the port's audio coding to PCM via the port definition.
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Then fill in PCM mode details: channels, numeric type, bit depth, rate.
    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone is numerically 0 (== OK), so an
        // unsupported channel mapping is reported to the caller as success —
        // confirm this early-out is intentional.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled video playback: asks the component for a sideband handle
// tied to |audioHwSync| and attaches it to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Selects a (compression format, color format) pair on |portIndex| by
// enumerating the component's supported combinations (up to
// kMaxIndicesToCheck entries). A flexible color format request is
// substituted by the codec's concrete equivalent when one is advertised.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround: TI's encoder reports unreliable values for the field
        // that is irrelevant to the queried port, so only match on the
        // relevant one.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        // In non-legacy mode the component's first (most preferred) entry wins.
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Mapping between MIME types and OMX video coding types; used for lookups in
// both directions below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Looks up the OMX video coding type for |mime| (case-insensitive match).
// On failure sets *codingType to OMX_VIDEO_CodingUnused and returns
// ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: MIME type for an OMX video coding type. On failure clears
// *mime and returns ERROR_UNSUPPORTED.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Configures a video decoder from |msg|: input compression format from the
// MIME type, output color format (requested or component-preferred), frame
// size/rate on both ports, and color aspects (optional — ERROR_UNSUPPORTED
// from the aspects step is ignored).
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // Requested format unsupported — fall back to the component's
            // preferred format (legacy-flexible only for bytebuffer output).
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Frame rate may arrive as float or int; default to -1 when absent.
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Caches the vendor extension index used to exchange color aspects with the
// component; mDescribeColorAspectsIndex is zeroed when unsupported.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params|' color aspects to the codec; when |verify| is set and the
// set succeeded, reads them back so the caller sees the codec's actual state.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] getting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Derives decoder output color aspects from the configure() format (with
// size-based defaults for surface output) and communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and dataspace, if requested via
// params.bRequestingDataSpace) into |params|.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Only warn about an advertised-but-failing query for plain aspect reads;
    // dataspace requests are allowed to be unimplemented.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format| — only when the
// codec actually supports the color-aspects extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for the aspects in |params|: first asks the codec
// for guidance when |tryCodec| is set, otherwise (or on unsupported/unknown
// answer) derives it from the aspects themselves.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Re-negotiates decoder color aspects after configuration: sets them with
// readback, records the result in |outputFormat|, and (when |dataSpace| is
// non-null) resolves the dataspace to use.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    // Recorder clients also need a dataspace recorded into the input format.
    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner |err| shadows the outer declaration above,
        // so a failure inside the loop never propagates to the caller —
        // confirm whether the shadowing is intentional.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Configures a video encoder from |msg|: input port (raw frames — size,
// stride, slice height, frame rate, color format), output port (coded stream —
// size, bitrate, compression format), then per-codec parameters and color
// aspects.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV 4:2:0 input: 1.5 bytes per pixel of the padded frame.
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on this int32 path,
        // not when "frame-rate" is supplied as a float — confirm intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in Q16 fixed-point frames per second.
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    // "intra-refresh-period" is an optional key; failure here is tolerated.
    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic/adaptive intra-macroblock refresh on the output port
// using the "intra-refresh-*-mbs"/"-ref" keys from |msg|.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds; <0 = one I-frame only, 0 = all
// I-frames) into the number of P-frames between I-frames.
static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
    if (iFramesInterval < 0) {
        return 0xFFFFFFFF;
    } else if (iFramesInterval == 0) {
        return 0;
    }
    OMX_U32 ret = frameRate * iFramesInterval;
    return ret;
}

// Reads "bitrate-mode" from |msg|; defaults to variable bitrate.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder specifics: GOP structure, profile/level, bitrate and error
// correction.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // "profile" requires a matching "level"; both are validated against the
    // component's advertised capabilities.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder specifics: GOP structure, profile/level, bitrate and error
// correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that accommodates the given size, frame rate
// and bitrate (bitrate first normalized to a baseline/main-profile kbps
// equivalent). Returns 0 if no listed level fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*   MBps      MB   dim  bitrate        level */
        {     1485,    99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {     1485,    99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {     3000,   396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {     6000,   396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {    11880,   396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {    11880,   396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {    19800,   792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {    20250,  1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {    40500,  1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {   108000,  3600,  169,  14000, OMX_VIDEO_AVCLevel31 },
        {   216000,  5120,  202,  20000, OMX_VIDEO_AVCLevel32 },
        {   245760,  8192,  256,  20000, OMX_VIDEO_AVCLevel4  },
        {   245760,  8192,  256,  50000, OMX_VIDEO_AVCLevel41 },
        {   522240,  8704,  263,  50000, OMX_VIDEO_AVCLevel42 },
        {   589824, 22080,  420, 135000, OMX_VIDEO_AVCLevel5  },
        {   983040, 36864,  543, 240000, OMX_VIDEO_AVCLevel51 },
        {  2073600, 36864,  543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

// AVC encoder specifics: GOP structure, profile-dependent tool selection
// (CABAC, B-frames, 8x8 inference for Main/High), and bitrate.
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
            // NOTE(review): the format string suggests mode/err ordering, but
            // |err| is passed for %d and the mode for %x — confirm the
            // argument order is as intended.
            ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
                    err, intraRefreshMode);
            return err;
        }
    }

    OMX_VIDEO_PARAM_AVCTYPE h264type;
    InitOMXParams(&h264type);
    h264type.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    h264type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
        h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
    }

    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 1;
        h264type.nBFrames = 0;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
        if (h264type.nPFrames == 0) {
            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
        }
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_FALSE;
        h264type.bWeightedPPrediction = OMX_FALSE;
        h264type.bconstIpred = OMX_FALSE;
        h264type.bDirect8x8Inference = OMX_FALSE;
        h264type.bDirectSpatialTemporal = OMX_FALSE;
        h264type.nCabacInitIdc = 0;
    } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain ||
            h264type.eProfile == OMX_VIDEO_AVCProfileHigh) {
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 2;
        h264type.nBFrames = 1;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
        h264type.nAllowedPictureTypes =
            OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB;
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_TRUE;
        h264type.bWeightedPPrediction = OMX_TRUE;
        h264type.bconstIpred = OMX_TRUE;
        h264type.bDirect8x8Inference = OMX_TRUE;
        h264type.bDirectSpatialTemporal = OMX_TRUE;
        h264type.nCabacInitIdc = 1;
    }

    if (h264type.nBFrames != 0) {
        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
    }

    h264type.bEnableUEP = OMX_FALSE;
    h264type.bEnableFMO = OMX_FALSE;
    h264type.bEnableASO = OMX_FALSE;
    h264type.bEnableRS = OMX_FALSE;
    h264type.bFrameMBsOnly = OMX_TRUE;
    h264type.bMBAFF = OMX_FALSE;
    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// HEVC encoder specifics: profile/level and bitrate.
status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_HEVCTYPE hevcType;
    InitOMXParams(&hevcType);
    hevcType.nPortIndex = kPortIndexOutput;

    status_t err = OK;
    err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);
        if (err != OK) {
            return err;
        }

        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
4052 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4053 4054 err = mOMX->setParameter( 4055 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4056 if (err != OK) { 4057 return err; 4058 } 4059 4060 return configureBitrate(bitrate, bitrateMode); 4061} 4062 4063status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4064 int32_t bitrate; 4065 int32_t iFrameInterval = 0; 4066 size_t tsLayers = 0; 4067 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4068 OMX_VIDEO_VPXTemporalLayerPatternNone; 4069 static const uint32_t kVp8LayerRateAlloction 4070 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4071 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4072 {100, 100, 100}, // 1 layer 4073 { 60, 100, 100}, // 2 layers {60%, 40%} 4074 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4075 }; 4076 if (!msg->findInt32("bitrate", &bitrate)) { 4077 return INVALID_OPERATION; 4078 } 4079 msg->findInt32("i-frame-interval", &iFrameInterval); 4080 4081 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4082 4083 float frameRate; 4084 if (!msg->findFloat("frame-rate", &frameRate)) { 4085 int32_t tmp; 4086 if (!msg->findInt32("frame-rate", &tmp)) { 4087 return INVALID_OPERATION; 4088 } 4089 frameRate = (float)tmp; 4090 } 4091 4092 AString tsSchema; 4093 if (msg->findString("ts-schema", &tsSchema)) { 4094 if (tsSchema == "webrtc.vp8.1-layer") { 4095 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4096 tsLayers = 1; 4097 } else if (tsSchema == "webrtc.vp8.2-layer") { 4098 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4099 tsLayers = 2; 4100 } else if (tsSchema == "webrtc.vp8.3-layer") { 4101 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4102 tsLayers = 3; 4103 } else { 4104 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4105 } 4106 } 4107 4108 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4109 InitOMXParams(&vp8type); 4110 vp8type.nPortIndex = kPortIndexOutput; 4111 status_t err = 
mOMX->getParameter( 4112 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4113 &vp8type, sizeof(vp8type)); 4114 4115 if (err == OK) { 4116 if (iFrameInterval > 0) { 4117 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4118 } 4119 vp8type.eTemporalPattern = pattern; 4120 vp8type.nTemporalLayerCount = tsLayers; 4121 if (tsLayers > 0) { 4122 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4123 vp8type.nTemporalLayerBitrateRatio[i] = 4124 kVp8LayerRateAlloction[tsLayers - 1][i]; 4125 } 4126 } 4127 if (bitrateMode == OMX_Video_ControlRateConstant) { 4128 vp8type.nMinQuantizer = 2; 4129 vp8type.nMaxQuantizer = 63; 4130 } 4131 4132 err = mOMX->setParameter( 4133 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4134 &vp8type, sizeof(vp8type)); 4135 if (err != OK) { 4136 ALOGW("Extended VP8 parameters set failed: %d", err); 4137 } 4138 } 4139 4140 return configureBitrate(bitrate, bitrateMode); 4141} 4142 4143status_t ACodec::verifySupportForProfileAndLevel( 4144 int32_t profile, int32_t level) { 4145 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4146 InitOMXParams(¶ms); 4147 params.nPortIndex = kPortIndexOutput; 4148 4149 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4150 params.nProfileIndex = index; 4151 status_t err = mOMX->getParameter( 4152 mNode, 4153 OMX_IndexParamVideoProfileLevelQuerySupported, 4154 ¶ms, 4155 sizeof(params)); 4156 4157 if (err != OK) { 4158 return err; 4159 } 4160 4161 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4162 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4163 4164 if (profile == supportedProfile && level <= supportedLevel) { 4165 return OK; 4166 } 4167 4168 if (index == kMaxIndicesToCheck) { 4169 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4170 mComponentName.c_str(), index, 4171 params.eProfile, params.eLevel); 4172 } 4173 } 4174 return ERROR_UNSUPPORTED; 4175} 4176 4177status_t ACodec::configureBitrate( 
// --- continuation of configureBitrate() (signature opens just above) ---
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    // read-modify-write so fields other than rate/mode keep component defaults
    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync-marker based error resilience on the output port (HEC, data
// partitioning and RVLC stay off). Components that do not implement the
// error-correction parameter are tolerated: the feature is optional.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Sets frame size, compression format / color format and (input ports only)
// frame rate on the given video port via OMX_IndexParamPortDefinition.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
// --- continuation of setVideoFormatOnPort() ---
        // floor the input buffer size at 64KiB (heuristic, see XXX above)
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is in Q16 fixed point
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Tells the component whether output will use native (graphic) buffers.
// Without a native window the disable call is best-effort and its result is
// intentionally ignored.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on |portIndex| currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum number
// (mNumUndequeuedBuffers) remains with it. No-op without a native window.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on |portIndex| is owned by us or by the
// native window, i.e. none is still held by the component.
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: checks both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues |msg| for re-delivery after the current state transition completes.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Re-dispatches all deferred messages in their original order. The queue is
// swapped out first so messages deferred during processing are not re-run now.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills out a MediaImage2 plane description for the common 8-bit YUV420
// layouts; used when the component does not implement the
// describeColorFormat extension. (Function continues past this chunk line.)
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not
know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format: luma plane is identical in all supported layouts
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-aligned strides, V plane precedes U
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar: // used for YV12
        case OMX_COLOR_FormatYUV420PackedPlanar:
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved chroma, U first
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // unreachable: formats were filtered at the top of the function
            TRESPASS();
    }
    return true;
}

// static
// Queries the component's describeColorFormat (v1, then v2) extension for a
// MediaImage description; falls back to describeDefaultColorFormat() when
// neither extension yields a known layout.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
            node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Returns true (and sets |*flexibleEquivalent|) if |colorFormat| describes a
// standard 8-bit YUV420 layout exposable as COLOR_FormatYUV420Flexible.
// (Function continues past this chunk line.)
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                &&
// --- continuation of isFlexibleColorFormat() ---
                img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
                return true;
            }
        }
    }
    return false;
}

// Queries the port definition for |portIndex| and populates |notify| with the
// format keys (mime, dimensions, crop, color/PCM info, ...) that MediaCodec
// exposes. Returns an error if the port direction/domain or reported values
// are inconsistent. (Function continues past this chunk boundary.)
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    // sanity: the component must report the expected direction for this port
    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // raw video: decoder output or encoder input only
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // client accesses the pixels directly: publish the
                        // plane layout as "image-data"
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // default to the full frame if the crop query fails
                        if (mOMX->getConfig(
                                mNode,
                                (portIndex == kPortIndexOutput ?
                                        OMX_IndexConfigCommonOutputCrop :
                                        OMX_IndexConfigCommonInputCrop),
                                &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // reject crops that fall outside the frame
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // "crop" uses inclusive right/bottom coordinates
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // report the configured temporal-layer schema, if any
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                            videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // only linear PCM, interleaved (or mono) is supported
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // map numerical format + bit depth to an AudioEncoding
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setInt32("channel-count", 1);
                    // band mode determines narrow-band (8kHz) vs wide-band (16kHz)
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // mu-law / A-law / plain linear PCM
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 4954 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 4955 break; 4956 } 4957 4958 case OMX_AUDIO_CodingGSMFR: 4959 { 4960 OMX_AUDIO_PARAM_PCMMODETYPE params; 4961 InitOMXParams(¶ms); 4962 params.nPortIndex = portIndex; 4963 4964 err = mOMX->getParameter( 4965 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4966 if (err != OK) { 4967 return err; 4968 } 4969 4970 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 4971 notify->setInt32("channel-count", params.nChannels); 4972 notify->setInt32("sample-rate", params.nSamplingRate); 4973 break; 4974 } 4975 4976 default: 4977 ALOGE("Unsupported audio coding: %s(%d)\n", 4978 asString(audioDef->eEncoding), audioDef->eEncoding); 4979 return BAD_TYPE; 4980 } 4981 break; 4982 } 4983 4984 default: 4985 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4986 return BAD_TYPE; 4987 } 4988 4989 return OK; 4990} 4991 4992void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 4993 // aspects are normally communicated in ColorAspects 4994 int32_t range, standard, transfer; 4995 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 4996 4997 // if some aspects are unspecified, use dataspace fields 4998 if (range != 0) { 4999 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5000 } 5001 if (standard != 0) { 5002 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5003 } 5004 if (transfer != 0) { 5005 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5006 } 5007 5008 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5009 if (range != 0) { 5010 mOutputFormat->setInt32("color-range", range); 5011 } 5012 if (standard != 0) { 5013 mOutputFormat->setInt32("color-standard", standard); 5014 } 5015 if (transfer != 0) { 5016 mOutputFormat->setInt32("color-transfer", transfer); 5017 } 5018 5019 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5020 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5021 dataSpace, 5022 aspects.mRange, asString(aspects.mRange), 5023 aspects.mPrimaries, asString(aspects.mPrimaries), 5024 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5025 aspects.mTransfer, asString(aspects.mTransfer), 5026 range, asString((ColorRange)range), 5027 standard, asString((ColorStandard)standard), 5028 transfer, asString((ColorTransfer)transfer)); 5029} 5030 5031void ACodec::onOutputFormatChanged() { 5032 // store new output format 5033 mOutputFormat = mBaseOutputFormat->dup(); 5034 5035 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5036 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5037 return; 5038 } 5039 5040 if (!mIsVideo && !mIsEncoder) { 5041 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5042 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5043 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5044 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5045 5046 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5047 if (mConverter[kPortIndexOutput] != NULL) { 5048 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5049 } 5050 } 5051 5052 if (mTunneled) { 5053 sendFormatChange(); 5054 } 5055} 5056 5057void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5058 AString mime; 5059 CHECK(mOutputFormat->findString("mime", &mime)); 5060 5061 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5062 // notify renderer of the crop change and dataspace change 5063 // NOTE: native window uses extended right-bottom coordinate 5064 int32_t left, top, right, bottom; 5065 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5066 notify->setRect("crop", left, top, right + 1, bottom + 1); 5067 } 5068 5069 int32_t dataSpace; 5070 if 
(mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5071 notify->setInt32("dataspace", dataSpace); 5072 } 5073 } 5074} 5075 5076void ACodec::sendFormatChange() { 5077 AString mime; 5078 CHECK(mOutputFormat->findString("mime", &mime)); 5079 5080 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5081 int32_t channelCount; 5082 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5083 if (mSkipCutBuffer != NULL) { 5084 size_t prevbufsize = mSkipCutBuffer->size(); 5085 if (prevbufsize != 0) { 5086 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5087 } 5088 } 5089 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5090 } 5091 5092 sp<AMessage> notify = mNotify->dup(); 5093 notify->setInt32("what", kWhatOutputFormatChanged); 5094 notify->setMessage("format", mOutputFormat); 5095 notify->post(); 5096 5097 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5098 mLastOutputFormat = mOutputFormat; 5099} 5100 5101void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5102 sp<AMessage> notify = mNotify->dup(); 5103 notify->setInt32("what", CodecBase::kWhatError); 5104 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5105 5106 if (internalError == UNKNOWN_ERROR) { // find better error code 5107 const status_t omxStatus = statusFromOMXError(error); 5108 if (omxStatus != 0) { 5109 internalError = omxStatus; 5110 } else { 5111 ALOGW("Invalid OMX error %#x", error); 5112 } 5113 } 5114 5115 mFatalError = true; 5116 5117 notify->setInt32("err", internalError); 5118 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 
5119 notify->post(); 5120} 5121 5122//////////////////////////////////////////////////////////////////////////////// 5123 5124ACodec::PortDescription::PortDescription() { 5125} 5126 5127status_t ACodec::requestIDRFrame() { 5128 if (!mIsEncoder) { 5129 return ERROR_UNSUPPORTED; 5130 } 5131 5132 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5133 InitOMXParams(¶ms); 5134 5135 params.nPortIndex = kPortIndexOutput; 5136 params.IntraRefreshVOP = OMX_TRUE; 5137 5138 return mOMX->setConfig( 5139 mNode, 5140 OMX_IndexConfigVideoIntraVOPRefresh, 5141 ¶ms, 5142 sizeof(params)); 5143} 5144 5145void ACodec::PortDescription::addBuffer( 5146 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5147 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5148 mBufferIDs.push_back(id); 5149 mBuffers.push_back(buffer); 5150 mHandles.push_back(handle); 5151 mMemRefs.push_back(memRef); 5152} 5153 5154size_t ACodec::PortDescription::countBuffers() { 5155 return mBufferIDs.size(); 5156} 5157 5158IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5159 return mBufferIDs.itemAt(index); 5160} 5161 5162sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5163 return mBuffers.itemAt(index); 5164} 5165 5166sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5167 return mHandles.itemAt(index); 5168} 5169 5170sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const { 5171 return mMemRefs.itemAt(index); 5172} 5173 5174//////////////////////////////////////////////////////////////////////////////// 5175 5176ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5177 : AState(parentState), 5178 mCodec(codec) { 5179} 5180 5181ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5182 OMX_U32 /* portIndex */) { 5183 return KEEP_BUFFERS; 5184} 5185 5186bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5187 switch (msg->what()) { 5188 case kWhatInputBufferFilled: 5189 { 5190 
onInputBufferFilled(msg); 5191 break; 5192 } 5193 5194 case kWhatOutputBufferDrained: 5195 { 5196 onOutputBufferDrained(msg); 5197 break; 5198 } 5199 5200 case ACodec::kWhatOMXMessageList: 5201 { 5202 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5203 } 5204 5205 case ACodec::kWhatOMXMessageItem: 5206 { 5207 // no need to check as we already did it for kWhatOMXMessageList 5208 return onOMXMessage(msg); 5209 } 5210 5211 case ACodec::kWhatOMXMessage: 5212 { 5213 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5214 } 5215 5216 case ACodec::kWhatSetSurface: 5217 { 5218 sp<AReplyToken> replyID; 5219 CHECK(msg->senderAwaitsResponse(&replyID)); 5220 5221 sp<RefBase> obj; 5222 CHECK(msg->findObject("surface", &obj)); 5223 5224 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5225 5226 sp<AMessage> response = new AMessage; 5227 response->setInt32("err", err); 5228 response->postReply(replyID); 5229 break; 5230 } 5231 5232 case ACodec::kWhatCreateInputSurface: 5233 case ACodec::kWhatSetInputSurface: 5234 case ACodec::kWhatSignalEndOfInputStream: 5235 { 5236 // This may result in an app illegal state exception. 5237 ALOGE("Message 0x%x was not handled", msg->what()); 5238 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5239 return true; 5240 } 5241 5242 case ACodec::kWhatOMXDied: 5243 { 5244 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5245 ALOGE("OMX/mediaserver died, signalling error!"); 5246 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5247 break; 5248 } 5249 5250 case ACodec::kWhatReleaseCodecInstance: 5251 { 5252 ALOGI("[%s] forcing the release of codec", 5253 mCodec->mComponentName.c_str()); 5254 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5255 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5256 mCodec->mComponentName.c_str(), err); 5257 sp<AMessage> notify = mCodec->mNotify->dup(); 5258 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5259 notify->post(); 5260 break; 5261 } 5262 5263 default: 5264 return false; 5265 } 5266 5267 return true; 5268} 5269 5270bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5271 // there is a possibility that this is an outstanding message for a 5272 // codec that we have already destroyed 5273 if (mCodec->mNode == 0) { 5274 ALOGI("ignoring message as already freed component: %s", 5275 msg->debugString().c_str()); 5276 return false; 5277 } 5278 5279 IOMX::node_id nodeID; 5280 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5281 if (nodeID != mCodec->mNode) { 5282 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5283 return false; 5284 } 5285 return true; 5286} 5287 5288bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5289 sp<RefBase> obj; 5290 CHECK(msg->findObject("messages", &obj)); 5291 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5292 5293 bool receivedRenderedEvents = false; 5294 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5295 it != msgList->getList().cend(); ++it) { 5296 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5297 mCodec->handleMessage(*it); 5298 int32_t type; 5299 CHECK((*it)->findInt32("type", &type)); 5300 if (type == omx_message::FRAME_RENDERED) { 5301 receivedRenderedEvents = true; 5302 } 5303 } 5304 5305 if (receivedRenderedEvents) { 5306 // NOTE: all buffers are 
rendered in this case 5307 mCodec->notifyOfRenderedFrames(); 5308 } 5309 return true; 5310} 5311 5312bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5313 int32_t type; 5314 CHECK(msg->findInt32("type", &type)); 5315 5316 switch (type) { 5317 case omx_message::EVENT: 5318 { 5319 int32_t event, data1, data2; 5320 CHECK(msg->findInt32("event", &event)); 5321 CHECK(msg->findInt32("data1", &data1)); 5322 CHECK(msg->findInt32("data2", &data2)); 5323 5324 if (event == OMX_EventCmdComplete 5325 && data1 == OMX_CommandFlush 5326 && data2 == (int32_t)OMX_ALL) { 5327 // Use of this notification is not consistent across 5328 // implementations. We'll drop this notification and rely 5329 // on flush-complete notifications on the individual port 5330 // indices instead. 5331 5332 return true; 5333 } 5334 5335 return onOMXEvent( 5336 static_cast<OMX_EVENTTYPE>(event), 5337 static_cast<OMX_U32>(data1), 5338 static_cast<OMX_U32>(data2)); 5339 } 5340 5341 case omx_message::EMPTY_BUFFER_DONE: 5342 { 5343 IOMX::buffer_id bufferID; 5344 int32_t fenceFd; 5345 5346 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5347 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5348 5349 return onOMXEmptyBufferDone(bufferID, fenceFd); 5350 } 5351 5352 case omx_message::FILL_BUFFER_DONE: 5353 { 5354 IOMX::buffer_id bufferID; 5355 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5356 5357 int32_t rangeOffset, rangeLength, flags, fenceFd; 5358 int64_t timeUs; 5359 5360 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5361 CHECK(msg->findInt32("range_length", &rangeLength)); 5362 CHECK(msg->findInt32("flags", &flags)); 5363 CHECK(msg->findInt64("timestamp", &timeUs)); 5364 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5365 5366 return onOMXFillBufferDone( 5367 bufferID, 5368 (size_t)rangeOffset, (size_t)rangeLength, 5369 (OMX_U32)flags, 5370 timeUs, 5371 fenceFd); 5372 } 5373 5374 case omx_message::FRAME_RENDERED: 5375 { 5376 int64_t mediaTimeUs, systemNano; 5377 5378 
CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5379 CHECK(msg->findInt64("system_nano", &systemNano)); 5380 5381 return onOMXFrameRendered( 5382 mediaTimeUs, systemNano); 5383 } 5384 5385 default: 5386 ALOGE("Unexpected message type: %d", type); 5387 return false; 5388 } 5389} 5390 5391bool ACodec::BaseState::onOMXFrameRendered( 5392 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5393 // ignore outside of Executing and PortSettingsChanged states 5394 return true; 5395} 5396 5397bool ACodec::BaseState::onOMXEvent( 5398 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5399 if (event == OMX_EventDataSpaceChanged) { 5400 ColorAspects aspects; 5401 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5402 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5403 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5404 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5405 5406 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5407 return true; 5408 } 5409 5410 if (event != OMX_EventError) { 5411 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5412 mCodec->mComponentName.c_str(), event, data1, data2); 5413 5414 return false; 5415 } 5416 5417 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5418 5419 // verify OMX component sends back an error we expect. 
5420 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5421 if (!isOMXError(omxError)) { 5422 ALOGW("Invalid OMX error %#x", omxError); 5423 omxError = OMX_ErrorUndefined; 5424 } 5425 mCodec->signalError(omxError); 5426 5427 return true; 5428} 5429 5430bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5431 ALOGV("[%s] onOMXEmptyBufferDone %u", 5432 mCodec->mComponentName.c_str(), bufferID); 5433 5434 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5435 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5436 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5437 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5438 mCodec->dumpBuffers(kPortIndexInput); 5439 if (fenceFd >= 0) { 5440 ::close(fenceFd); 5441 } 5442 return false; 5443 } 5444 info->mStatus = BufferInfo::OWNED_BY_US; 5445 5446 // input buffers cannot take fences, so wait for any fence now 5447 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5448 fenceFd = -1; 5449 5450 // still save fence for completeness 5451 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5452 5453 // We're in "store-metadata-in-buffers" mode, the underlying 5454 // OMX component had access to data that's implicitly refcounted 5455 // by this "MediaBuffer" object. Now that the OMX component has 5456 // told us that it's done with the input buffer, we can decrement 5457 // the mediaBuffer's reference count. 
5458 info->mData->setMediaBufferBase(NULL); 5459 5460 PortMode mode = getPortMode(kPortIndexInput); 5461 5462 switch (mode) { 5463 case KEEP_BUFFERS: 5464 break; 5465 5466 case RESUBMIT_BUFFERS: 5467 postFillThisBuffer(info); 5468 break; 5469 5470 case FREE_BUFFERS: 5471 default: 5472 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5473 return false; 5474 } 5475 5476 return true; 5477} 5478 5479void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5480 if (mCodec->mPortEOS[kPortIndexInput]) { 5481 return; 5482 } 5483 5484 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5485 5486 sp<AMessage> notify = mCodec->mNotify->dup(); 5487 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5488 notify->setInt32("buffer-id", info->mBufferID); 5489 5490 info->mData->meta()->clear(); 5491 notify->setBuffer("buffer", info->mData); 5492 5493 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5494 reply->setInt32("buffer-id", info->mBufferID); 5495 5496 notify->setMessage("reply", reply); 5497 5498 notify->post(); 5499 5500 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5501} 5502 5503void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5504 IOMX::buffer_id bufferID; 5505 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5506 sp<ABuffer> buffer; 5507 int32_t err = OK; 5508 bool eos = false; 5509 PortMode mode = getPortMode(kPortIndexInput); 5510 5511 if (!msg->findBuffer("buffer", &buffer)) { 5512 /* these are unfilled buffers returned by client */ 5513 CHECK(msg->findInt32("err", &err)); 5514 5515 if (err == OK) { 5516 /* buffers with no errors are returned on MediaCodec.flush */ 5517 mode = KEEP_BUFFERS; 5518 } else { 5519 ALOGV("[%s] saw error %d instead of an input buffer", 5520 mCodec->mComponentName.c_str(), err); 5521 eos = true; 5522 } 5523 5524 buffer.clear(); 5525 } 5526 5527 int32_t tmp; 5528 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5529 eos = true; 5530 
err = ERROR_END_OF_STREAM; 5531 } 5532 5533 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5534 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5535 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5536 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5537 mCodec->dumpBuffers(kPortIndexInput); 5538 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5539 return; 5540 } 5541 5542 info->mStatus = BufferInfo::OWNED_BY_US; 5543 5544 switch (mode) { 5545 case KEEP_BUFFERS: 5546 { 5547 if (eos) { 5548 if (!mCodec->mPortEOS[kPortIndexInput]) { 5549 mCodec->mPortEOS[kPortIndexInput] = true; 5550 mCodec->mInputEOSResult = err; 5551 } 5552 } 5553 break; 5554 } 5555 5556 case RESUBMIT_BUFFERS: 5557 { 5558 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5559 // Do not send empty input buffer w/o EOS to the component. 5560 if (buffer->size() == 0 && !eos) { 5561 postFillThisBuffer(info); 5562 break; 5563 } 5564 5565 int64_t timeUs; 5566 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5567 5568 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5569 5570 int32_t isCSD; 5571 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5572 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5573 } 5574 5575 if (eos) { 5576 flags |= OMX_BUFFERFLAG_EOS; 5577 } 5578 5579 if (buffer != info->mCodecData) { 5580 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5581 mCodec->mComponentName.c_str(), 5582 bufferID, 5583 buffer.get(), info->mCodecData.get()); 5584 5585 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5586 if (converter == NULL) { 5587 converter = getCopyConverter(); 5588 } 5589 status_t err = converter->convert(buffer, info->mCodecData); 5590 if (err != OK) { 5591 mCodec->signalError(OMX_ErrorUndefined, err); 5592 return; 5593 } 5594 } 5595 5596 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5597 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5598 mCodec->mComponentName.c_str(), bufferID); 5599 } else if (flags & OMX_BUFFERFLAG_EOS) { 5600 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5601 mCodec->mComponentName.c_str(), bufferID); 5602 } else { 5603#if TRACK_BUFFER_TIMING 5604 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5605 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5606#else 5607 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5608 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5609#endif 5610 } 5611 5612#if TRACK_BUFFER_TIMING 5613 ACodec::BufferStats stats; 5614 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5615 stats.mFillBufferDoneTimeUs = -1ll; 5616 mCodec->mBufferStats.add(timeUs, stats); 5617#endif 5618 5619 if (mCodec->storingMetadataInDecodedBuffers()) { 5620 // try to submit an output buffer for each input buffer 5621 PortMode outputMode = getPortMode(kPortIndexOutput); 5622 5623 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5624 mCodec->mMetadataBuffersToSubmit, 5625 (outputMode == FREE_BUFFERS ? "FREE" : 5626 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5627 if (outputMode == RESUBMIT_BUFFERS) { 5628 mCodec->submitOutputMetadataBuffer(); 5629 } 5630 } 5631 info->checkReadFence("onInputBufferFilled"); 5632 status_t err2 = mCodec->mOMX->emptyBuffer( 5633 mCodec->mNode, 5634 bufferID, 5635 0, 5636 info->mCodecData->size(), 5637 flags, 5638 timeUs, 5639 info->mFenceFd); 5640 info->mFenceFd = -1; 5641 if (err2 != OK) { 5642 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5643 return; 5644 } 5645 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5646 5647 if (!eos && err == OK) { 5648 getMoreInputDataIfPossible(); 5649 } else { 5650 ALOGV("[%s] Signalled EOS (%d) on the input port", 5651 mCodec->mComponentName.c_str(), err); 5652 5653 mCodec->mPortEOS[kPortIndexInput] = true; 5654 mCodec->mInputEOSResult = err; 5655 } 5656 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5657 if (err != OK && err != ERROR_END_OF_STREAM) { 5658 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5659 mCodec->mComponentName.c_str(), err); 5660 } else { 5661 ALOGV("[%s] Signalling EOS on the input port", 5662 mCodec->mComponentName.c_str()); 5663 } 5664 5665 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5666 mCodec->mComponentName.c_str(), bufferID); 5667 5668 info->checkReadFence("onInputBufferFilled"); 5669 status_t err2 = mCodec->mOMX->emptyBuffer( 5670 mCodec->mNode, 5671 bufferID, 5672 0, 5673 0, 5674 OMX_BUFFERFLAG_EOS, 5675 0, 5676 info->mFenceFd); 5677 info->mFenceFd = -1; 5678 if (err2 != OK) { 5679 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5680 return; 5681 } 5682 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5683 5684 mCodec->mPortEOS[kPortIndexInput] = true; 5685 mCodec->mInputEOSResult = err; 5686 } 5687 break; 5688 } 5689 5690 case FREE_BUFFERS: 5691 break; 5692 5693 default: 5694 ALOGE("invalid port mode: %d", mode); 5695 break; 5696 } 5697} 5698 5699void ACodec::BaseState::getMoreInputDataIfPossible() { 5700 if 
(mCodec->mPortEOS[kPortIndexInput]) { 5701 return; 5702 } 5703 5704 BufferInfo *eligible = NULL; 5705 5706 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5707 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5708 5709#if 0 5710 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5711 // There's already a "read" pending. 5712 return; 5713 } 5714#endif 5715 5716 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5717 eligible = info; 5718 } 5719 } 5720 5721 if (eligible == NULL) { 5722 return; 5723 } 5724 5725 postFillThisBuffer(eligible); 5726} 5727 5728bool ACodec::BaseState::onOMXFillBufferDone( 5729 IOMX::buffer_id bufferID, 5730 size_t rangeOffset, size_t rangeLength, 5731 OMX_U32 flags, 5732 int64_t timeUs, 5733 int fenceFd) { 5734 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5735 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5736 5737 ssize_t index; 5738 status_t err= OK; 5739 5740#if TRACK_BUFFER_TIMING 5741 index = mCodec->mBufferStats.indexOfKey(timeUs); 5742 if (index >= 0) { 5743 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5744 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5745 5746 ALOGI("frame PTS %lld: %lld", 5747 timeUs, 5748 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5749 5750 mCodec->mBufferStats.removeItemsAt(index); 5751 stats = NULL; 5752 } 5753#endif 5754 5755 BufferInfo *info = 5756 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5757 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5758 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5759 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5760 mCodec->dumpBuffers(kPortIndexOutput); 5761 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5762 if (fenceFd >= 0) { 5763 ::close(fenceFd); 5764 } 5765 return true; 5766 } 5767 5768 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5769 info->mStatus = 
BufferInfo::OWNED_BY_US; 5770 5771 if (info->mRenderInfo != NULL) { 5772 // The fence for an emptied buffer must have signaled, but there still could be queued 5773 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5774 // as we will soon requeue this buffer to the surface. While in theory we could still keep 5775 // track of buffers that are requeued to the surface, it is better to add support to the 5776 // buffer-queue to notify us of released buffers and their fences (in the future). 5777 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5778 } 5779 5780 // byte buffers cannot take fences, so wait for any fence now 5781 if (mCodec->mNativeWindow == NULL) { 5782 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5783 fenceFd = -1; 5784 } 5785 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5786 5787 PortMode mode = getPortMode(kPortIndexOutput); 5788 5789 switch (mode) { 5790 case KEEP_BUFFERS: 5791 break; 5792 5793 case RESUBMIT_BUFFERS: 5794 { 5795 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5796 || mCodec->mPortEOS[kPortIndexOutput])) { 5797 ALOGV("[%s] calling fillBuffer %u", 5798 mCodec->mComponentName.c_str(), info->mBufferID); 5799 5800 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 5801 info->mFenceFd = -1; 5802 if (err != OK) { 5803 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5804 return true; 5805 } 5806 5807 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5808 break; 5809 } 5810 5811 sp<AMessage> reply = 5812 new AMessage(kWhatOutputBufferDrained, mCodec); 5813 5814 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5815 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5816 mCodec->sendFormatChange(); 5817 } 5818 5819 if (mCodec->usingMetadataOnEncoderOutput()) { 5820 native_handle_t *handle = NULL; 5821 VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata 
*)info->mData->data(); 5822 VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data(); 5823 if (info->mData->size() >= sizeof(grallocMeta) 5824 && grallocMeta.eType == kMetadataBufferTypeGrallocSource) { 5825 handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle; 5826 } else if (info->mData->size() >= sizeof(nativeMeta) 5827 && nativeMeta.eType == kMetadataBufferTypeANWBuffer) { 5828#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5829 // ANativeWindowBuffer is only valid on 32-bit/mediaserver process 5830 handle = NULL; 5831#else 5832 handle = (native_handle_t *)nativeMeta.pBuffer->handle; 5833#endif 5834 } 5835 info->mData->meta()->setPointer("handle", handle); 5836 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 5837 info->mData->meta()->setInt32("rangeLength", rangeLength); 5838 } else if (info->mData == info->mCodecData) { 5839 info->mData->setRange(rangeOffset, rangeLength); 5840 } else { 5841 info->mCodecData->setRange(rangeOffset, rangeLength); 5842 // in this case we know that mConverter is not null 5843 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5844 info->mCodecData, info->mData); 5845 if (err != OK) { 5846 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5847 return true; 5848 } 5849 } 5850#if 0 5851 if (mCodec->mNativeWindow == NULL) { 5852 if (IsIDR(info->mData)) { 5853 ALOGI("IDR frame"); 5854 } 5855 } 5856#endif 5857 5858 if (mCodec->mSkipCutBuffer != NULL) { 5859 mCodec->mSkipCutBuffer->submit(info->mData); 5860 } 5861 info->mData->meta()->setInt64("timeUs", timeUs); 5862 5863 sp<AMessage> notify = mCodec->mNotify->dup(); 5864 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 5865 notify->setInt32("buffer-id", info->mBufferID); 5866 notify->setBuffer("buffer", info->mData); 5867 notify->setInt32("flags", flags); 5868 5869 reply->setInt32("buffer-id", info->mBufferID); 5870 5871 notify->setMessage("reply", reply); 5872 5873 notify->post(); 5874 5875 
            // Buffer handed to the client; remember the client now owns it.
            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                // Forward EOS (and any error recorded when input EOS was queued)
                // to the client.
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles kWhatOutputBufferDrained: the client has returned an output buffer.
// Verifies the buffer really was owned downstream, optionally applies crop /
// dataspace and queues the buffer to the native window for rendering, then —
// depending on the current output-port mode — keeps, resubmits (fillBuffer)
// or frees the buffer.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    // getSafeStatus tolerates info == NULL; anything but OWNED_BY_DOWNSTREAM
    // means ownership bookkeeping is broken, so fail loudly.
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Optional per-frame crop requested by the client.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    // Optional per-frame dataspace (color space) update.
    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // Hand the graphic buffer (and its fence) to the surface; the fence fd
        // is consumed by queueBuffer, so forget it either way.
        err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        // Not rendering (client dropped the frame, or no surface).
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // dequeueBufferFromNativeWindow may fail and return NULL.
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            // NOTE(review): frees by the index looked up at entry, even if
            // `info` was swapped for the spare above — only reached in
            // FREE_BUFFERS mode where no dequeue happens, so index is valid.
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Uninitialized: drop the OMX-death watcher and reset all
// component-related state so the ACodec can be reused for a new component.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    // Clear every per-component field back to its pristine value.
    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Message dispatch while no component is allocated. Only setup/allocation is
// meaningful here; shutdown and flush are acknowledged immediately since
// there is nothing to tear down or flush.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            // Nothing is allocated; report completion right away.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers exist yet; report flush as complete immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup: allocate the component, configure it, then start it.
// Stops at the first step that fails (each step signals its own error).
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Connects to the OMX master and instantiates a component node.  The caller
// may either name a component explicitly ("componentName") or supply a
// "mime" (+ optional "encoder" flag), in which case all matching codecs are
// tried in rank order until one allocates.  On success transitions to
// Loaded and notifies the client; on failure signals an error and returns
// false.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    // Posted if the OMX service dies while we hold a node (see DeathNotifier
    // hookup below).
    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Explicit component request: accept it only if it exists in the list.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost this thread so component allocation isn't starved.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    // Route OMX callbacks for this node back to us as message lists.
    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded: reset per-session streaming state (EOS flags, counters,
// cached formats) and finish any shutdown that was requested while a
// transition was still in flight.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit
= 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        // A shutdown arrived while we were still transitioning here; honor it
        // now that we are safely in Loaded.
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Shuts down from Loaded: frees the OMX node (unless the client asked to keep
// the component allocated for reuse) and acknowledges an explicit shutdown
// request to the client.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Message dispatch in Loaded: configuration, input-surface creation and start
// are legal here; flush is a no-op acknowledged immediately.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing is flowing yet; report flush as complete immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Applies the client's configuration ("mime" plus codec-specific settings in
// msg) via configureCodec().  On success notifies the client with the
// resulting input/output formats; on failure signals an error and returns
// false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes the previously-configured encoder-surface options (repeat-frame
// delay, max PTS gap, max fps, time lapse, suspended start, color aspects)
// down to the component as internal options.  Returns the first error
// encountered, or OK.  Options left at their sentinel values are skipped.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Time-lapse/slow-motion: pass [frame interval, capture interval].
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best-effort: expose whether the consumer reads the surface with the CPU.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(),
colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new input surface (IGraphicBufferProducer) on the component's
// input port for surface-fed encoding, applies the surface options via
// setupInputSurface(), and replies with kWhatInputSurfaceCreated — carrying
// either the wrapped producer or an error code.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    // Negotiate the dataspace for the encoder surface before creating it.
    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-supplied persistent input surface to the component's
// input port, applies surface options, and replies with
// kWhatInputSurfaceAccepted (with "err" set on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        // The persistent surface pre-dates this codec; push the negotiated
        // dataspace onto its consumer.
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle transition; buffers are allocated once the
// LoadedToIdle state is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded->Idle: the component expects all buffers to be allocated
// before it completes the transition.  On allocation failure, roll back to
// Loaded (sending the component back to OMX_StateLoaded and freeing whatever
// was allocated).
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on both ports; input first, bailing out on failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

// Message dispatch mid Loaded->Idle transition: defer anything that must wait
// for a stable state; trivially acknowledge what can be answered now.
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completion of the Loaded -> Idle state-set command: immediately request the
// Idle -> Executing transition.  Any unexpected completion is treated as a
// failed transaction.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// Message dispatch mid Idle->Executing transition; mirrors LoadedToIdleState:
// defer state-changing requests, acknowledge the rest.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completion of the Idle -> Executing command: resume (submit buffers) and
// enter the Executing state.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

// While executing, both ports run in normal resubmit mode.
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or nobody) own to the component via
// fillBuffer.  Buffers currently held by the native window are skipped; any
// other ownership state is a bookkeeping error and aborts the loop.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        // fillBuffer consumes the fence fd regardless of outcome.
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts the data flow: submit all output buffers to the component and
// post fill-this-buffer requests for every input buffer we own.  Idempotent
// while already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

// Entering Executing: reset the render-timing tracker and replay messages
// that were deferred during the transition.
void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Main steady-state message dispatch: shutdown, flush, resume, runtime
// parameter changes, IDR requests and input-EOS signalling.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            // Remember the request; it is completed once we reach Loaded.
            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            // Best-effort: a failed IDR request is logged but not fatal.
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result if the sender asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    // Keep re-posting until output EOS arrives.
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime ("setParameters") keys to the live component: video
// bitrate, start-time skip, input-frame suspension, sync-frame requests,
// operating rate and intra-refresh period.  Returns the first hard error;
// unrecognized keys are simply ignored.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Non-zero means suspend the input surface (drop incoming frames).
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Deliberately swallowed: intra-refresh support is optional.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Tells the component no further input is coming (input-surface encoders)
// and notifies the client, attaching the error code if the signal failed.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// OMX event handling while Executing.  A full port-definition change on the
// output port triggers the disable/re-enable reconfiguration dance; mere
// crop / intra-refresh config changes do not.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Buffer geometry changed: disable the output port, free
                // buffers we still hold, and reconfigure in the dedicated
                // state.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            // EOS flags are handled via the buffer path; nothing to do here.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// During reconfiguration: returned output buffers are freed (the port is
// being rebuilt), while the input port keeps flowing normally.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defer any state-changing client request until the port reconfiguration
// completes.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the disable -> enable -> reallocate sequence for the output port.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    // All output buffers must have been freed before the port
                    // disable completes; leftovers indicate a leak.
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
7235 mCodec->mShutdownInProgress = true; 7236 mCodec->mKeepComponentAllocated = false; 7237 mCodec->changeState(mCodec->mLoadedState); 7238 } 7239 7240 return true; 7241 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7242 if (data2 != (OMX_U32)kPortIndexOutput) { 7243 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7244 return false; 7245 } 7246 7247 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7248 7249 if (mCodec->mExecutingState->active()) { 7250 mCodec->mExecutingState->submitOutputBuffers(); 7251 } 7252 7253 mCodec->changeState(mCodec->mExecutingState); 7254 7255 return true; 7256 } 7257 7258 return false; 7259 } 7260 7261 default: 7262 return false; 7263 } 7264} 7265 7266//////////////////////////////////////////////////////////////////////////////// 7267 7268ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7269 : BaseState(codec), 7270 mComponentNowIdle(false) { 7271} 7272 7273bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7274 bool handled = false; 7275 7276 switch (msg->what()) { 7277 case kWhatFlush: 7278 { 7279 // Don't send me a flush request if you previously wanted me 7280 // to shutdown. 7281 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7282 break; 7283 } 7284 7285 case kWhatShutdown: 7286 { 7287 // We're already doing that... 
7288 7289 handled = true; 7290 break; 7291 } 7292 7293 default: 7294 handled = BaseState::onMessageReceived(msg); 7295 break; 7296 } 7297 7298 return handled; 7299} 7300 7301void ACodec::ExecutingToIdleState::stateEntered() { 7302 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7303 7304 mComponentNowIdle = false; 7305 mCodec->mLastOutputFormat.clear(); 7306} 7307 7308bool ACodec::ExecutingToIdleState::onOMXEvent( 7309 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7310 switch (event) { 7311 case OMX_EventCmdComplete: 7312 { 7313 if (data1 != (OMX_U32)OMX_CommandStateSet 7314 || data2 != (OMX_U32)OMX_StateIdle) { 7315 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7316 asString((OMX_COMMANDTYPE)data1), data1, 7317 asString((OMX_STATETYPE)data2), data2); 7318 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7319 return true; 7320 } 7321 7322 mComponentNowIdle = true; 7323 7324 changeStateIfWeOwnAllBuffers(); 7325 7326 return true; 7327 } 7328 7329 case OMX_EventPortSettingsChanged: 7330 case OMX_EventBufferFlag: 7331 { 7332 // We're shutting down and don't care about this anymore. 7333 return true; 7334 } 7335 7336 default: 7337 return BaseState::onOMXEvent(event, data1, data2); 7338 } 7339} 7340 7341void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7342 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7343 status_t err = mCodec->mOMX->sendCommand( 7344 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7345 if (err == OK) { 7346 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7347 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7348 if (err == OK) { 7349 err = err2; 7350 } 7351 } 7352 7353 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7354 && mCodec->mNativeWindow != NULL) { 7355 // We push enough 1x1 blank buffers to ensure that one of 7356 // them has made it to the display. 
This allows the OMX 7357 // component teardown to zero out any protected buffers 7358 // without the risk of scanning out one of those buffers. 7359 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7360 } 7361 7362 if (err != OK) { 7363 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7364 return; 7365 } 7366 7367 mCodec->changeState(mCodec->mIdleToLoadedState); 7368 } 7369} 7370 7371void ACodec::ExecutingToIdleState::onInputBufferFilled( 7372 const sp<AMessage> &msg) { 7373 BaseState::onInputBufferFilled(msg); 7374 7375 changeStateIfWeOwnAllBuffers(); 7376} 7377 7378void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7379 const sp<AMessage> &msg) { 7380 BaseState::onOutputBufferDrained(msg); 7381 7382 changeStateIfWeOwnAllBuffers(); 7383} 7384 7385//////////////////////////////////////////////////////////////////////////////// 7386 7387ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7388 : BaseState(codec) { 7389} 7390 7391bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7392 bool handled = false; 7393 7394 switch (msg->what()) { 7395 case kWhatShutdown: 7396 { 7397 // We're already doing that... 7398 7399 handled = true; 7400 break; 7401 } 7402 7403 case kWhatFlush: 7404 { 7405 // Don't send me a flush request if you previously wanted me 7406 // to shutdown. 
7407 ALOGE("Got flush request in IdleToLoadedState"); 7408 break; 7409 } 7410 7411 default: 7412 handled = BaseState::onMessageReceived(msg); 7413 break; 7414 } 7415 7416 return handled; 7417} 7418 7419void ACodec::IdleToLoadedState::stateEntered() { 7420 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7421} 7422 7423bool ACodec::IdleToLoadedState::onOMXEvent( 7424 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7425 switch (event) { 7426 case OMX_EventCmdComplete: 7427 { 7428 if (data1 != (OMX_U32)OMX_CommandStateSet 7429 || data2 != (OMX_U32)OMX_StateLoaded) { 7430 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7431 asString((OMX_COMMANDTYPE)data1), data1, 7432 asString((OMX_STATETYPE)data2), data2); 7433 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7434 return true; 7435 } 7436 7437 mCodec->changeState(mCodec->mLoadedState); 7438 7439 return true; 7440 } 7441 7442 default: 7443 return BaseState::onOMXEvent(event, data1, data2); 7444 } 7445} 7446 7447//////////////////////////////////////////////////////////////////////////////// 7448 7449ACodec::FlushingState::FlushingState(ACodec *codec) 7450 : BaseState(codec) { 7451} 7452 7453void ACodec::FlushingState::stateEntered() { 7454 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7455 7456 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7457} 7458 7459bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7460 bool handled = false; 7461 7462 switch (msg->what()) { 7463 case kWhatShutdown: 7464 { 7465 mCodec->deferMessage(msg); 7466 break; 7467 } 7468 7469 case kWhatFlush: 7470 { 7471 // We're already doing this right now. 
7472 handled = true; 7473 break; 7474 } 7475 7476 default: 7477 handled = BaseState::onMessageReceived(msg); 7478 break; 7479 } 7480 7481 return handled; 7482} 7483 7484bool ACodec::FlushingState::onOMXEvent( 7485 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7486 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7487 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7488 7489 switch (event) { 7490 case OMX_EventCmdComplete: 7491 { 7492 if (data1 != (OMX_U32)OMX_CommandFlush) { 7493 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7494 asString((OMX_COMMANDTYPE)data1), data1, data2); 7495 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7496 return true; 7497 } 7498 7499 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7500 if (mFlushComplete[data2]) { 7501 ALOGW("Flush already completed for %s port", 7502 data2 == kPortIndexInput ? "input" : "output"); 7503 return true; 7504 } 7505 mFlushComplete[data2] = true; 7506 7507 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7508 changeStateIfWeOwnAllBuffers(); 7509 } 7510 } else if (data2 == OMX_ALL) { 7511 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7512 ALOGW("received flush complete event for OMX_ALL before ports have been" 7513 "flushed (%d/%d)", 7514 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7515 return false; 7516 } 7517 7518 changeStateIfWeOwnAllBuffers(); 7519 } else { 7520 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7521 } 7522 7523 return true; 7524 } 7525 7526 case OMX_EventPortSettingsChanged: 7527 { 7528 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7529 msg->setInt32("type", omx_message::EVENT); 7530 msg->setInt32("node", mCodec->mNode); 7531 msg->setInt32("event", event); 7532 msg->setInt32("data1", data1); 7533 msg->setInt32("data2", data2); 7534 7535 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7536 
mCodec->mComponentName.c_str()); 7537 7538 mCodec->deferMessage(msg); 7539 7540 return true; 7541 } 7542 7543 default: 7544 return BaseState::onOMXEvent(event, data1, data2); 7545 } 7546 7547 return true; 7548} 7549 7550void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7551 BaseState::onOutputBufferDrained(msg); 7552 7553 changeStateIfWeOwnAllBuffers(); 7554} 7555 7556void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7557 BaseState::onInputBufferFilled(msg); 7558 7559 changeStateIfWeOwnAllBuffers(); 7560} 7561 7562void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7563 if (mFlushComplete[kPortIndexInput] 7564 && mFlushComplete[kPortIndexOutput] 7565 && mCodec->allYourBuffersAreBelongToUs()) { 7566 // We now own all buffers except possibly those still queued with 7567 // the native window for rendering. Let's get those back as well. 7568 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7569 7570 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7571 7572 sp<AMessage> notify = mCodec->mNotify->dup(); 7573 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7574 notify->post(); 7575 7576 mCodec->mPortEOS[kPortIndexInput] = 7577 mCodec->mPortEOS[kPortIndexOutput] = false; 7578 7579 mCodec->mInputEOSResult = OK; 7580 7581 if (mCodec->mSkipCutBuffer != NULL) { 7582 mCodec->mSkipCutBuffer->clear(); 7583 } 7584 7585 mCodec->changeState(mCodec->mExecutingState); 7586 } 7587} 7588 7589status_t ACodec::queryCapabilities( 7590 const AString &name, const AString &mime, bool isEncoder, 7591 sp<MediaCodecInfo::Capabilities> *caps) { 7592 (*caps).clear(); 7593 const char *role = getComponentRole(isEncoder, mime.c_str()); 7594 if (role == NULL) { 7595 return BAD_VALUE; 7596 } 7597 7598 OMXClient client; 7599 status_t err = client.connect(); 7600 if (err != OK) { 7601 return err; 7602 } 7603 7604 sp<IOMX> omx = client.interface(); 7605 sp<CodecObserver> observer = new CodecObserver; 7606 
IOMX::node_id node = 0; 7607 7608 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7609 if (err != OK) { 7610 client.disconnect(); 7611 return err; 7612 } 7613 7614 err = setComponentRole(omx, node, role); 7615 if (err != OK) { 7616 omx->freeNode(node); 7617 client.disconnect(); 7618 return err; 7619 } 7620 7621 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7622 bool isVideo = mime.startsWithIgnoreCase("video/"); 7623 7624 if (isVideo) { 7625 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7626 InitOMXParams(¶m); 7627 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7628 7629 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7630 param.nProfileIndex = index; 7631 status_t err = omx->getParameter( 7632 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7633 ¶m, sizeof(param)); 7634 if (err != OK) { 7635 break; 7636 } 7637 builder->addProfileLevel(param.eProfile, param.eLevel); 7638 7639 if (index == kMaxIndicesToCheck) { 7640 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7641 name.c_str(), index, 7642 param.eProfile, param.eLevel); 7643 } 7644 } 7645 7646 // Color format query 7647 // return colors in the order reported by the OMX component 7648 // prefix "flexible" standard ones with the flexible equivalent 7649 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7650 InitOMXParams(&portFormat); 7651 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7652 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7653 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7654 portFormat.nIndex = index; 7655 status_t err = omx->getParameter( 7656 node, OMX_IndexParamVideoPortFormat, 7657 &portFormat, sizeof(portFormat)); 7658 if (err != OK) { 7659 break; 7660 } 7661 7662 OMX_U32 flexibleEquivalent; 7663 if (isFlexibleColorFormat( 7664 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7665 &flexibleEquivalent)) { 7666 bool marked = false; 7667 for (size_t i = 0; i < supportedColors.size(); ++i) { 7668 if (supportedColors[i] == flexibleEquivalent) { 7669 marked = true; 7670 break; 7671 } 7672 } 7673 if (!marked) { 7674 supportedColors.push(flexibleEquivalent); 7675 builder->addColorFormat(flexibleEquivalent); 7676 } 7677 } 7678 supportedColors.push(portFormat.eColorFormat); 7679 builder->addColorFormat(portFormat.eColorFormat); 7680 7681 if (index == kMaxIndicesToCheck) { 7682 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7683 name.c_str(), index, 7684 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7685 } 7686 } 7687 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7688 // More audio codecs if they have profiles. 7689 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7690 InitOMXParams(¶m); 7691 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7692 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7693 param.nProfileIndex = index; 7694 status_t err = omx->getParameter( 7695 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7696 ¶m, sizeof(param)); 7697 if (err != OK) { 7698 break; 7699 } 7700 // For audio, level is ignored. 
7701 builder->addProfileLevel(param.eProfile, 0 /* level */); 7702 7703 if (index == kMaxIndicesToCheck) { 7704 ALOGW("[%s] stopping checking profiles after %u: %x", 7705 name.c_str(), index, 7706 param.eProfile); 7707 } 7708 } 7709 7710 // NOTE: Without Android extensions, OMX does not provide a way to query 7711 // AAC profile support 7712 if (param.nProfileIndex == 0) { 7713 ALOGW("component %s doesn't support profile query.", name.c_str()); 7714 } 7715 } 7716 7717 if (isVideo && !isEncoder) { 7718 native_handle_t *sidebandHandle = NULL; 7719 if (omx->configureVideoTunnelMode( 7720 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7721 // tunneled playback includes adaptive playback 7722 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7723 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7724 } else if (omx->storeMetaDataInBuffers( 7725 node, kPortIndexOutput, OMX_TRUE) == OK || 7726 omx->prepareForAdaptivePlayback( 7727 node, kPortIndexOutput, OMX_TRUE, 7728 1280 /* width */, 720 /* height */) == OK) { 7729 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7730 } 7731 } 7732 7733 if (isVideo && isEncoder) { 7734 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7735 InitOMXParams(¶ms); 7736 params.nPortIndex = kPortIndexOutput; 7737 // TODO: should we verify if fallback is supported? 7738 if (omx->getConfig( 7739 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7740 ¶ms, sizeof(params)) == OK) { 7741 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7742 } 7743 } 7744 7745 *caps = builder; 7746 omx->freeNode(node); 7747 client.disconnect(); 7748 return OK; 7749} 7750 7751// These are supposed be equivalent to the logic in 7752// "audio_channel_out_mask_from_count". 
7753//static 7754status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7755 switch (numChannels) { 7756 case 1: 7757 map[0] = OMX_AUDIO_ChannelCF; 7758 break; 7759 case 2: 7760 map[0] = OMX_AUDIO_ChannelLF; 7761 map[1] = OMX_AUDIO_ChannelRF; 7762 break; 7763 case 3: 7764 map[0] = OMX_AUDIO_ChannelLF; 7765 map[1] = OMX_AUDIO_ChannelRF; 7766 map[2] = OMX_AUDIO_ChannelCF; 7767 break; 7768 case 4: 7769 map[0] = OMX_AUDIO_ChannelLF; 7770 map[1] = OMX_AUDIO_ChannelRF; 7771 map[2] = OMX_AUDIO_ChannelLR; 7772 map[3] = OMX_AUDIO_ChannelRR; 7773 break; 7774 case 5: 7775 map[0] = OMX_AUDIO_ChannelLF; 7776 map[1] = OMX_AUDIO_ChannelRF; 7777 map[2] = OMX_AUDIO_ChannelCF; 7778 map[3] = OMX_AUDIO_ChannelLR; 7779 map[4] = OMX_AUDIO_ChannelRR; 7780 break; 7781 case 6: 7782 map[0] = OMX_AUDIO_ChannelLF; 7783 map[1] = OMX_AUDIO_ChannelRF; 7784 map[2] = OMX_AUDIO_ChannelCF; 7785 map[3] = OMX_AUDIO_ChannelLFE; 7786 map[4] = OMX_AUDIO_ChannelLR; 7787 map[5] = OMX_AUDIO_ChannelRR; 7788 break; 7789 case 7: 7790 map[0] = OMX_AUDIO_ChannelLF; 7791 map[1] = OMX_AUDIO_ChannelRF; 7792 map[2] = OMX_AUDIO_ChannelCF; 7793 map[3] = OMX_AUDIO_ChannelLFE; 7794 map[4] = OMX_AUDIO_ChannelLR; 7795 map[5] = OMX_AUDIO_ChannelRR; 7796 map[6] = OMX_AUDIO_ChannelCS; 7797 break; 7798 case 8: 7799 map[0] = OMX_AUDIO_ChannelLF; 7800 map[1] = OMX_AUDIO_ChannelRF; 7801 map[2] = OMX_AUDIO_ChannelCF; 7802 map[3] = OMX_AUDIO_ChannelLFE; 7803 map[4] = OMX_AUDIO_ChannelLR; 7804 map[5] = OMX_AUDIO_ChannelRR; 7805 map[6] = OMX_AUDIO_ChannelLS; 7806 map[7] = OMX_AUDIO_ChannelRS; 7807 break; 7808 default: 7809 return -EINVAL; 7810 } 7811 7812 return OK; 7813} 7814 7815} // namespace android 7816