ACodec.cpp revision 96bdf620189622005e83b4f1421c4f25b7fa1729
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void 
onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430protected: 431 
virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new 
IdleToLoadedState(this); 539 mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 545 546 changeState(mUninitializedState); 547} 548 549ACodec::~ACodec() { 550} 551 552void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 553 mNotify = msg; 554} 555 556void ACodec::initiateSetup(const sp<AMessage> &msg) { 557 msg->setWhat(kWhatSetup); 558 msg->setTarget(this); 559 msg->post(); 560} 561 562void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 563 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 564 msg->setMessage("params", params); 565 msg->post(); 566} 567 568void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 569 msg->setWhat(kWhatAllocateComponent); 570 msg->setTarget(this); 571 msg->post(); 572} 573 574void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 575 msg->setWhat(kWhatConfigureComponent); 576 msg->setTarget(this); 577 msg->post(); 578} 579 580status_t ACodec::setSurface(const sp<Surface> &surface) { 581 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 582 msg->setObject("surface", surface); 583 584 sp<AMessage> response; 585 status_t err = msg->postAndAwaitResponse(&response); 586 587 if (err == OK) { 588 (void)response->findInt32("err", &err); 589 } 590 return err; 591} 592 593void ACodec::initiateCreateInputSurface() { 594 (new AMessage(kWhatCreateInputSurface, this))->post(); 595} 596 597void ACodec::initiateSetInputSurface( 598 const sp<PersistentSurface> &surface) { 599 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 600 msg->setObject("input-surface", surface); 601 msg->post(); 602} 603 604void ACodec::signalEndOfInputStream() { 605 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 606} 607 608void ACodec::initiateStart() { 609 (new AMessage(kWhatStart, this))->post(); 610} 611 612void ACodec::signalFlush() { 
613 ALOGV("[%s] signalFlush", mComponentName.c_str()); 614 (new AMessage(kWhatFlush, this))->post(); 615} 616 617void ACodec::signalResume() { 618 (new AMessage(kWhatResume, this))->post(); 619} 620 621void ACodec::initiateShutdown(bool keepComponentAllocated) { 622 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 623 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 624 msg->post(); 625 if (!keepComponentAllocated) { 626 // ensure shutdown completes in 3 seconds 627 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 628 } 629} 630 631void ACodec::signalRequestIDRFrame() { 632 (new AMessage(kWhatRequestIDRFrame, this))->post(); 633} 634 635// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 636// Some codecs may return input buffers before having them processed. 637// This causes a halt if we already signaled an EOS on the input 638// port. For now keep submitting an output buffer if there was an 639// EOS on the input port, but not yet on the output port. 
640void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 641 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 642 mMetadataBuffersToSubmit > 0) { 643 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 644 } 645} 646 647status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 648 // allow keeping unset surface 649 if (surface == NULL) { 650 if (mNativeWindow != NULL) { 651 ALOGW("cannot unset a surface"); 652 return INVALID_OPERATION; 653 } 654 return OK; 655 } 656 657 // cannot switch from bytebuffers to surface 658 if (mNativeWindow == NULL) { 659 ALOGW("component was not configured with a surface"); 660 return INVALID_OPERATION; 661 } 662 663 ANativeWindow *nativeWindow = surface.get(); 664 // if we have not yet started the codec, we can simply set the native window 665 if (mBuffers[kPortIndexInput].size() == 0) { 666 mNativeWindow = surface; 667 return OK; 668 } 669 670 // we do not support changing a tunneled surface after start 671 if (mTunneled) { 672 ALOGW("cannot change tunneled surface"); 673 return INVALID_OPERATION; 674 } 675 676 int usageBits = 0; 677 status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits); 678 if (err != OK) { 679 return err; 680 } 681 682 int ignoredFlags = kVideoGrallocUsage; 683 // New output surface is not allowed to add new usage flag except ignored ones. 684 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 685 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 686 return BAD_VALUE; 687 } 688 689 // get min undequeued count. We cannot switch to a surface that has a higher 690 // undequeued count than we allocated. 
691 int minUndequeuedBuffers = 0; 692 err = nativeWindow->query( 693 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 694 &minUndequeuedBuffers); 695 if (err != 0) { 696 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 697 strerror(-err), -err); 698 return err; 699 } 700 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 701 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 702 minUndequeuedBuffers, mNumUndequeuedBuffers); 703 return BAD_VALUE; 704 } 705 706 // we cannot change the number of output buffers while OMX is running 707 // set up surface to the same count 708 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 709 ALOGV("setting up surface for %zu buffers", buffers.size()); 710 711 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 712 if (err != 0) { 713 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 714 -err); 715 return err; 716 } 717 718 // need to enable allocation when attaching 719 surface->getIGraphicBufferProducer()->allowAllocation(true); 720 721 // for meta data mode, we move dequeud buffers to the new surface. 
722 // for non-meta mode, we must move all registered buffers 723 for (size_t i = 0; i < buffers.size(); ++i) { 724 const BufferInfo &info = buffers[i]; 725 // skip undequeued buffers for meta data mode 726 if (storingMetadataInDecodedBuffers() 727 && !mLegacyAdaptiveExperiment 728 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 729 ALOGV("skipping buffer"); 730 continue; 731 } 732 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 733 734 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 735 if (err != OK) { 736 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 737 info.mGraphicBuffer->getNativeBuffer(), 738 strerror(-err), -err); 739 return err; 740 } 741 } 742 743 // cancel undequeued buffers to new surface 744 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 745 for (size_t i = 0; i < buffers.size(); ++i) { 746 BufferInfo &info = buffers.editItemAt(i); 747 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 748 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 749 err = nativeWindow->cancelBuffer( 750 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 751 info.mFenceFd = -1; 752 if (err != OK) { 753 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 754 info.mGraphicBuffer->getNativeBuffer(), 755 strerror(-err), -err); 756 return err; 757 } 758 } 759 } 760 // disallow further allocation 761 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 762 } 763 764 // push blank buffers to previous window if requested 765 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 766 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 767 } 768 769 mNativeWindow = nativeWindow; 770 mNativeWindowUsageBits = usageBits; 771 return OK; 772} 773 774status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 775 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 776 777 CHECK(mDealer[portIndex] == 
NULL); 778 CHECK(mBuffers[portIndex].isEmpty()); 779 780 status_t err; 781 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 782 if (storingMetadataInDecodedBuffers()) { 783 err = allocateOutputMetadataBuffers(); 784 } else { 785 err = allocateOutputBuffersFromNativeWindow(); 786 } 787 } else { 788 OMX_PARAM_PORTDEFINITIONTYPE def; 789 InitOMXParams(&def); 790 def.nPortIndex = portIndex; 791 792 err = mOMX->getParameter( 793 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 794 795 if (err == OK) { 796 MetadataBufferType type = 797 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 798 size_t bufSize = def.nBufferSize; 799 if (type == kMetadataBufferTypeANWBuffer) { 800 bufSize = sizeof(VideoNativeMetadata); 801 } else if (type == kMetadataBufferTypeNativeHandleSource) { 802 bufSize = sizeof(VideoNativeHandleMetadata); 803 } 804 805 // If using gralloc or native source input metadata buffers, allocate largest 806 // metadata size as we prefer to generate native source metadata, but component 807 // may require gralloc source. For camera source, allocate at least enough 808 // size for native metadata buffers. 
809 size_t allottedSize = bufSize; 810 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 811 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 812 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 813 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 814 } 815 816 size_t conversionBufferSize = 0; 817 818 sp<DataConverter> converter = mConverter[portIndex]; 819 if (converter != NULL) { 820 // here we assume sane conversions of max 4:1, so result fits in int32 821 if (portIndex == kPortIndexInput) { 822 conversionBufferSize = converter->sourceSize(bufSize); 823 } else { 824 conversionBufferSize = converter->targetSize(bufSize); 825 } 826 } 827 828 size_t alignment = MemoryDealer::getAllocationAlignment(); 829 830 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 831 mComponentName.c_str(), 832 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 833 portIndex == kPortIndexInput ? 
"input" : "output"); 834 835 // verify buffer sizes to avoid overflow in align() 836 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 837 ALOGE("b/22885421"); 838 return NO_MEMORY; 839 } 840 841 // don't modify bufSize as OMX may not expect it to increase after negotiation 842 size_t alignedSize = align(bufSize, alignment); 843 size_t alignedConvSize = align(conversionBufferSize, alignment); 844 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 845 ALOGE("b/22885421"); 846 return NO_MEMORY; 847 } 848 849 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 850 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 851 852 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 853 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 854 if (mem == NULL || mem->pointer() == NULL) { 855 return NO_MEMORY; 856 } 857 858 BufferInfo info; 859 info.mStatus = BufferInfo::OWNED_BY_US; 860 info.mFenceFd = -1; 861 info.mRenderInfo = NULL; 862 info.mNativeHandle = NULL; 863 864 uint32_t requiresAllocateBufferBit = 865 (portIndex == kPortIndexInput) 866 ? kRequiresAllocateBufferOnInputPorts 867 : kRequiresAllocateBufferOnOutputPorts; 868 869 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 870 mem.clear(); 871 872 void *ptr = NULL; 873 sp<NativeHandle> native_handle; 874 err = mOMX->allocateSecureBuffer( 875 mNode, portIndex, bufSize, &info.mBufferID, 876 &ptr, &native_handle); 877 878 // TRICKY: this representation is unorthodox, but ACodec requires 879 // an ABuffer with a proper size to validate range offsets and lengths. 880 // Since mData is never referenced for secure input, it is used to store 881 // either the pointer to the secure buffer, or the opaque handle as on 882 // some devices ptr is actually an opaque handle, not a pointer. 
                // TRICKY2: use native handle as the base of the ABuffer if received one,
                // because Widevine source only receives these base addresses.
                const native_handle_t *native_handle_ptr =
                    native_handle == NULL ? NULL : native_handle->handle();
                info.mData = new ABuffer(
                        ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                info.mNativeHandle = native_handle;
                info.mCodecData = info.mData;
            } else if (mQuirks & requiresAllocateBufferBit) {
                // Component quirk: it must allocate its own buffer backed by our shared memory.
                err = mOMX->allocateBufferWithBackup(
                        mNode, portIndex, mem, &info.mBufferID, allottedSize);
            } else {
                err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
            }

            if (mem != NULL) {
                info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                info.mCodecRef = mem;

                if (type == kMetadataBufferTypeANWBuffer) {
                    // A freshly allocated metadata buffer carries no fence yet.
                    ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                }

                // if we require conversion, allocate conversion buffer for client use;
                // otherwise, reuse codec buffer
                if (mConverter[portIndex] != NULL) {
                    CHECK_GT(conversionBufferSize, (size_t)0);
                    mem = mDealer[portIndex]->allocate(conversionBufferSize);
                    if (mem == NULL|| mem->pointer() == NULL) {
                        return NO_MEMORY;
                    }
                    info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                    info.mMemRef = mem;
                } else {
                    info.mData = info.mCodecData;
                    info.mMemRef = info.mCodecRef;
                }
            }

            mBuffers[portIndex].push(info);
        }
    }

    if (err != OK) {
        return err;
    }

    // Tell the client (MediaCodec) about every buffer now registered on this port.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Queries the output port definition and the component's requested gralloc
// usage bits, merges in protection and default video usage flags, and applies
// size/format/usage to the given native window. On success, *finalUsage holds
// the combined usage bits that were applied.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;  // remember component-requested bits for the log below

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // Reset cached window state; it will be re-derived for the new window.
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates the output buffer count between the OMX component and the native
// window, and reports the agreed count/size plus the window's minimum number
// of undequeued buffers. For tunneled playback no window buffers are needed.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // If the component rejects a count, retry with progressively fewer extras.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // Treat the accepted extras as additional undequeued buffers.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues graphic buffers from the native window and registers each one with
// the OMX component. The window's required minimum of undequeued buffers is
// returned (cancelled) to the window at the end; on failure, every buffer we
// still own is cancelled instead.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        // Non-metadata mode preallocates all buffers up front, so temporarily
        // permit the producer to allocate.
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // Placeholder ABuffer; the actual pixel data lives in the GraphicBuffer.
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error; don't let a later cancel overwrite it.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small shared-memory metadata buffers (one per output buffer) and
// registers them with the component. In the legacy-adaptive experiment the
// graphic buffers themselves are also preallocated and preregistered so that
// resolution changes do not require reallocation.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // No fence attached to a freshly created metadata buffer.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        // NOTE(review): a useBuffer failure here does not break out of the
        // loop; the last iteration's status is what gets returned — confirm
        // this matches the intended error handling.
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and hands it to the component
// for filling. Only valid in metadata mode; decrements the outstanding
// submit budget on each call.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // Fence ownership transfers to the component with fillBuffer.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks until the fence fd signals (or times out); fd < 0 means "no fence"
// and succeeds immediately. dbg is only used for the timeout log message.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}
// static
// Human-readable name for a BufferInfo ownership state (for logs).
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs one line per buffer on the given port: id, graphic buffer pointers,
// ownership state and the dequeue counter at which it was last dequeued.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window via cancelBuffer,
// transferring the write fence with it. Ownership is flipped to
// OWNED_BY_NATIVE_WINDOW even if the cancel fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // The fence fd is consumed by cancelBuffer.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Records render-tracking info for a buffer that just came back from the
// window, then flushes any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback when SurfaceFlinger reports a frame as rendered; dumps the render
// queue if the tracker rejects the event.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to 'until'), unlinks
// their tracking info from our buffer table, and posts a rendered-frames
// notification to the client if there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and matches it against our buffer
// table. Stale buffers (known-but-already-ours, or unrecognized in non-meta
// mode) are discarded and the dequeue is retried. In metadata mode an
// unrecognized buffer replaces the least-recently-dequeued buffer the window
// still holds. Returns NULL on error, in tunneled mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
             " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on a port (iterating backwards since freeBuffer removes
// entries) and clears the port's memory dealer. Returns the first error seen.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees output buffers that are neither with the component nor downstream;
// buffers still in flight are left alone. Returns the first error seen.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees buffer i on the given port: cancels it back to the window if we own
// it, releases the OMX buffer, closes any stray fence, drops render tracking,
// and removes the table entry regardless of errors.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for a buffer by its OMX buffer id; optionally reports the
// slot index. Returns NULL (and logs) if the id is unknown.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Maps the mime type to a standard OMX component role and applies it to our
// node; BAD_VALUE if the mime type has no known role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Table lookup from mime type to the standard OMX role string for either the
// decoder or encoder variant; NULL for unknown mime types.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                  : kMimeToRole[i].decoderRole;
}

//static
// Applies a standard component role string to the given OMX node, truncating
// to the OMX string size limit.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Configures the component from the client-supplied format message: role,
// bitrate, metadata modes, native window setup, tunneling, etc.
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
        outputFormat->setInt32("bitrate", bitRate);
        outputFormat->setInt32("max-bitrate", bitRate);
    }

    // Encoder input metadata mode (e.g. buffers carry gralloc handles instead
    // of pixel data), requested by the client via a private key.
    int32_t storeMeta;
    if (encoder
            && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
            && storeMeta != kMetadataBufferTypeInvalid) {
        mInputMetadataType = (MetadataBufferType)storeMeta;
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                    mComponentName.c_str(), err);

            return err;
        } else if (storeMeta == kMetadataBufferTypeANWBuffer
                && mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            // IOMX translates ANWBuffers to gralloc source already.
            mInputMetadataType = (MetadataBufferType)storeMeta;
        }

        uint32_t usageBits;
        if (mOMX->getParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                &usageBits, sizeof(usageBits)) == OK) {
            inputFormat->setInt32(
                    "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
        }
    }

    // Ask the encoder to prepend SPS/PPS to IDR frames if requested; this is
    // a vendor extension index, so failure after a successful lookup is fatal.
    int32_t prependSPSPPS = 0;
    if (encoder
            && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
            && prependSPSPPS != 0) {
        OMX_INDEXTYPE index;
        err = mOMX->getExtensionIndex(
                mNode,
                "OMX.google.android.index.prependSPSPPSToIDRFrames",
                &index);

        if (err == OK) {
            PrependSPSPPSToIDRFramesParams params;
            InitOMXParams(&params);
            params.bEnable = OMX_TRUE;

            err = mOMX->setParameter(
                    mNode, index, &params, sizeof(params));
        }

        if (err != OK) {
            ALOGE("Encoder could not be configured to emit SPS/PPS before "
                  "IDR frames. (err %d)", err);

            return err;
        }
    }

    // Only enable metadata mode on encoder output if encoder can prepend
    // sps/pps to idr frames, since in metadata mode the bitstream is in an
    // opaque handle, to which we don't have access.
    int32_t video = !strncasecmp(mime, "video/", 6);
    mIsVideo = video;
    if (encoder && video) {
        OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
            && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
            && storeMeta != 0);

        mOutputMetadataType = kMetadataBufferTypeNativeHandleSource;
        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
                mComponentName.c_str(), err);
        }

        // Optional encoder tuning parameters; -1 / false mean "not set".
        if (!msg->findInt64(
                    "repeat-previous-frame-after",
                    &mRepeatFrameDelayUs)) {
            mRepeatFrameDelayUs = -1ll;
        }

        if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
            mMaxPtsGapUs = -1ll;
        }

        if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
            mMaxFps = -1;
        }

        if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
            mTimePerCaptureUs = -1ll;
        }

        if (!msg->findInt32(
                    "create-input-buffers-suspended",
                    (int32_t*)&mCreateInputBuffersSuspended)) {
            mCreateInputBuffersSuspended = false;
        }
    }

    // NOTE: we only use native window for video decoders
    sp<RefBase> obj;
    bool haveNativeWindow = msg->findObject("native-window", &obj)
            && obj != NULL && video && !encoder;
    mUsingNativeWindow = haveNativeWindow;
    mLegacyAdaptiveExperiment = false;
    if (video && !encoder) {
        inputFormat->setInt32("adaptive-playback", false);

        int32_t usageProtected;
        if (msg->findInt32("protected", &usageProtected) && usageProtected) {
            // Protected content requires a window and blank-buffer push on teardown.
            if (!haveNativeWindow) {
                ALOGE("protected output buffers must be sent to an ANativeWindow");
                return PERMISSION_DENIED;
            }
            mFlags |= kFlagIsGrallocUsageProtected;
            mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
        }

        if (mFlags & kFlagIsSecure) {
            // use native_handles for secure input buffers
            err = mOMX->enableNativeBuffers(
                    mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE);
            ALOGI_IF(err != OK, "falling back to non-native_handles");
            err = OK; // ignore error for now
        }
    }
    if (haveNativeWindow) {
        sp<ANativeWindow> nativeWindow =
            static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));

        // START of temporary support for automatic FRC - THIS WILL BE REMOVED
        int32_t autoFrc;
        if (msg->findInt32("auto-frc", &autoFrc)) {
            bool enabled = autoFrc;
            OMX_CONFIG_BOOLEANTYPE config;
            InitOMXParams(&config);
            config.bEnabled = (OMX_BOOL)enabled;
            status_t temp = mOMX->setConfig(
                    mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
                    &config, sizeof(config));
            if (temp == OK) {
                outputFormat->setInt32("auto-frc", enabled);
            } else if (enabled) {
                ALOGI("codec does not support requested auto-frc (err %d)", temp);
            }
        }
        // END of temporary support for automatic FRC

        int32_t tunneled;
        if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
            tunneled != 0) {
            ALOGI("Configuring TUNNELED video playback.");
            mTunneled = true;

            int32_t audioHwSync = 0;
            if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
                ALOGW("No Audio HW Sync provided for video tunnel");
            }
            err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
            if (err != OK) {
                ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
                        audioHwSync, nativeWindow.get());
                return err;
            }

            int32_t maxWidth = 0, maxHeight = 0;
            if (msg->findInt32("max-width", &maxWidth) &&
                    msg->findInt32("max-height", &maxHeight)) {

                err = mOMX->prepareForAdaptivePlayback(
                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
                if (err != OK) {
                    ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
                            mComponentName.c_str(), err);
                    // allow failure
                    err = OK;
                } else {
                    inputFormat->setInt32("max-width", maxWidth);
                    inputFormat->setInt32("max-height", maxHeight);
                    inputFormat->setInt32("adaptive-playback", true);
                }
            }
        } else {
            ALOGV("Configuring CPU controlled video playback.");
            mTunneled = false;

            // Explicitly reset the sideband handle of the window for
            // non-tunneled video in case the window was previously used
            // for a tunneled video playback.
            err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
            if (err != OK) {
                ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
                return err;
            }

            // Always try to enable dynamic output buffers on native surface
            mOutputMetadataType = kMetadataBufferTypeANWBuffer;
            err = mOMX->storeMetaDataInBuffers(
                    mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
            if (err != OK) {
                ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
                        mComponentName.c_str(), err);

                // if adaptive playback has been requested, try JB fallback
                // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
                // LARGE MEMORY REQUIREMENT

                // we will not do adaptive playback on software accessed
                // surfaces as they never had to respond to changes in the
                // crop window, and we don't trust that they will be able to.
                int usageBits = 0;
                bool canDoAdaptivePlayback;

                if (nativeWindow->query(
                        nativeWindow.get(),
                        NATIVE_WINDOW_CONSUMER_USAGE_BITS,
                        &usageBits) != OK) {
                    canDoAdaptivePlayback = false;
                } else {
                    canDoAdaptivePlayback =
                        (usageBits &
                                (GRALLOC_USAGE_SW_READ_MASK |
                                        GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
                }

                int32_t maxWidth = 0, maxHeight = 0;
                if (canDoAdaptivePlayback &&
                        msg->findInt32("max-width", &maxWidth) &&
                        msg->findInt32("max-height", &maxHeight)) {
                    ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
                            mComponentName.c_str(), maxWidth, maxHeight);

                    err = mOMX->prepareForAdaptivePlayback(
                            mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
                            maxHeight);
                    ALOGW_IF(err != OK,
                            "[%s] prepareForAdaptivePlayback failed w/ err %d",
                            mComponentName.c_str(), err);

                    if (err == OK) {
                        inputFormat->setInt32("max-width", maxWidth);
                        inputFormat->setInt32("max-height", maxHeight);
                        inputFormat->setInt32("adaptive-playback", true);
                    }
                }
                // allow failure
                err = OK;
            } else {
                ALOGV("[%s] storeMetaDataInBuffers succeeded",
                        mComponentName.c_str());
                CHECK(storingMetadataInDecodedBuffers());
                mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
                        "legacy-adaptive", !msg->contains("no-experiments"));

                inputFormat->setInt32("adaptive-playback", true);
            }

            int32_t push;
            if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
                    && push != 0) {
                mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
            }
        }

        int32_t rotationDegrees;
        if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
            mRotationDegrees = rotationDegrees;
        } else {
            mRotationDegrees = 0;
        }
    }

    AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
    (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
    // invalid encodings will default to
PCM-16bit in setupRawAudioFormat. 2015 2016 if (video) { 2017 // determine need for software renderer 2018 bool usingSwRenderer = false; 2019 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2020 usingSwRenderer = true; 2021 haveNativeWindow = false; 2022 } 2023 2024 if (encoder) { 2025 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2026 } else { 2027 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2028 } 2029 2030 if (err != OK) { 2031 return err; 2032 } 2033 2034 if (haveNativeWindow) { 2035 mNativeWindow = static_cast<Surface *>(obj.get()); 2036 } 2037 2038 // initialize native window now to get actual output format 2039 // TODO: this is needed for some encoders even though they don't use native window 2040 err = initNativeWindow(); 2041 if (err != OK) { 2042 return err; 2043 } 2044 2045 // fallback for devices that do not handle flex-YUV for native buffers 2046 if (haveNativeWindow) { 2047 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2048 if (msg->findInt32("color-format", &requestedColorFormat) && 2049 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2050 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2051 if (err != OK) { 2052 return err; 2053 } 2054 int32_t colorFormat = OMX_COLOR_FormatUnused; 2055 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2056 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2057 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2058 return BAD_VALUE; 2059 } 2060 ALOGD("[%s] Requested output format %#x and got %#x.", 2061 mComponentName.c_str(), requestedColorFormat, colorFormat); 2062 if (!isFlexibleColorFormat( 2063 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2064 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2065 // device did not handle flex-YUV request for native window, fall back 2066 // to SW renderer 2067 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2068 mNativeWindow.clear(); 2069 mNativeWindowUsageBits = 0; 2070 haveNativeWindow = false; 2071 usingSwRenderer = true; 2072 if (storingMetadataInDecodedBuffers()) { 2073 err = mOMX->storeMetaDataInBuffers( 2074 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2075 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2076 // TODO: implement adaptive-playback support for bytebuffer mode. 2077 // This is done by SW codecs, but most HW codecs don't support it. 2078 inputFormat->setInt32("adaptive-playback", false); 2079 } 2080 if (err == OK) { 2081 err = mOMX->enableNativeBuffers( 2082 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2083 } 2084 if (mFlags & kFlagIsGrallocUsageProtected) { 2085 // fallback is not supported for protected playback 2086 err = PERMISSION_DENIED; 2087 } else if (err == OK) { 2088 err = setupVideoDecoder( 2089 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2090 } 2091 } 2092 } 2093 } 2094 2095 if (usingSwRenderer) { 2096 outputFormat->setInt32("using-sw-renderer", 1); 2097 } 2098 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2099 int32_t numChannels, sampleRate; 2100 if (!msg->findInt32("channel-count", &numChannels) 2101 || !msg->findInt32("sample-rate", &sampleRate)) { 2102 // Since we did not always check for these, leave them optional 2103 // and have the decoder figure it all out. 2104 err = OK; 2105 } else { 2106 err = setupRawAudioFormat( 2107 encoder ? 
kPortIndexInput : kPortIndexOutput, 2108 sampleRate, 2109 numChannels); 2110 } 2111 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2112 int32_t numChannels, sampleRate; 2113 if (!msg->findInt32("channel-count", &numChannels) 2114 || !msg->findInt32("sample-rate", &sampleRate)) { 2115 err = INVALID_OPERATION; 2116 } else { 2117 int32_t isADTS, aacProfile; 2118 int32_t sbrMode; 2119 int32_t maxOutputChannelCount; 2120 int32_t pcmLimiterEnable; 2121 drcParams_t drc; 2122 if (!msg->findInt32("is-adts", &isADTS)) { 2123 isADTS = 0; 2124 } 2125 if (!msg->findInt32("aac-profile", &aacProfile)) { 2126 aacProfile = OMX_AUDIO_AACObjectNull; 2127 } 2128 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2129 sbrMode = -1; 2130 } 2131 2132 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2133 maxOutputChannelCount = -1; 2134 } 2135 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2136 // value is unknown 2137 pcmLimiterEnable = -1; 2138 } 2139 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2140 // value is unknown 2141 drc.encodedTargetLevel = -1; 2142 } 2143 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2144 // value is unknown 2145 drc.drcCut = -1; 2146 } 2147 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2148 // value is unknown 2149 drc.drcBoost = -1; 2150 } 2151 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2152 // value is unknown 2153 drc.heavyCompression = -1; 2154 } 2155 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2156 // value is unknown 2157 drc.targetRefLevel = -1; 2158 } 2159 2160 err = setupAACCodec( 2161 encoder, numChannels, sampleRate, bitRate, aacProfile, 2162 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2163 pcmLimiterEnable); 2164 } 2165 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2166 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2167 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2168 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2169 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2170 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2171 // These are PCM-like formats with a fixed sample rate but 2172 // a variable number of channels. 2173 2174 int32_t numChannels; 2175 if (!msg->findInt32("channel-count", &numChannels)) { 2176 err = INVALID_OPERATION; 2177 } else { 2178 int32_t sampleRate; 2179 if (!msg->findInt32("sample-rate", &sampleRate)) { 2180 sampleRate = 8000; 2181 } 2182 err = setupG711Codec(encoder, sampleRate, numChannels); 2183 } 2184 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2185 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2186 if (encoder && 2187 (!msg->findInt32("channel-count", &numChannels) 2188 || !msg->findInt32("sample-rate", &sampleRate))) { 2189 ALOGE("missing channel count or sample rate for FLAC encoder"); 2190 err = INVALID_OPERATION; 2191 } else { 2192 if (encoder) { 2193 if (!msg->findInt32( 2194 "complexity", &compressionLevel) && 2195 !msg->findInt32( 2196 "flac-compression-level", &compressionLevel)) { 2197 compressionLevel = 5; // default FLAC compression level 2198 } else if (compressionLevel < 0) { 2199 ALOGW("compression level %d outside [0..8] range, " 2200 "using 0", 2201 compressionLevel); 2202 compressionLevel = 0; 2203 } else if (compressionLevel > 8) { 2204 ALOGW("compression level %d outside [0..8] range, " 2205 "using 8", 2206 compressionLevel); 2207 compressionLevel = 8; 2208 } 2209 } 2210 err = setupFlacCodec( 2211 encoder, numChannels, sampleRate, compressionLevel); 2212 } 2213 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2214 int32_t numChannels, sampleRate; 2215 if (encoder 2216 || !msg->findInt32("channel-count", &numChannels) 2217 || !msg->findInt32("sample-rate", &sampleRate)) { 2218 err = INVALID_OPERATION; 2219 } else { 2220 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2221 } 2222 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2223 int32_t numChannels; 2224 int32_t sampleRate; 2225 if (!msg->findInt32("channel-count", &numChannels) 2226 || !msg->findInt32("sample-rate", &sampleRate)) { 2227 err = INVALID_OPERATION; 2228 } else { 2229 err = setupAC3Codec(encoder, numChannels, sampleRate); 2230 } 2231 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2232 int32_t numChannels; 2233 int32_t sampleRate; 2234 if (!msg->findInt32("channel-count", &numChannels) 2235 || !msg->findInt32("sample-rate", &sampleRate)) { 2236 err = INVALID_OPERATION; 2237 } else { 2238 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2239 } 2240 } 2241 2242 if (err != OK) { 2243 return err; 2244 } 2245 2246 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2247 mEncoderDelay = 0; 2248 } 2249 2250 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2251 mEncoderPadding = 0; 2252 } 2253 2254 if (msg->findInt32("channel-mask", &mChannelMask)) { 2255 mChannelMaskPresent = true; 2256 } else { 2257 mChannelMaskPresent = false; 2258 } 2259 2260 int32_t maxInputSize; 2261 if (msg->findInt32("max-input-size", &maxInputSize)) { 2262 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2263 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2264 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2265 } 2266 2267 int32_t priority; 2268 if (msg->findInt32("priority", &priority)) { 2269 err = setPriority(priority); 2270 } 2271 2272 int32_t rateInt = -1; 2273 float rateFloat = -1; 2274 if (!msg->findFloat("operating-rate", &rateFloat)) { 2275 msg->findInt32("operating-rate", &rateInt); 2276 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // Encoder consumes PCM: convert from the requested encoding to
            // whatever the codec's input port actually accepts.
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // Decoder produces PCM: convert from the codec's output encoding
            // to the encoding the client requested.
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Best-effort: forwards the requested codec priority to the component via
// OMX_IndexConfigPriority. A component that rejects the config is tolerated
// (logged, still returns OK); only a negative priority is rejected up front.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        // optional feature; do not fail configure over it
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Best-effort: conveys the desired operating rate to the component via
// OMX_IndexConfigOperatingRate. For video the rate is scaled by 65536
// (16.16 fixed point, hence the 65535 upper bound); for audio it is passed
// as a plain integer. An unsupported config is logged and ignored.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        // convert to 16.16 fixed point, rounding to nearest
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        // optional feature; do not fail configure over it
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Reads the encoder's intra-refresh period, preferring the Android
// OMX_IndexConfigAndroidIntraRefresh extension and falling back to the
// standard cyclic intra-refresh parameter (converted from a macroblock
// count to a period using 16x16 macroblocks).
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // neither index supported, or intra refresh disabled
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the encoder's intra-refresh period. First tries the Android
// OMX_IndexConfigAndroidIntraRefresh config; if the component does not
// support it, falls back to the standard cyclic intra-refresh parameter —
// but only while still in configure state (inConfigure), since
// setParameter is not legal afterwards.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Ensures the given port's buffers are at least `size` bytes, verifying the
// new size by reading the port definition back (some components ignore the
// requested value).
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        // already large enough
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // read back to confirm the component honored the request
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Enumerates the audio formats supported on the given port (up to
// kMaxIndicesToCheck entries) and selects `desiredFormat` if the component
// advertises it; returns ERROR_UNSUPPORTED otherwise.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // bail out instead of enumerating forever on broken components
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC. The PCM side (encoder input / decoder
// output) is set up first via setupRawAudioFormat. For encoders this
// programs profile, bitrate and SBR tools on the output port; for decoders
// it programs the stream format (ADTS vs MP4FF) and best-effort DRC /
// presentation parameters. ADTS encoding is rejected.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        // ADTS output from the encoder is not supported
        return -EINVAL;
    }

    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // decoder path: configure the AAC input port
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / presentation values of -1 mean "unknown" (set by the caller)
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

// Configures the component for AC3 decoding (encoding is rejected below);
// the decoder's output port is set up for raw PCM first.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
            kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures the component for E-AC3 decoding (encoding is rejected);
// mirrors setupAC3Codec but uses the Android E-AC3 OMX extension.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bits per second) to the smallest standard AMR
// band mode whose rate is at least `bps` (thresholds are the standard AMR
// NB/WB rates); out-of-range requests saturate to the highest mode.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures the component for AMR-NB or AMR-WB. The band mode is derived
// from the requested bitrate; the PCM side (encoder input / decoder output)
// uses the fixed AMR sample rates (8 kHz NB / 16 kHz WB), mono.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
            16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// G.711 (A-law / mu-law) is decode-only here; only the raw PCM output port
// needs to be configured. Note the caller defaults sampleRate to 8000 when
// the format does not specify one.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures the component for FLAC. For encoders the compression level
// (already clamped to [0..8] by the caller) is programmed on the output
// port; both directions then get the raw PCM side set up.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Programs a port for linear PCM: sets the port's coding to PCM, then fills
// in OMX_AUDIO_PARAM_PCMMODETYPE (channels, rate, sample encoding). If the
// component rejects a non-16-bit encoding, retries with signed 16-bit.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    // map the requested AudioEncoding onto OMX numerical-data/bit-depth
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone appears to be treated as success (0)
        // here, so an unmappable channel count silently skips setting the
        // PCM params rather than failing — confirm this is intentional.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
OK && encoding != kAudioEncodingPcm16bit) { 2926 pcmParams.eNumData = OMX_NumericalDataSigned; 2927 pcmParams.nBitPerSample = 16; 2928 err = mOMX->setParameter( 2929 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2930 } 2931 return err; 2932} 2933 2934status_t ACodec::configureTunneledVideoPlayback( 2935 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2936 native_handle_t* sidebandHandle; 2937 2938 status_t err = mOMX->configureVideoTunnelMode( 2939 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2940 if (err != OK) { 2941 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2942 return err; 2943 } 2944 2945 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2946 if (err != OK) { 2947 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 2948 sidebandHandle, err); 2949 return err; 2950 } 2951 2952 return OK; 2953} 2954 2955status_t ACodec::setVideoPortFormatType( 2956 OMX_U32 portIndex, 2957 OMX_VIDEO_CODINGTYPE compressionFormat, 2958 OMX_COLOR_FORMATTYPE colorFormat, 2959 bool usingNativeBuffers) { 2960 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 2961 InitOMXParams(&format); 2962 format.nPortIndex = portIndex; 2963 format.nIndex = 0; 2964 bool found = false; 2965 2966 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2967 format.nIndex = index; 2968 status_t err = mOMX->getParameter( 2969 mNode, OMX_IndexParamVideoPortFormat, 2970 &format, sizeof(format)); 2971 2972 if (err != OK) { 2973 return err; 2974 } 2975 2976 // substitute back flexible color format to codec supported format 2977 OMX_U32 flexibleEquivalent; 2978 if (compressionFormat == OMX_VIDEO_CodingUnused 2979 && isFlexibleColorFormat( 2980 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 2981 && colorFormat == flexibleEquivalent) { 2982 ALOGI("[%s] using color format %#x in place of %#x", 2983 mComponentName.c_str(), format.eColorFormat, colorFormat); 2984 colorFormat = 
format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround for the TI encoder: only one of the two enumerated
        // fields is reliable per port, so match on the trustworthy one.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// MIME type <-> OMX video coding type table used by the two lookups below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Case-insensitive MIME -> OMX coding type lookup.  On failure returns
// ERROR_UNSUPPORTED and sets *codingType to OMX_VIDEO_CodingUnused.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: OMX coding type -> MIME.  Clears |mime| when unsupported.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Full port setup for a video decoder: input compression format, output color
// format, port dimensions / frame rate, color aspects and HDR static info.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer,
sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        // Components that cannot enumerate profiles are treated as legacy
        // VP9 decoders.
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // Requested color format unsupported: fall back to the
            // component-preferred output format.
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Frame rate may be supplied as float or int32; -1 means "unspecified".
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Looks up the vendor extension index used to describe color aspects;
// resets the cached index to 0 on failure.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes the color aspects in |params| to the codec (when the extension is
// available); when |verify| is set and the set succeeded, reads them back so
// the caller sees what the codec actually accepted.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Communicates configured color aspects to a video decoder.  For surface
// output, unspecified aspects are first defaulted from the video size.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool
usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and, when requested via
// bRequestingDataSpace, its dataspace) back into |params|.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Only warn for plain aspect queries; dataspace-related queries are
    // allowed to be unsupported.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format|, but only when
// the codec actually supports the color-aspects extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for the aspects in |params|: first by asking the
// codec (when |tryCodec|), falling back to the legacy aspect->dataspace
// mapping otherwise.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Resolves the final color aspects (and, when |dataSpace| is non-null, the
// dataspace) for a decoder and writes the negotiated aspects into
// |outputFormat|.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if
(dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Communicates HDR static metadata (read from |configFormat|) to the codec on
// |portIndex| and mirrors it into |outputFormat|.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    // NOTE(review): the |err| declared inside this loop shadows the outer
    // |err|, so a failure from setCodecColorAspects() is never seen by the
    // code below — confirm whether that is intentional.
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static metadata from the codec into |format|, only when the
// extension is supported.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Looks up the vendor extension index used to describe HDR static info;
// resets the cached index to 0 on failure.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}
3523 3524status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3525 status_t err = ERROR_UNSUPPORTED; 3526 if (mDescribeHDRStaticInfoIndex) { 3527 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3528 } 3529 3530 const HDRStaticInfo *info = ¶ms.sInfo; 3531 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3532 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3533 mComponentName.c_str(), 3534 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y, 3535 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3536 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3537 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3538 3539 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3540 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3541 mComponentName.c_str()); 3542 return err; 3543} 3544 3545status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3546 status_t err = ERROR_UNSUPPORTED; 3547 if (mDescribeHDRStaticInfoIndex) { 3548 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3549 } 3550 3551 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3552 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3553 mComponentName.c_str()); 3554 return err; 3555} 3556 3557status_t ACodec::setupVideoEncoder( 3558 const char *mime, const sp<AMessage> &msg, 3559 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3560 int32_t tmp; 3561 if (!msg->findInt32("color-format", &tmp)) { 3562 return INVALID_OPERATION; 3563 } 3564 3565 OMX_COLOR_FORMATTYPE colorFormat = 3566 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3567 3568 status_t err = setVideoPortFormatType( 3569 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3570 3571 if (err != OK) { 3572 ALOGE("[%s] does not support color 
format %d", 3573 mComponentName.c_str(), colorFormat); 3574 3575 return err; 3576 } 3577 3578 /* Input port configuration */ 3579 3580 OMX_PARAM_PORTDEFINITIONTYPE def; 3581 InitOMXParams(&def); 3582 3583 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3584 3585 def.nPortIndex = kPortIndexInput; 3586 3587 err = mOMX->getParameter( 3588 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3589 3590 if (err != OK) { 3591 return err; 3592 } 3593 3594 int32_t width, height, bitrate; 3595 if (!msg->findInt32("width", &width) 3596 || !msg->findInt32("height", &height) 3597 || !msg->findInt32("bitrate", &bitrate)) { 3598 return INVALID_OPERATION; 3599 } 3600 3601 video_def->nFrameWidth = width; 3602 video_def->nFrameHeight = height; 3603 3604 int32_t stride; 3605 if (!msg->findInt32("stride", &stride)) { 3606 stride = width; 3607 } 3608 3609 video_def->nStride = stride; 3610 3611 int32_t sliceHeight; 3612 if (!msg->findInt32("slice-height", &sliceHeight)) { 3613 sliceHeight = height; 3614 } 3615 3616 video_def->nSliceHeight = sliceHeight; 3617 3618 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3619 3620 float frameRate; 3621 if (!msg->findFloat("frame-rate", &frameRate)) { 3622 int32_t tmp; 3623 if (!msg->findInt32("frame-rate", &tmp)) { 3624 return INVALID_OPERATION; 3625 } 3626 frameRate = (float)tmp; 3627 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3628 } 3629 3630 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3631 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3632 // this is redundant as it was already set up in setVideoPortFormatType 3633 // FIXME for now skip this only for flexible YUV formats 3634 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3635 video_def->eColorFormat = colorFormat; 3636 } 3637 3638 err = mOMX->setParameter( 3639 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3640 3641 if (err != OK) { 3642 ALOGE("[%s] failed to set input port definition parameters.", 
3643 mComponentName.c_str()); 3644 3645 return err; 3646 } 3647 3648 /* Output port configuration */ 3649 3650 OMX_VIDEO_CODINGTYPE compressionFormat; 3651 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3652 3653 if (err != OK) { 3654 return err; 3655 } 3656 3657 err = setVideoPortFormatType( 3658 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3659 3660 if (err != OK) { 3661 ALOGE("[%s] does not support compression format %d", 3662 mComponentName.c_str(), compressionFormat); 3663 3664 return err; 3665 } 3666 3667 def.nPortIndex = kPortIndexOutput; 3668 3669 err = mOMX->getParameter( 3670 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3671 3672 if (err != OK) { 3673 return err; 3674 } 3675 3676 video_def->nFrameWidth = width; 3677 video_def->nFrameHeight = height; 3678 video_def->xFramerate = 0; 3679 video_def->nBitrate = bitrate; 3680 video_def->eCompressionFormat = compressionFormat; 3681 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3682 3683 err = mOMX->setParameter( 3684 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3685 3686 if (err != OK) { 3687 ALOGE("[%s] failed to set output port definition parameters.", 3688 mComponentName.c_str()); 3689 3690 return err; 3691 } 3692 3693 int32_t intraRefreshPeriod = 0; 3694 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3695 && intraRefreshPeriod >= 0) { 3696 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3697 if (err != OK) { 3698 ALOGI("[%s] failed setIntraRefreshPeriod. 
Failure is fine since this key is optional", 3699 mComponentName.c_str()); 3700 err = OK; 3701 } 3702 } 3703 3704 switch (compressionFormat) { 3705 case OMX_VIDEO_CodingMPEG4: 3706 err = setupMPEG4EncoderParameters(msg); 3707 break; 3708 3709 case OMX_VIDEO_CodingH263: 3710 err = setupH263EncoderParameters(msg); 3711 break; 3712 3713 case OMX_VIDEO_CodingAVC: 3714 err = setupAVCEncoderParameters(msg); 3715 break; 3716 3717 case OMX_VIDEO_CodingHEVC: 3718 err = setupHEVCEncoderParameters(msg); 3719 break; 3720 3721 case OMX_VIDEO_CodingVP8: 3722 case OMX_VIDEO_CodingVP9: 3723 err = setupVPXEncoderParameters(msg); 3724 break; 3725 3726 default: 3727 break; 3728 } 3729 3730 // Set up color aspects on input, but propagate them to the output format, as they will 3731 // not be read back from encoder. 3732 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3733 if (err == ERROR_UNSUPPORTED) { 3734 ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str()); 3735 err = OK; 3736 } 3737 3738 if (err != OK) { 3739 return err; 3740 } 3741 3742 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3743 if (err == ERROR_UNSUPPORTED) { // support is optional 3744 ALOGI("[%s] cannot encode HDR static metadata. 
Ignoring.", mComponentName.c_str()); 3745 err = OK; 3746 } 3747 3748 if (err == OK) { 3749 ALOGI("setupVideoEncoder succeeded"); 3750 } 3751 3752 return err; 3753} 3754 3755status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3756 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3757 InitOMXParams(¶ms); 3758 params.nPortIndex = kPortIndexOutput; 3759 3760 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3761 3762 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3763 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3764 int32_t mbs; 3765 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3766 return INVALID_OPERATION; 3767 } 3768 params.nCirMBs = mbs; 3769 } 3770 3771 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3772 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3773 int32_t mbs; 3774 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3775 return INVALID_OPERATION; 3776 } 3777 params.nAirMBs = mbs; 3778 3779 int32_t ref; 3780 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3781 return INVALID_OPERATION; 3782 } 3783 params.nAirRef = ref; 3784 } 3785 3786 status_t err = mOMX->setParameter( 3787 mNode, OMX_IndexParamVideoIntraRefresh, 3788 ¶ms, sizeof(params)); 3789 return err; 3790} 3791 3792static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3793 if (iFramesInterval < 0) { 3794 return 0xFFFFFFFF; 3795 } else if (iFramesInterval == 0) { 3796 return 0; 3797 } 3798 OMX_U32 ret = frameRate * iFramesInterval; 3799 return ret; 3800} 3801 3802static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3803 int32_t tmp; 3804 if (!msg->findInt32("bitrate-mode", &tmp)) { 3805 return OMX_Video_ControlRateVariable; 3806 } 3807 3808 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3809} 3810 3811status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3812 int32_t bitrate, iFrameInterval; 3813 if (!msg->findInt32("bitrate", 
&bitrate) 3814 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3815 return INVALID_OPERATION; 3816 } 3817 3818 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3819 3820 float frameRate; 3821 if (!msg->findFloat("frame-rate", &frameRate)) { 3822 int32_t tmp; 3823 if (!msg->findInt32("frame-rate", &tmp)) { 3824 return INVALID_OPERATION; 3825 } 3826 frameRate = (float)tmp; 3827 } 3828 3829 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3830 InitOMXParams(&mpeg4type); 3831 mpeg4type.nPortIndex = kPortIndexOutput; 3832 3833 status_t err = mOMX->getParameter( 3834 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3835 3836 if (err != OK) { 3837 return err; 3838 } 3839 3840 mpeg4type.nSliceHeaderSpacing = 0; 3841 mpeg4type.bSVH = OMX_FALSE; 3842 mpeg4type.bGov = OMX_FALSE; 3843 3844 mpeg4type.nAllowedPictureTypes = 3845 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3846 3847 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3848 if (mpeg4type.nPFrames == 0) { 3849 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3850 } 3851 mpeg4type.nBFrames = 0; 3852 mpeg4type.nIDCVLCThreshold = 0; 3853 mpeg4type.bACPred = OMX_TRUE; 3854 mpeg4type.nMaxPacketSize = 256; 3855 mpeg4type.nTimeIncRes = 1000; 3856 mpeg4type.nHeaderExtension = 0; 3857 mpeg4type.bReversibleVLC = OMX_FALSE; 3858 3859 int32_t profile; 3860 if (msg->findInt32("profile", &profile)) { 3861 int32_t level; 3862 if (!msg->findInt32("level", &level)) { 3863 return INVALID_OPERATION; 3864 } 3865 3866 err = verifySupportForProfileAndLevel(profile, level); 3867 3868 if (err != OK) { 3869 return err; 3870 } 3871 3872 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3873 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3874 } 3875 3876 err = mOMX->setParameter( 3877 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3878 3879 if (err != OK) { 3880 return err; 3881 } 3882 3883 err = configureBitrate(bitrate, 
bitrateMode); 3884 3885 if (err != OK) { 3886 return err; 3887 } 3888 3889 return setupErrorCorrectionParameters(); 3890} 3891 3892status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3893 int32_t bitrate, iFrameInterval; 3894 if (!msg->findInt32("bitrate", &bitrate) 3895 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3896 return INVALID_OPERATION; 3897 } 3898 3899 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3900 3901 float frameRate; 3902 if (!msg->findFloat("frame-rate", &frameRate)) { 3903 int32_t tmp; 3904 if (!msg->findInt32("frame-rate", &tmp)) { 3905 return INVALID_OPERATION; 3906 } 3907 frameRate = (float)tmp; 3908 } 3909 3910 OMX_VIDEO_PARAM_H263TYPE h263type; 3911 InitOMXParams(&h263type); 3912 h263type.nPortIndex = kPortIndexOutput; 3913 3914 status_t err = mOMX->getParameter( 3915 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3916 3917 if (err != OK) { 3918 return err; 3919 } 3920 3921 h263type.nAllowedPictureTypes = 3922 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3923 3924 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3925 if (h263type.nPFrames == 0) { 3926 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3927 } 3928 h263type.nBFrames = 0; 3929 3930 int32_t profile; 3931 if (msg->findInt32("profile", &profile)) { 3932 int32_t level; 3933 if (!msg->findInt32("level", &level)) { 3934 return INVALID_OPERATION; 3935 } 3936 3937 err = verifySupportForProfileAndLevel(profile, level); 3938 3939 if (err != OK) { 3940 return err; 3941 } 3942 3943 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3944 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3945 } 3946 3947 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3948 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3949 h263type.nPictureHeaderRepetition = 0; 3950 h263type.nGOBHeaderInterval = 0; 3951 3952 err = mOMX->setParameter( 3953 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3954 3955 if (err != OK) { 3956 return err; 3957 } 3958 3959 err = configureBitrate(bitrate, bitrateMode); 3960 3961 if (err != OK) { 3962 return err; 3963 } 3964 3965 return setupErrorCorrectionParameters(); 3966} 3967 3968// static 3969int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3970 int width, int height, int rate, int bitrate, 3971 OMX_VIDEO_AVCPROFILETYPE profile) { 3972 // convert bitrate to main/baseline profile kbps equivalent 3973 switch (profile) { 3974 case OMX_VIDEO_AVCProfileHigh10: 3975 bitrate = divUp(bitrate, 3000); break; 3976 case OMX_VIDEO_AVCProfileHigh: 3977 bitrate = divUp(bitrate, 1250); break; 3978 default: 3979 bitrate = divUp(bitrate, 1000); break; 3980 } 3981 3982 // convert size and rate to MBs 3983 width = divUp(width, 16); 3984 height = divUp(height, 16); 3985 int mbs = width * height; 3986 rate *= mbs; 3987 int maxDimension = max(width, height); 3988 3989 static const int limits[][5] = { 3990 /* MBps MB dim bitrate level */ 3991 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3992 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3993 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3994 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3995 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3996 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3997 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3998 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3999 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4000 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4001 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4002 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4003 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4004 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4005 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4006 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4007 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4008 }; 4009 4010 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
4011 const int (&limit)[5] = limits[i]; 4012 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4013 && bitrate <= limit[3]) { 4014 return limit[4]; 4015 } 4016 } 4017 return 0; 4018} 4019 4020status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4021 int32_t bitrate, iFrameInterval; 4022 if (!msg->findInt32("bitrate", &bitrate) 4023 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4024 return INVALID_OPERATION; 4025 } 4026 4027 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4028 4029 float frameRate; 4030 if (!msg->findFloat("frame-rate", &frameRate)) { 4031 int32_t tmp; 4032 if (!msg->findInt32("frame-rate", &tmp)) { 4033 return INVALID_OPERATION; 4034 } 4035 frameRate = (float)tmp; 4036 } 4037 4038 status_t err = OK; 4039 int32_t intraRefreshMode = 0; 4040 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4041 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4042 if (err != OK) { 4043 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4044 err, intraRefreshMode); 4045 return err; 4046 } 4047 } 4048 4049 OMX_VIDEO_PARAM_AVCTYPE h264type; 4050 InitOMXParams(&h264type); 4051 h264type.nPortIndex = kPortIndexOutput; 4052 4053 err = mOMX->getParameter( 4054 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4055 4056 if (err != OK) { 4057 return err; 4058 } 4059 4060 h264type.nAllowedPictureTypes = 4061 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4062 4063 int32_t profile; 4064 if (msg->findInt32("profile", &profile)) { 4065 int32_t level; 4066 if (!msg->findInt32("level", &level)) { 4067 return INVALID_OPERATION; 4068 } 4069 4070 err = verifySupportForProfileAndLevel(profile, level); 4071 4072 if (err != OK) { 4073 return err; 4074 } 4075 4076 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4077 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4078 } else { 4079 // Use baseline profile for AVC recording if profile is not 
specified. 4080 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4081 } 4082 4083 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4084 asString(h264type.eProfile), asString(h264type.eLevel)); 4085 4086 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4087 h264type.nSliceHeaderSpacing = 0; 4088 h264type.bUseHadamard = OMX_TRUE; 4089 h264type.nRefFrames = 1; 4090 h264type.nBFrames = 0; 4091 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4092 if (h264type.nPFrames == 0) { 4093 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4094 } 4095 h264type.nRefIdx10ActiveMinus1 = 0; 4096 h264type.nRefIdx11ActiveMinus1 = 0; 4097 h264type.bEntropyCodingCABAC = OMX_FALSE; 4098 h264type.bWeightedPPrediction = OMX_FALSE; 4099 h264type.bconstIpred = OMX_FALSE; 4100 h264type.bDirect8x8Inference = OMX_FALSE; 4101 h264type.bDirectSpatialTemporal = OMX_FALSE; 4102 h264type.nCabacInitIdc = 0; 4103 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4104 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4105 h264type.nSliceHeaderSpacing = 0; 4106 h264type.bUseHadamard = OMX_TRUE; 4107 h264type.nRefFrames = 2; 4108 h264type.nBFrames = 1; 4109 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4110 h264type.nAllowedPictureTypes = 4111 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4112 h264type.nRefIdx10ActiveMinus1 = 0; 4113 h264type.nRefIdx11ActiveMinus1 = 0; 4114 h264type.bEntropyCodingCABAC = OMX_TRUE; 4115 h264type.bWeightedPPrediction = OMX_TRUE; 4116 h264type.bconstIpred = OMX_TRUE; 4117 h264type.bDirect8x8Inference = OMX_TRUE; 4118 h264type.bDirectSpatialTemporal = OMX_TRUE; 4119 h264type.nCabacInitIdc = 1; 4120 } 4121 4122 if (h264type.nBFrames != 0) { 4123 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4124 } 4125 4126 h264type.bEnableUEP = OMX_FALSE; 4127 h264type.bEnableFMO = OMX_FALSE; 4128 h264type.bEnableASO = OMX_FALSE; 4129 h264type.bEnableRS = OMX_FALSE; 4130 
h264type.bFrameMBsOnly = OMX_TRUE; 4131 h264type.bMBAFF = OMX_FALSE; 4132 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4133 4134 err = mOMX->setParameter( 4135 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4136 4137 if (err != OK) { 4138 return err; 4139 } 4140 4141 return configureBitrate(bitrate, bitrateMode); 4142} 4143 4144status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4145 int32_t bitrate, iFrameInterval; 4146 if (!msg->findInt32("bitrate", &bitrate) 4147 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4148 return INVALID_OPERATION; 4149 } 4150 4151 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4152 4153 float frameRate; 4154 if (!msg->findFloat("frame-rate", &frameRate)) { 4155 int32_t tmp; 4156 if (!msg->findInt32("frame-rate", &tmp)) { 4157 return INVALID_OPERATION; 4158 } 4159 frameRate = (float)tmp; 4160 } 4161 4162 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4163 InitOMXParams(&hevcType); 4164 hevcType.nPortIndex = kPortIndexOutput; 4165 4166 status_t err = OK; 4167 err = mOMX->getParameter( 4168 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4169 if (err != OK) { 4170 return err; 4171 } 4172 4173 int32_t profile; 4174 if (msg->findInt32("profile", &profile)) { 4175 int32_t level; 4176 if (!msg->findInt32("level", &level)) { 4177 return INVALID_OPERATION; 4178 } 4179 4180 err = verifySupportForProfileAndLevel(profile, level); 4181 if (err != OK) { 4182 return err; 4183 } 4184 4185 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4186 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4187 } 4188 // TODO: finer control? 
4189 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4190 4191 err = mOMX->setParameter( 4192 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4193 if (err != OK) { 4194 return err; 4195 } 4196 4197 return configureBitrate(bitrate, bitrateMode); 4198} 4199 4200status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4201 int32_t bitrate; 4202 int32_t iFrameInterval = 0; 4203 size_t tsLayers = 0; 4204 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4205 OMX_VIDEO_VPXTemporalLayerPatternNone; 4206 static const uint32_t kVp8LayerRateAlloction 4207 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4208 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4209 {100, 100, 100}, // 1 layer 4210 { 60, 100, 100}, // 2 layers {60%, 40%} 4211 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4212 }; 4213 if (!msg->findInt32("bitrate", &bitrate)) { 4214 return INVALID_OPERATION; 4215 } 4216 msg->findInt32("i-frame-interval", &iFrameInterval); 4217 4218 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4219 4220 float frameRate; 4221 if (!msg->findFloat("frame-rate", &frameRate)) { 4222 int32_t tmp; 4223 if (!msg->findInt32("frame-rate", &tmp)) { 4224 return INVALID_OPERATION; 4225 } 4226 frameRate = (float)tmp; 4227 } 4228 4229 AString tsSchema; 4230 if (msg->findString("ts-schema", &tsSchema)) { 4231 if (tsSchema == "webrtc.vp8.1-layer") { 4232 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4233 tsLayers = 1; 4234 } else if (tsSchema == "webrtc.vp8.2-layer") { 4235 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4236 tsLayers = 2; 4237 } else if (tsSchema == "webrtc.vp8.3-layer") { 4238 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4239 tsLayers = 3; 4240 } else { 4241 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4242 } 4243 } 4244 4245 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4246 InitOMXParams(&vp8type); 4247 vp8type.nPortIndex = kPortIndexOutput; 4248 status_t err = 
mOMX->getParameter( 4249 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4250 &vp8type, sizeof(vp8type)); 4251 4252 if (err == OK) { 4253 if (iFrameInterval > 0) { 4254 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4255 } 4256 vp8type.eTemporalPattern = pattern; 4257 vp8type.nTemporalLayerCount = tsLayers; 4258 if (tsLayers > 0) { 4259 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4260 vp8type.nTemporalLayerBitrateRatio[i] = 4261 kVp8LayerRateAlloction[tsLayers - 1][i]; 4262 } 4263 } 4264 if (bitrateMode == OMX_Video_ControlRateConstant) { 4265 vp8type.nMinQuantizer = 2; 4266 vp8type.nMaxQuantizer = 63; 4267 } 4268 4269 err = mOMX->setParameter( 4270 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4271 &vp8type, sizeof(vp8type)); 4272 if (err != OK) { 4273 ALOGW("Extended VP8 parameters set failed: %d", err); 4274 } 4275 } 4276 4277 return configureBitrate(bitrate, bitrateMode); 4278} 4279 4280status_t ACodec::verifySupportForProfileAndLevel( 4281 int32_t profile, int32_t level) { 4282 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4283 InitOMXParams(¶ms); 4284 params.nPortIndex = kPortIndexOutput; 4285 4286 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4287 params.nProfileIndex = index; 4288 status_t err = mOMX->getParameter( 4289 mNode, 4290 OMX_IndexParamVideoProfileLevelQuerySupported, 4291 ¶ms, 4292 sizeof(params)); 4293 4294 if (err != OK) { 4295 return err; 4296 } 4297 4298 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4299 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4300 4301 if (profile == supportedProfile && level <= supportedLevel) { 4302 return OK; 4303 } 4304 4305 if (index == kMaxIndicesToCheck) { 4306 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4307 mComponentName.c_str(), index, 4308 params.eProfile, params.eLevel); 4309 } 4310 } 4311 return ERROR_UNSUPPORTED; 4312} 4313 4314status_t ACodec::configureBitrate( 
4315 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4316 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4317 InitOMXParams(&bitrateType); 4318 bitrateType.nPortIndex = kPortIndexOutput; 4319 4320 status_t err = mOMX->getParameter( 4321 mNode, OMX_IndexParamVideoBitrate, 4322 &bitrateType, sizeof(bitrateType)); 4323 4324 if (err != OK) { 4325 return err; 4326 } 4327 4328 bitrateType.eControlRate = bitrateMode; 4329 bitrateType.nTargetBitrate = bitrate; 4330 4331 return mOMX->setParameter( 4332 mNode, OMX_IndexParamVideoBitrate, 4333 &bitrateType, sizeof(bitrateType)); 4334} 4335 4336status_t ACodec::setupErrorCorrectionParameters() { 4337 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4338 InitOMXParams(&errorCorrectionType); 4339 errorCorrectionType.nPortIndex = kPortIndexOutput; 4340 4341 status_t err = mOMX->getParameter( 4342 mNode, OMX_IndexParamVideoErrorCorrection, 4343 &errorCorrectionType, sizeof(errorCorrectionType)); 4344 4345 if (err != OK) { 4346 return OK; // Optional feature. 
Ignore this failure 4347 } 4348 4349 errorCorrectionType.bEnableHEC = OMX_FALSE; 4350 errorCorrectionType.bEnableResync = OMX_TRUE; 4351 errorCorrectionType.nResynchMarkerSpacing = 256; 4352 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4353 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4354 4355 return mOMX->setParameter( 4356 mNode, OMX_IndexParamVideoErrorCorrection, 4357 &errorCorrectionType, sizeof(errorCorrectionType)); 4358} 4359 4360status_t ACodec::setVideoFormatOnPort( 4361 OMX_U32 portIndex, 4362 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4363 float frameRate) { 4364 OMX_PARAM_PORTDEFINITIONTYPE def; 4365 InitOMXParams(&def); 4366 def.nPortIndex = portIndex; 4367 4368 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4369 4370 status_t err = mOMX->getParameter( 4371 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4372 if (err != OK) { 4373 return err; 4374 } 4375 4376 if (portIndex == kPortIndexInput) { 4377 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4378 const size_t X = 64 * 1024; 4379 if (def.nBufferSize < X) { 4380 def.nBufferSize = X; 4381 } 4382 } 4383 4384 if (def.eDomain != OMX_PortDomainVideo) { 4385 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4386 return FAILED_TRANSACTION; 4387 } 4388 4389 video_def->nFrameWidth = width; 4390 video_def->nFrameHeight = height; 4391 4392 if (portIndex == kPortIndexInput) { 4393 video_def->eCompressionFormat = compressionFormat; 4394 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4395 if (frameRate >= 0) { 4396 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4397 } 4398 } 4399 4400 err = mOMX->setParameter( 4401 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4402 4403 return err; 4404} 4405 4406status_t ACodec::initNativeWindow() { 4407 if (mNativeWindow != NULL) { 4408 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4409 } 4410 4411 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4412 return OK; 4413} 4414 4415size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4416 size_t n = 0; 4417 4418 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4419 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4420 4421 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4422 ++n; 4423 } 4424 } 4425 4426 return n; 4427} 4428 4429size_t ACodec::countBuffersOwnedByNativeWindow() const { 4430 size_t n = 0; 4431 4432 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4433 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4434 4435 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4436 ++n; 4437 } 4438 } 4439 4440 return n; 4441} 4442 4443void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4444 if (mNativeWindow == NULL) { 4445 return; 4446 } 4447 4448 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4449 && dequeueBufferFromNativeWindow() != NULL) { 
4450 // these buffers will be submitted as regular buffers; account for this 4451 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4452 --mMetadataBuffersToSubmit; 4453 } 4454 } 4455} 4456 4457bool ACodec::allYourBuffersAreBelongToUs( 4458 OMX_U32 portIndex) { 4459 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4460 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4461 4462 if (info->mStatus != BufferInfo::OWNED_BY_US 4463 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4464 ALOGV("[%s] Buffer %u on port %u still has status %d", 4465 mComponentName.c_str(), 4466 info->mBufferID, portIndex, info->mStatus); 4467 return false; 4468 } 4469 } 4470 4471 return true; 4472} 4473 4474bool ACodec::allYourBuffersAreBelongToUs() { 4475 return allYourBuffersAreBelongToUs(kPortIndexInput) 4476 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4477} 4478 4479void ACodec::deferMessage(const sp<AMessage> &msg) { 4480 mDeferredQueue.push_back(msg); 4481} 4482 4483void ACodec::processDeferredMessages() { 4484 List<sp<AMessage> > queue = mDeferredQueue; 4485 mDeferredQueue.clear(); 4486 4487 List<sp<AMessage> >::iterator it = queue.begin(); 4488 while (it != queue.end()) { 4489 onMessageReceived(*it++); 4490 } 4491} 4492 4493// static 4494bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4495 MediaImage2 &image = params.sMediaImage; 4496 memset(&image, 0, sizeof(image)); 4497 4498 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4499 image.mNumPlanes = 0; 4500 4501 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4502 image.mWidth = params.nFrameWidth; 4503 image.mHeight = params.nFrameHeight; 4504 4505 // only supporting YUV420 4506 if (fmt != OMX_COLOR_FormatYUV420Planar && 4507 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4508 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4509 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4510 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4511 ALOGW("do not 
// static
// Builds a default MediaImage2 plane description for the handful of YUV420
// color formats this code understands, without asking the component.
// On success fills params.sMediaImage and returns true; on an unknown
// format or unusable stride/sliceHeight the image type is left UNKNOWN and
// false is returned.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    // Start out pessimistic; only marked as YUV once all checks pass.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    // Luma plane layout is shared by every supported format; chroma planes
    // are filled in per-format below.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // Native YV12 buffers: 16-aligned strides, V plane before U.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // Planar I420: U plane follows Y, V follows U; chroma stride is
            // half the luma stride.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            // Semi-planar: interleaved UV plane after Y (column increment 2).
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // Unreachable: the format was validated above.
            TRESPASS();
    }
    return true;
}
&describeColorFormatIndex) == OK) { 4615 DescribeColorFormatParams describeParamsV1(describeParams); 4616 if (omx->getParameter( 4617 node, describeColorFormatIndex, 4618 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4619 describeParams.initFromV1(describeParamsV1); 4620 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4621 } 4622 } else if (omx->getExtensionIndex( 4623 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4624 && omx->getParameter( 4625 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4626 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4627 } 4628 4629 return describeDefaultColorFormat(describeParams); 4630} 4631 4632// static 4633bool ACodec::isFlexibleColorFormat( 4634 const sp<IOMX> &omx, IOMX::node_id node, 4635 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4636 DescribeColorFormat2Params describeParams; 4637 InitOMXParams(&describeParams); 4638 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4639 // reasonable dummy values 4640 describeParams.nFrameWidth = 128; 4641 describeParams.nFrameHeight = 128; 4642 describeParams.nStride = 128; 4643 describeParams.nSliceHeight = 128; 4644 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4645 4646 CHECK(flexibleEquivalent != NULL); 4647 4648 if (!describeColorFormat(omx, node, describeParams)) { 4649 return false; 4650 } 4651 4652 const MediaImage2 &img = describeParams.sMediaImage; 4653 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4654 if (img.mNumPlanes != 3 4655 || img.mPlane[img.Y].mHorizSubsampling != 1 4656 || img.mPlane[img.Y].mVertSubsampling != 1) { 4657 return false; 4658 } 4659 4660 // YUV 420 4661 if (img.mPlane[img.U].mHorizSubsampling == 2 4662 && img.mPlane[img.U].mVertSubsampling == 2 4663 && img.mPlane[img.V].mHorizSubsampling == 2 4664 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4665 // possible flexible YUV420 format 4666 if (img.mBitDepth <= 8) { 4667 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4668 return true; 4669 } 4670 } 4671 } 4672 return false; 4673} 4674 4675status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4676 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4677 OMX_PARAM_PORTDEFINITIONTYPE def; 4678 InitOMXParams(&def); 4679 def.nPortIndex = portIndex; 4680 4681 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4682 if (err != OK) { 4683 return err; 4684 } 4685 4686 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4687 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4688 return BAD_VALUE; 4689 } 4690 4691 switch (def.eDomain) { 4692 case OMX_PortDomainVideo: 4693 { 4694 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4695 switch ((int)videoDef->eCompressionFormat) { 4696 case OMX_VIDEO_CodingUnused: 4697 { 4698 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4699 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4700 4701 notify->setInt32("stride", videoDef->nStride); 4702 notify->setInt32("slice-height", videoDef->nSliceHeight); 4703 notify->setInt32("color-format", videoDef->eColorFormat); 4704 4705 if (mNativeWindow == NULL) { 4706 DescribeColorFormat2Params describeParams; 4707 InitOMXParams(&describeParams); 4708 describeParams.eColorFormat = videoDef->eColorFormat; 4709 describeParams.nFrameWidth = videoDef->nFrameWidth; 4710 describeParams.nFrameHeight = videoDef->nFrameHeight; 4711 describeParams.nStride = videoDef->nStride; 4712 describeParams.nSliceHeight = videoDef->nSliceHeight; 4713 describeParams.bUsingNativeBuffers = OMX_FALSE; 4714 4715 if (describeColorFormat(mOMX, mNode, describeParams)) { 4716 notify->setBuffer( 4717 "image-data", 4718 ABuffer::CreateAsCopy( 4719 
&describeParams.sMediaImage, 4720 sizeof(describeParams.sMediaImage))); 4721 4722 MediaImage2 &img = describeParams.sMediaImage; 4723 MediaImage2::PlaneInfo *plane = img.mPlane; 4724 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4725 mComponentName.c_str(), img.mWidth, img.mHeight, 4726 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4727 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4728 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4729 } 4730 } 4731 4732 int32_t width = (int32_t)videoDef->nFrameWidth; 4733 int32_t height = (int32_t)videoDef->nFrameHeight; 4734 4735 if (portIndex == kPortIndexOutput) { 4736 OMX_CONFIG_RECTTYPE rect; 4737 InitOMXParams(&rect); 4738 rect.nPortIndex = portIndex; 4739 4740 if (mOMX->getConfig( 4741 mNode, 4742 (portIndex == kPortIndexOutput ? 4743 OMX_IndexConfigCommonOutputCrop : 4744 OMX_IndexConfigCommonInputCrop), 4745 &rect, sizeof(rect)) != OK) { 4746 rect.nLeft = 0; 4747 rect.nTop = 0; 4748 rect.nWidth = videoDef->nFrameWidth; 4749 rect.nHeight = videoDef->nFrameHeight; 4750 } 4751 4752 if (rect.nLeft < 0 || 4753 rect.nTop < 0 || 4754 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4755 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4756 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4757 rect.nLeft, rect.nTop, 4758 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4759 videoDef->nFrameWidth, videoDef->nFrameHeight); 4760 return BAD_VALUE; 4761 } 4762 4763 notify->setRect( 4764 "crop", 4765 rect.nLeft, 4766 rect.nTop, 4767 rect.nLeft + rect.nWidth - 1, 4768 rect.nTop + rect.nHeight - 1); 4769 4770 width = rect.nWidth; 4771 height = rect.nHeight; 4772 4773 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4774 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4775 width, height, mConfigFormat, notify, 4776 mUsingNativeWindow ? 
&dataSpace : NULL); 4777 if (mUsingNativeWindow) { 4778 notify->setInt32("android._dataspace", dataSpace); 4779 } 4780 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4781 } else { 4782 (void)getInputColorAspectsForVideoEncoder(notify); 4783 if (mConfigFormat->contains("hdr-static-info")) { 4784 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4785 } 4786 } 4787 4788 break; 4789 } 4790 4791 case OMX_VIDEO_CodingVP8: 4792 case OMX_VIDEO_CodingVP9: 4793 { 4794 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4795 InitOMXParams(&vp8type); 4796 vp8type.nPortIndex = kPortIndexOutput; 4797 status_t err = mOMX->getParameter( 4798 mNode, 4799 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4800 &vp8type, 4801 sizeof(vp8type)); 4802 4803 if (err == OK) { 4804 AString tsSchema = "none"; 4805 if (vp8type.eTemporalPattern 4806 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4807 switch (vp8type.nTemporalLayerCount) { 4808 case 1: 4809 { 4810 tsSchema = "webrtc.vp8.1-layer"; 4811 break; 4812 } 4813 case 2: 4814 { 4815 tsSchema = "webrtc.vp8.2-layer"; 4816 break; 4817 } 4818 case 3: 4819 { 4820 tsSchema = "webrtc.vp8.3-layer"; 4821 break; 4822 } 4823 default: 4824 { 4825 break; 4826 } 4827 } 4828 } 4829 notify->setString("ts-schema", tsSchema); 4830 } 4831 // Fall through to set up mime. 
4832 } 4833 4834 default: 4835 { 4836 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4837 // should be CodingUnused 4838 ALOGE("Raw port video compression format is %s(%d)", 4839 asString(videoDef->eCompressionFormat), 4840 videoDef->eCompressionFormat); 4841 return BAD_VALUE; 4842 } 4843 AString mime; 4844 if (GetMimeTypeForVideoCoding( 4845 videoDef->eCompressionFormat, &mime) != OK) { 4846 notify->setString("mime", "application/octet-stream"); 4847 } else { 4848 notify->setString("mime", mime.c_str()); 4849 } 4850 uint32_t intraRefreshPeriod = 0; 4851 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4852 && intraRefreshPeriod > 0) { 4853 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4854 } 4855 break; 4856 } 4857 } 4858 notify->setInt32("width", videoDef->nFrameWidth); 4859 notify->setInt32("height", videoDef->nFrameHeight); 4860 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4861 portIndex == kPortIndexInput ? "input" : "output", 4862 notify->debugString().c_str()); 4863 4864 break; 4865 } 4866 4867 case OMX_PortDomainAudio: 4868 { 4869 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4870 4871 switch ((int)audioDef->eEncoding) { 4872 case OMX_AUDIO_CodingPCM: 4873 { 4874 OMX_AUDIO_PARAM_PCMMODETYPE params; 4875 InitOMXParams(¶ms); 4876 params.nPortIndex = portIndex; 4877 4878 err = mOMX->getParameter( 4879 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4880 if (err != OK) { 4881 return err; 4882 } 4883 4884 if (params.nChannels <= 0 4885 || (params.nChannels != 1 && !params.bInterleaved) 4886 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4887 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4888 params.nChannels, 4889 params.bInterleaved ? 
" interleaved" : "", 4890 params.nBitPerSample); 4891 return FAILED_TRANSACTION; 4892 } 4893 4894 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4895 notify->setInt32("channel-count", params.nChannels); 4896 notify->setInt32("sample-rate", params.nSamplingRate); 4897 4898 AudioEncoding encoding = kAudioEncodingPcm16bit; 4899 if (params.eNumData == OMX_NumericalDataUnsigned 4900 && params.nBitPerSample == 8u) { 4901 encoding = kAudioEncodingPcm8bit; 4902 } else if (params.eNumData == OMX_NumericalDataFloat 4903 && params.nBitPerSample == 32u) { 4904 encoding = kAudioEncodingPcmFloat; 4905 } else if (params.nBitPerSample != 16u 4906 || params.eNumData != OMX_NumericalDataSigned) { 4907 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4908 asString(params.eNumData), params.eNumData, 4909 asString(params.ePCMMode), params.ePCMMode); 4910 return FAILED_TRANSACTION; 4911 } 4912 notify->setInt32("pcm-encoding", encoding); 4913 4914 if (mChannelMaskPresent) { 4915 notify->setInt32("channel-mask", mChannelMask); 4916 } 4917 break; 4918 } 4919 4920 case OMX_AUDIO_CodingAAC: 4921 { 4922 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4923 InitOMXParams(¶ms); 4924 params.nPortIndex = portIndex; 4925 4926 err = mOMX->getParameter( 4927 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4928 if (err != OK) { 4929 return err; 4930 } 4931 4932 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4933 notify->setInt32("channel-count", params.nChannels); 4934 notify->setInt32("sample-rate", params.nSampleRate); 4935 break; 4936 } 4937 4938 case OMX_AUDIO_CodingAMR: 4939 { 4940 OMX_AUDIO_PARAM_AMRTYPE params; 4941 InitOMXParams(¶ms); 4942 params.nPortIndex = portIndex; 4943 4944 err = mOMX->getParameter( 4945 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4946 if (err != OK) { 4947 return err; 4948 } 4949 4950 notify->setInt32("channel-count", 1); 4951 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4952 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4953 
notify->setInt32("sample-rate", 16000); 4954 } else { 4955 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4956 notify->setInt32("sample-rate", 8000); 4957 } 4958 break; 4959 } 4960 4961 case OMX_AUDIO_CodingFLAC: 4962 { 4963 OMX_AUDIO_PARAM_FLACTYPE params; 4964 InitOMXParams(¶ms); 4965 params.nPortIndex = portIndex; 4966 4967 err = mOMX->getParameter( 4968 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4969 if (err != OK) { 4970 return err; 4971 } 4972 4973 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4974 notify->setInt32("channel-count", params.nChannels); 4975 notify->setInt32("sample-rate", params.nSampleRate); 4976 break; 4977 } 4978 4979 case OMX_AUDIO_CodingMP3: 4980 { 4981 OMX_AUDIO_PARAM_MP3TYPE params; 4982 InitOMXParams(¶ms); 4983 params.nPortIndex = portIndex; 4984 4985 err = mOMX->getParameter( 4986 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4987 if (err != OK) { 4988 return err; 4989 } 4990 4991 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4992 notify->setInt32("channel-count", params.nChannels); 4993 notify->setInt32("sample-rate", params.nSampleRate); 4994 break; 4995 } 4996 4997 case OMX_AUDIO_CodingVORBIS: 4998 { 4999 OMX_AUDIO_PARAM_VORBISTYPE params; 5000 InitOMXParams(¶ms); 5001 params.nPortIndex = portIndex; 5002 5003 err = mOMX->getParameter( 5004 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5005 if (err != OK) { 5006 return err; 5007 } 5008 5009 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5010 notify->setInt32("channel-count", params.nChannels); 5011 notify->setInt32("sample-rate", params.nSampleRate); 5012 break; 5013 } 5014 5015 case OMX_AUDIO_CodingAndroidAC3: 5016 { 5017 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5018 InitOMXParams(¶ms); 5019 params.nPortIndex = portIndex; 5020 5021 err = mOMX->getParameter( 5022 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5023 ¶ms, sizeof(params)); 5024 if (err != OK) { 5025 return err; 5026 } 5027 5028 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5029 notify->setInt32("channel-count", params.nChannels); 5030 notify->setInt32("sample-rate", params.nSampleRate); 5031 break; 5032 } 5033 5034 case OMX_AUDIO_CodingAndroidEAC3: 5035 { 5036 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5037 InitOMXParams(¶ms); 5038 params.nPortIndex = portIndex; 5039 5040 err = mOMX->getParameter( 5041 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5042 ¶ms, sizeof(params)); 5043 if (err != OK) { 5044 return err; 5045 } 5046 5047 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5048 notify->setInt32("channel-count", params.nChannels); 5049 notify->setInt32("sample-rate", params.nSampleRate); 5050 break; 5051 } 5052 5053 case OMX_AUDIO_CodingAndroidOPUS: 5054 { 5055 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5056 InitOMXParams(¶ms); 5057 params.nPortIndex = portIndex; 5058 5059 err = mOMX->getParameter( 5060 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5061 ¶ms, sizeof(params)); 5062 if (err != OK) { 5063 return err; 5064 } 5065 5066 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5067 notify->setInt32("channel-count", params.nChannels); 5068 notify->setInt32("sample-rate", params.nSampleRate); 5069 break; 5070 } 5071 5072 case OMX_AUDIO_CodingG711: 5073 { 5074 OMX_AUDIO_PARAM_PCMMODETYPE params; 5075 InitOMXParams(¶ms); 5076 params.nPortIndex = portIndex; 5077 5078 err = mOMX->getParameter( 5079 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5080 if (err != OK) { 5081 return err; 5082 } 5083 5084 const char *mime = NULL; 5085 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5086 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5087 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5088 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5089 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5090 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5091 } 5092 notify->setString("mime", mime); 5093 notify->setInt32("channel-count", params.nChannels); 5094 notify->setInt32("sample-rate", 
params.nSamplingRate); 5095 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5096 break; 5097 } 5098 5099 case OMX_AUDIO_CodingGSMFR: 5100 { 5101 OMX_AUDIO_PARAM_PCMMODETYPE params; 5102 InitOMXParams(¶ms); 5103 params.nPortIndex = portIndex; 5104 5105 err = mOMX->getParameter( 5106 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5107 if (err != OK) { 5108 return err; 5109 } 5110 5111 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5112 notify->setInt32("channel-count", params.nChannels); 5113 notify->setInt32("sample-rate", params.nSamplingRate); 5114 break; 5115 } 5116 5117 default: 5118 ALOGE("Unsupported audio coding: %s(%d)\n", 5119 asString(audioDef->eEncoding), audioDef->eEncoding); 5120 return BAD_TYPE; 5121 } 5122 break; 5123 } 5124 5125 default: 5126 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5127 return BAD_TYPE; 5128 } 5129 5130 return OK; 5131} 5132 5133void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5134 // aspects are normally communicated in ColorAspects 5135 int32_t range, standard, transfer; 5136 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5137 5138 // if some aspects are unspecified, use dataspace fields 5139 if (range != 0) { 5140 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5141 } 5142 if (standard != 0) { 5143 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5144 } 5145 if (transfer != 0) { 5146 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5147 } 5148 5149 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5150 if (range != 0) { 5151 mOutputFormat->setInt32("color-range", range); 5152 } 5153 if (standard != 0) { 5154 mOutputFormat->setInt32("color-standard", standard); 5155 } 5156 if (transfer != 0) { 5157 mOutputFormat->setInt32("color-transfer", transfer); 5158 } 5159 5160 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5161 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5162 dataSpace, 5163 aspects.mRange, asString(aspects.mRange), 5164 aspects.mPrimaries, asString(aspects.mPrimaries), 5165 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5166 aspects.mTransfer, asString(aspects.mTransfer), 5167 range, asString((ColorRange)range), 5168 standard, asString((ColorStandard)standard), 5169 transfer, asString((ColorTransfer)transfer)); 5170} 5171 5172void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5173 // store new output format, at the same time mark that this is no longer the first frame 5174 mOutputFormat = mBaseOutputFormat->dup(); 5175 5176 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5177 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5178 return; 5179 } 5180 5181 if (expectedFormat != NULL) { 5182 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5183 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5184 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5185 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5186 mComponentName.c_str(), 5187 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5188 } 5189 } 5190 5191 if (!mIsVideo && !mIsEncoder) { 5192 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5193 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5194 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5195 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5196 5197 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5198 if (mConverter[kPortIndexOutput] != NULL) { 5199 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5200 } 5201 } 5202 5203 if (mTunneled) { 5204 sendFormatChange(); 5205 } 5206} 5207 5208void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    // For raw video going to a surface, attach crop and dataspace to the
    // drain/render reply so they can be applied when the buffer is rendered.
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts kWhatOutputFormatChanged with the current output format. For raw
// audio with encoder delay/padding, also (re)creates the SkipCutBuffer that
// trims those samples from the decoded stream.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // replacing a non-empty buffer discards its pending samples
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client. 'error' is the raw OMX error code;
// when the caller passed UNKNOWN_ERROR a more specific status_t is derived
// from the OMX error where possible.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // once set, the codec is unusable until released
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Requests an IDR (sync) frame from the encoder as soon as possible.
// Unsupported for decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer; the four vectors below are parallel arrays indexed by
// the same position.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
mCodec(codec) { 5330} 5331 5332ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5333 OMX_U32 /* portIndex */) { 5334 return KEEP_BUFFERS; 5335} 5336 5337bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5338 switch (msg->what()) { 5339 case kWhatInputBufferFilled: 5340 { 5341 onInputBufferFilled(msg); 5342 break; 5343 } 5344 5345 case kWhatOutputBufferDrained: 5346 { 5347 onOutputBufferDrained(msg); 5348 break; 5349 } 5350 5351 case ACodec::kWhatOMXMessageList: 5352 { 5353 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5354 } 5355 5356 case ACodec::kWhatOMXMessageItem: 5357 { 5358 // no need to check as we already did it for kWhatOMXMessageList 5359 return onOMXMessage(msg); 5360 } 5361 5362 case ACodec::kWhatOMXMessage: 5363 { 5364 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5365 } 5366 5367 case ACodec::kWhatSetSurface: 5368 { 5369 sp<AReplyToken> replyID; 5370 CHECK(msg->senderAwaitsResponse(&replyID)); 5371 5372 sp<RefBase> obj; 5373 CHECK(msg->findObject("surface", &obj)); 5374 5375 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5376 5377 sp<AMessage> response = new AMessage; 5378 response->setInt32("err", err); 5379 response->postReply(replyID); 5380 break; 5381 } 5382 5383 case ACodec::kWhatCreateInputSurface: 5384 case ACodec::kWhatSetInputSurface: 5385 case ACodec::kWhatSignalEndOfInputStream: 5386 { 5387 // This may result in an app illegal state exception. 5388 ALOGE("Message 0x%x was not handled", msg->what()); 5389 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5390 return true; 5391 } 5392 5393 case ACodec::kWhatOMXDied: 5394 { 5395 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5396 ALOGE("OMX/mediaserver died, signalling error!"); 5397 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5398 break; 5399 } 5400 5401 case ACodec::kWhatReleaseCodecInstance: 5402 { 5403 ALOGI("[%s] forcing the release of codec", 5404 mCodec->mComponentName.c_str()); 5405 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5406 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5407 mCodec->mComponentName.c_str(), err); 5408 sp<AMessage> notify = mCodec->mNotify->dup(); 5409 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5410 notify->post(); 5411 break; 5412 } 5413 5414 default: 5415 return false; 5416 } 5417 5418 return true; 5419} 5420 5421bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5422 // there is a possibility that this is an outstanding message for a 5423 // codec that we have already destroyed 5424 if (mCodec->mNode == 0) { 5425 ALOGI("ignoring message as already freed component: %s", 5426 msg->debugString().c_str()); 5427 return false; 5428 } 5429 5430 IOMX::node_id nodeID; 5431 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5432 if (nodeID != mCodec->mNode) { 5433 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5434 return false; 5435 } 5436 return true; 5437} 5438 5439bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5440 sp<RefBase> obj; 5441 CHECK(msg->findObject("messages", &obj)); 5442 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5443 5444 bool receivedRenderedEvents = false; 5445 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5446 it != msgList->getList().cend(); ++it) { 5447 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5448 mCodec->handleMessage(*it); 5449 int32_t type; 5450 CHECK((*it)->findInt32("type", &type)); 5451 if (type == omx_message::FRAME_RENDERED) { 5452 receivedRenderedEvents = true; 5453 } 5454 } 5455 5456 if (receivedRenderedEvents) { 5457 // NOTE: all buffers are 
rendered in this case 5458 mCodec->notifyOfRenderedFrames(); 5459 } 5460 return true; 5461} 5462 5463bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5464 int32_t type; 5465 CHECK(msg->findInt32("type", &type)); 5466 5467 switch (type) { 5468 case omx_message::EVENT: 5469 { 5470 int32_t event, data1, data2; 5471 CHECK(msg->findInt32("event", &event)); 5472 CHECK(msg->findInt32("data1", &data1)); 5473 CHECK(msg->findInt32("data2", &data2)); 5474 5475 if (event == OMX_EventCmdComplete 5476 && data1 == OMX_CommandFlush 5477 && data2 == (int32_t)OMX_ALL) { 5478 // Use of this notification is not consistent across 5479 // implementations. We'll drop this notification and rely 5480 // on flush-complete notifications on the individual port 5481 // indices instead. 5482 5483 return true; 5484 } 5485 5486 return onOMXEvent( 5487 static_cast<OMX_EVENTTYPE>(event), 5488 static_cast<OMX_U32>(data1), 5489 static_cast<OMX_U32>(data2)); 5490 } 5491 5492 case omx_message::EMPTY_BUFFER_DONE: 5493 { 5494 IOMX::buffer_id bufferID; 5495 int32_t fenceFd; 5496 5497 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5498 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5499 5500 return onOMXEmptyBufferDone(bufferID, fenceFd); 5501 } 5502 5503 case omx_message::FILL_BUFFER_DONE: 5504 { 5505 IOMX::buffer_id bufferID; 5506 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5507 5508 int32_t rangeOffset, rangeLength, flags, fenceFd; 5509 int64_t timeUs; 5510 5511 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5512 CHECK(msg->findInt32("range_length", &rangeLength)); 5513 CHECK(msg->findInt32("flags", &flags)); 5514 CHECK(msg->findInt64("timestamp", &timeUs)); 5515 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5516 5517 return onOMXFillBufferDone( 5518 bufferID, 5519 (size_t)rangeOffset, (size_t)rangeLength, 5520 (OMX_U32)flags, 5521 timeUs, 5522 fenceFd); 5523 } 5524 5525 case omx_message::FRAME_RENDERED: 5526 { 5527 int64_t mediaTimeUs, systemNano; 5528 5529 
CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5530 CHECK(msg->findInt64("system_nano", &systemNano)); 5531 5532 return onOMXFrameRendered( 5533 mediaTimeUs, systemNano); 5534 } 5535 5536 default: 5537 ALOGE("Unexpected message type: %d", type); 5538 return false; 5539 } 5540} 5541 5542bool ACodec::BaseState::onOMXFrameRendered( 5543 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5544 // ignore outside of Executing and PortSettingsChanged states 5545 return true; 5546} 5547 5548bool ACodec::BaseState::onOMXEvent( 5549 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5550 if (event == OMX_EventDataSpaceChanged) { 5551 ColorAspects aspects; 5552 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5553 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5554 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5555 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5556 5557 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5558 return true; 5559 } 5560 5561 if (event != OMX_EventError) { 5562 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5563 mCodec->mComponentName.c_str(), event, data1, data2); 5564 5565 return false; 5566 } 5567 5568 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5569 5570 // verify OMX component sends back an error we expect. 
5571 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5572 if (!isOMXError(omxError)) { 5573 ALOGW("Invalid OMX error %#x", omxError); 5574 omxError = OMX_ErrorUndefined; 5575 } 5576 mCodec->signalError(omxError); 5577 5578 return true; 5579} 5580 5581bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5582 ALOGV("[%s] onOMXEmptyBufferDone %u", 5583 mCodec->mComponentName.c_str(), bufferID); 5584 5585 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5586 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5587 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5588 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5589 mCodec->dumpBuffers(kPortIndexInput); 5590 if (fenceFd >= 0) { 5591 ::close(fenceFd); 5592 } 5593 return false; 5594 } 5595 info->mStatus = BufferInfo::OWNED_BY_US; 5596 5597 // input buffers cannot take fences, so wait for any fence now 5598 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5599 fenceFd = -1; 5600 5601 // still save fence for completeness 5602 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5603 5604 // We're in "store-metadata-in-buffers" mode, the underlying 5605 // OMX component had access to data that's implicitly refcounted 5606 // by this "MediaBuffer" object. Now that the OMX component has 5607 // told us that it's done with the input buffer, we can decrement 5608 // the mediaBuffer's reference count. 
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            // hand the reclaimed buffer straight back to the client
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Offers an input buffer we own to the client (upstream) to be filled;
// no-op once input EOS has been queued. The reply message routes the filled
// buffer back through kWhatInputBufferFilled.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned an input buffer (filled, flushed, or with an error).
// Reclaims ownership and, in RESUBMIT mode, converts/copies the data if
// needed and submits it to the component via emptyBuffer, handling CSD,
// metadata modes and EOS signalling.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        // legacy VP9 components choke on codec config buffers
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    // CSD is sent as a plain byte buffer, not as metadata
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the codec's buffer:
                    // convert (or plain-copy for CSD) into the codec buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // For metadata input modes, refresh the handle/graphic buffer
                // stored in the metadata before submitting.
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence ownership passed to emptyBuffer (or consumed)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                // no payload: submit an empty buffer carrying only the EOS flag
                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Finds one input buffer we still own and re-offers it to the client;
// no-op after input EOS or when no buffer is available.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer. Reclaims ownership, tracks
// fences/render queue state and, in RESUBMIT mode, either resubmits empty
// non-EOS buffers or forwards the data to the client as kWhatDrainThisBuffer
// (converting between codec and client buffers if a converter is installed).
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty buffers without EOS (or after output EOS) go straight
            // back to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                // trim encoder delay/padding from raw audio output
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// The client is done with an output buffer; reclaim it, apply any pending
// crop/dataspace to the native window, and (continues past this chunk).
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
mCodec->mLastNativeWindowDataSpace = dataSpace; 6133 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6134 } 6135 6136 int32_t render; 6137 if (mCodec->mNativeWindow != NULL 6138 && msg->findInt32("render", &render) && render != 0 6139 && info->mData != NULL && info->mData->size() != 0) { 6140 ATRACE_NAME("render"); 6141 // The client wants this buffer to be rendered. 6142 6143 // save buffers sent to the surface so we can get render time when they return 6144 int64_t mediaTimeUs = -1; 6145 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6146 if (mediaTimeUs >= 0) { 6147 mCodec->mRenderTracker.onFrameQueued( 6148 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6149 } 6150 6151 int64_t timestampNs = 0; 6152 if (!msg->findInt64("timestampNs", ×tampNs)) { 6153 // use media timestamp if client did not request a specific render timestamp 6154 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 6155 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6156 timestampNs *= 1000; 6157 } 6158 } 6159 6160 status_t err; 6161 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6162 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6163 6164 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6165 err = mCodec->mNativeWindow->queueBuffer( 6166 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6167 info->mFenceFd = -1; 6168 if (err == OK) { 6169 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6170 } else { 6171 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6172 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6173 info->mStatus = BufferInfo::OWNED_BY_US; 6174 // keeping read fence as write fence to avoid clobbering 6175 info->mIsReadFence = false; 6176 } 6177 } else { 6178 if (mCodec->mNativeWindow != NULL && 6179 (info->mData == NULL || info->mData->size() != 0)) { 6180 // move read fence into 
write fence to avoid clobbering 6181 info->mIsReadFence = false; 6182 ATRACE_NAME("frame-drop"); 6183 } 6184 info->mStatus = BufferInfo::OWNED_BY_US; 6185 } 6186 6187 PortMode mode = getPortMode(kPortIndexOutput); 6188 6189 switch (mode) { 6190 case KEEP_BUFFERS: 6191 { 6192 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6193 6194 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6195 // We cannot resubmit the buffer we just rendered, dequeue 6196 // the spare instead. 6197 6198 info = mCodec->dequeueBufferFromNativeWindow(); 6199 } 6200 break; 6201 } 6202 6203 case RESUBMIT_BUFFERS: 6204 { 6205 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6206 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6207 // We cannot resubmit the buffer we just rendered, dequeue 6208 // the spare instead. 6209 6210 info = mCodec->dequeueBufferFromNativeWindow(); 6211 } 6212 6213 if (info != NULL) { 6214 ALOGV("[%s] calling fillBuffer %u", 6215 mCodec->mComponentName.c_str(), info->mBufferID); 6216 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6217 status_t err = mCodec->mOMX->fillBuffer( 6218 mCodec->mNode, info->mBufferID, info->mFenceFd); 6219 info->mFenceFd = -1; 6220 if (err == OK) { 6221 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6222 } else { 6223 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6224 } 6225 } 6226 } 6227 break; 6228 } 6229 6230 case FREE_BUFFERS: 6231 { 6232 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6233 if (err != OK) { 6234 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6235 } 6236 break; 6237 } 6238 6239 default: 6240 ALOGE("Invalid port mode: %d", mode); 6241 return; 6242 } 6243} 6244 6245//////////////////////////////////////////////////////////////////////////////// 6246 6247ACodec::UninitializedState::UninitializedState(ACodec *codec) 6248 : BaseState(codec) { 6249} 6250 6251void ACodec::UninitializedState::stateEntered() { 6252 ALOGV("Now 
uninitialized"); 6253 6254 if (mDeathNotifier != NULL) { 6255 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6256 mDeathNotifier.clear(); 6257 } 6258 6259 mCodec->mUsingNativeWindow = false; 6260 mCodec->mNativeWindow.clear(); 6261 mCodec->mNativeWindowUsageBits = 0; 6262 mCodec->mNode = 0; 6263 mCodec->mOMX.clear(); 6264 mCodec->mQuirks = 0; 6265 mCodec->mFlags = 0; 6266 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6267 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6268 mCodec->mConverter[0].clear(); 6269 mCodec->mConverter[1].clear(); 6270 mCodec->mComponentName.clear(); 6271} 6272 6273bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6274 bool handled = false; 6275 6276 switch (msg->what()) { 6277 case ACodec::kWhatSetup: 6278 { 6279 onSetup(msg); 6280 6281 handled = true; 6282 break; 6283 } 6284 6285 case ACodec::kWhatAllocateComponent: 6286 { 6287 onAllocateComponent(msg); 6288 handled = true; 6289 break; 6290 } 6291 6292 case ACodec::kWhatShutdown: 6293 { 6294 int32_t keepComponentAllocated; 6295 CHECK(msg->findInt32( 6296 "keepComponentAllocated", &keepComponentAllocated)); 6297 ALOGW_IF(keepComponentAllocated, 6298 "cannot keep component allocated on shutdown in Uninitialized state"); 6299 6300 sp<AMessage> notify = mCodec->mNotify->dup(); 6301 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6302 notify->post(); 6303 6304 handled = true; 6305 break; 6306 } 6307 6308 case ACodec::kWhatFlush: 6309 { 6310 sp<AMessage> notify = mCodec->mNotify->dup(); 6311 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6312 notify->post(); 6313 6314 handled = true; 6315 break; 6316 } 6317 6318 case ACodec::kWhatReleaseCodecInstance: 6319 { 6320 // nothing to do, as we have already signaled shutdown 6321 handled = true; 6322 break; 6323 } 6324 6325 default: 6326 return BaseState::onMessageReceived(msg); 6327 } 6328 6329 return handled; 6330} 6331 6332void ACodec::UninitializedState::onSetup( 6333 
const sp<AMessage> &msg) { 6334 if (onAllocateComponent(msg) 6335 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6336 mCodec->mLoadedState->onStart(); 6337 } 6338} 6339 6340bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6341 ALOGV("onAllocateComponent"); 6342 6343 CHECK(mCodec->mNode == 0); 6344 6345 OMXClient client; 6346 if (client.connect() != OK) { 6347 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6348 return false; 6349 } 6350 6351 sp<IOMX> omx = client.interface(); 6352 6353 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6354 6355 Vector<AString> matchingCodecs; 6356 6357 AString mime; 6358 6359 AString componentName; 6360 uint32_t quirks = 0; 6361 int32_t encoder = false; 6362 if (msg->findString("componentName", &componentName)) { 6363 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6364 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6365 matchingCodecs.add(componentName); 6366 } 6367 } else { 6368 CHECK(msg->findString("mime", &mime)); 6369 6370 if (!msg->findInt32("encoder", &encoder)) { 6371 encoder = false; 6372 } 6373 6374 MediaCodecList::findMatchingCodecs( 6375 mime.c_str(), 6376 encoder, // createEncoder 6377 0, // flags 6378 &matchingCodecs); 6379 } 6380 6381 sp<CodecObserver> observer = new CodecObserver; 6382 IOMX::node_id node = 0; 6383 6384 status_t err = NAME_NOT_FOUND; 6385 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6386 ++matchIndex) { 6387 componentName = matchingCodecs[matchIndex]; 6388 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6389 6390 pid_t tid = gettid(); 6391 int prevPriority = androidGetThreadPriority(tid); 6392 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6393 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6394 androidSetThreadPriority(tid, prevPriority); 6395 6396 if (err == OK) { 6397 break; 6398 } else { 6399 ALOGW("Allocating component '%s' 
failed, try next one.", componentName.c_str()); 6400 } 6401 6402 node = 0; 6403 } 6404 6405 if (node == 0) { 6406 if (!mime.empty()) { 6407 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6408 encoder ? "en" : "de", mime.c_str(), err); 6409 } else { 6410 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6411 } 6412 6413 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6414 return false; 6415 } 6416 6417 mDeathNotifier = new DeathNotifier(notify); 6418 if (mCodec->mNodeBinder == NULL || 6419 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6420 // This was a local binder, if it dies so do we, we won't care 6421 // about any notifications in the afterlife. 6422 mDeathNotifier.clear(); 6423 } 6424 6425 notify = new AMessage(kWhatOMXMessageList, mCodec); 6426 observer->setNotificationMessage(notify); 6427 6428 mCodec->mComponentName = componentName; 6429 mCodec->mRenderTracker.setComponentName(componentName); 6430 mCodec->mFlags = 0; 6431 6432 if (componentName.endsWith(".secure")) { 6433 mCodec->mFlags |= kFlagIsSecure; 6434 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6435 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6436 } 6437 6438 mCodec->mQuirks = quirks; 6439 mCodec->mOMX = omx; 6440 mCodec->mNode = node; 6441 6442 { 6443 sp<AMessage> notify = mCodec->mNotify->dup(); 6444 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6445 notify->setString("componentName", mCodec->mComponentName.c_str()); 6446 notify->post(); 6447 } 6448 6449 mCodec->changeState(mCodec->mLoadedState); 6450 6451 return true; 6452} 6453 6454//////////////////////////////////////////////////////////////////////////////// 6455 6456ACodec::LoadedState::LoadedState(ACodec *codec) 6457 : BaseState(codec) { 6458} 6459 6460void ACodec::LoadedState::stateEntered() { 6461 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6462 6463 mCodec->mPortEOS[kPortIndexInput] = 6464 
mCodec->mPortEOS[kPortIndexOutput] = false; 6465 6466 mCodec->mInputEOSResult = OK; 6467 6468 mCodec->mDequeueCounter = 0; 6469 mCodec->mMetadataBuffersToSubmit = 0; 6470 mCodec->mRepeatFrameDelayUs = -1ll; 6471 mCodec->mInputFormat.clear(); 6472 mCodec->mOutputFormat.clear(); 6473 mCodec->mBaseOutputFormat.clear(); 6474 6475 if (mCodec->mShutdownInProgress) { 6476 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6477 6478 mCodec->mShutdownInProgress = false; 6479 mCodec->mKeepComponentAllocated = false; 6480 6481 onShutdown(keepComponentAllocated); 6482 } 6483 mCodec->mExplicitShutdown = false; 6484 6485 mCodec->processDeferredMessages(); 6486} 6487 6488void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6489 if (!keepComponentAllocated) { 6490 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6491 6492 mCodec->changeState(mCodec->mUninitializedState); 6493 } 6494 6495 if (mCodec->mExplicitShutdown) { 6496 sp<AMessage> notify = mCodec->mNotify->dup(); 6497 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6498 notify->post(); 6499 mCodec->mExplicitShutdown = false; 6500 } 6501} 6502 6503bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6504 bool handled = false; 6505 6506 switch (msg->what()) { 6507 case ACodec::kWhatConfigureComponent: 6508 { 6509 onConfigureComponent(msg); 6510 handled = true; 6511 break; 6512 } 6513 6514 case ACodec::kWhatCreateInputSurface: 6515 { 6516 onCreateInputSurface(msg); 6517 handled = true; 6518 break; 6519 } 6520 6521 case ACodec::kWhatSetInputSurface: 6522 { 6523 onSetInputSurface(msg); 6524 handled = true; 6525 break; 6526 } 6527 6528 case ACodec::kWhatStart: 6529 { 6530 onStart(); 6531 handled = true; 6532 break; 6533 } 6534 6535 case ACodec::kWhatShutdown: 6536 { 6537 int32_t keepComponentAllocated; 6538 CHECK(msg->findInt32( 6539 "keepComponentAllocated", &keepComponentAllocated)); 6540 6541 mCodec->mExplicitShutdown = true; 6542 onShutdown(keepComponentAllocated); 6543 
6544 handled = true; 6545 break; 6546 } 6547 6548 case ACodec::kWhatFlush: 6549 { 6550 sp<AMessage> notify = mCodec->mNotify->dup(); 6551 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6552 notify->post(); 6553 6554 handled = true; 6555 break; 6556 } 6557 6558 default: 6559 return BaseState::onMessageReceived(msg); 6560 } 6561 6562 return handled; 6563} 6564 6565bool ACodec::LoadedState::onConfigureComponent( 6566 const sp<AMessage> &msg) { 6567 ALOGV("onConfigureComponent"); 6568 6569 CHECK(mCodec->mNode != 0); 6570 6571 status_t err = OK; 6572 AString mime; 6573 if (!msg->findString("mime", &mime)) { 6574 err = BAD_VALUE; 6575 } else { 6576 err = mCodec->configureCodec(mime.c_str(), msg); 6577 } 6578 if (err != OK) { 6579 ALOGE("[%s] configureCodec returning error %d", 6580 mCodec->mComponentName.c_str(), err); 6581 6582 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6583 return false; 6584 } 6585 6586 { 6587 sp<AMessage> notify = mCodec->mNotify->dup(); 6588 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6589 notify->setMessage("input-format", mCodec->mInputFormat); 6590 notify->setMessage("output-format", mCodec->mOutputFormat); 6591 notify->post(); 6592 } 6593 6594 return true; 6595} 6596 6597status_t ACodec::LoadedState::setupInputSurface() { 6598 status_t err = OK; 6599 6600 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6601 err = mCodec->mOMX->setInternalOption( 6602 mCodec->mNode, 6603 kPortIndexInput, 6604 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6605 &mCodec->mRepeatFrameDelayUs, 6606 sizeof(mCodec->mRepeatFrameDelayUs)); 6607 6608 if (err != OK) { 6609 ALOGE("[%s] Unable to configure option to repeat previous " 6610 "frames (err %d)", 6611 mCodec->mComponentName.c_str(), 6612 err); 6613 return err; 6614 } 6615 } 6616 6617 if (mCodec->mMaxPtsGapUs > 0ll) { 6618 err = mCodec->mOMX->setInternalOption( 6619 mCodec->mNode, 6620 kPortIndexInput, 6621 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6622 
&mCodec->mMaxPtsGapUs, 6623 sizeof(mCodec->mMaxPtsGapUs)); 6624 6625 if (err != OK) { 6626 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6627 mCodec->mComponentName.c_str(), 6628 err); 6629 return err; 6630 } 6631 } 6632 6633 if (mCodec->mMaxFps > 0) { 6634 err = mCodec->mOMX->setInternalOption( 6635 mCodec->mNode, 6636 kPortIndexInput, 6637 IOMX::INTERNAL_OPTION_MAX_FPS, 6638 &mCodec->mMaxFps, 6639 sizeof(mCodec->mMaxFps)); 6640 6641 if (err != OK) { 6642 ALOGE("[%s] Unable to configure max fps (err %d)", 6643 mCodec->mComponentName.c_str(), 6644 err); 6645 return err; 6646 } 6647 } 6648 6649 if (mCodec->mTimePerCaptureUs > 0ll 6650 && mCodec->mTimePerFrameUs > 0ll) { 6651 int64_t timeLapse[2]; 6652 timeLapse[0] = mCodec->mTimePerFrameUs; 6653 timeLapse[1] = mCodec->mTimePerCaptureUs; 6654 err = mCodec->mOMX->setInternalOption( 6655 mCodec->mNode, 6656 kPortIndexInput, 6657 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6658 &timeLapse[0], 6659 sizeof(timeLapse)); 6660 6661 if (err != OK) { 6662 ALOGE("[%s] Unable to configure time lapse (err %d)", 6663 mCodec->mComponentName.c_str(), 6664 err); 6665 return err; 6666 } 6667 } 6668 6669 if (mCodec->mCreateInputBuffersSuspended) { 6670 bool suspend = true; 6671 err = mCodec->mOMX->setInternalOption( 6672 mCodec->mNode, 6673 kPortIndexInput, 6674 IOMX::INTERNAL_OPTION_SUSPEND, 6675 &suspend, 6676 sizeof(suspend)); 6677 6678 if (err != OK) { 6679 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6680 mCodec->mComponentName.c_str(), 6681 err); 6682 return err; 6683 } 6684 } 6685 6686 uint32_t usageBits; 6687 if (mCodec->mOMX->getParameter( 6688 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6689 &usageBits, sizeof(usageBits)) == OK) { 6690 mCodec->mInputFormat->setInt32( 6691 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6692 } 6693 6694 sp<ABuffer> colorAspectsBuffer; 6695 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6696 err 
= mCodec->mOMX->setInternalOption( 6697 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6698 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6699 if (err != OK) { 6700 ALOGE("[%s] Unable to configure color aspects (err %d)", 6701 mCodec->mComponentName.c_str(), err); 6702 return err; 6703 } 6704 } 6705 return OK; 6706} 6707 6708void ACodec::LoadedState::onCreateInputSurface( 6709 const sp<AMessage> & /* msg */) { 6710 ALOGV("onCreateInputSurface"); 6711 6712 sp<AMessage> notify = mCodec->mNotify->dup(); 6713 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6714 6715 android_dataspace dataSpace; 6716 status_t err = 6717 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6718 notify->setMessage("input-format", mCodec->mInputFormat); 6719 notify->setMessage("output-format", mCodec->mOutputFormat); 6720 6721 sp<IGraphicBufferProducer> bufferProducer; 6722 if (err == OK) { 6723 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6724 err = mCodec->mOMX->createInputSurface( 6725 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6726 &mCodec->mInputMetadataType); 6727 // framework uses ANW buffers internally instead of gralloc handles 6728 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6729 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6730 } 6731 } 6732 6733 if (err == OK) { 6734 err = setupInputSurface(); 6735 } 6736 6737 if (err == OK) { 6738 notify->setObject("input-surface", 6739 new BufferProducerWrapper(bufferProducer)); 6740 } else { 6741 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6742 // the error through because it's in the "configured" state. We 6743 // send a kWhatInputSurfaceCreated with an error value instead. 
6744 ALOGE("[%s] onCreateInputSurface returning error %d", 6745 mCodec->mComponentName.c_str(), err); 6746 notify->setInt32("err", err); 6747 } 6748 notify->post(); 6749} 6750 6751void ACodec::LoadedState::onSetInputSurface( 6752 const sp<AMessage> &msg) { 6753 ALOGV("onSetInputSurface"); 6754 6755 sp<AMessage> notify = mCodec->mNotify->dup(); 6756 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6757 6758 sp<RefBase> obj; 6759 CHECK(msg->findObject("input-surface", &obj)); 6760 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6761 6762 android_dataspace dataSpace; 6763 status_t err = 6764 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6765 notify->setMessage("input-format", mCodec->mInputFormat); 6766 notify->setMessage("output-format", mCodec->mOutputFormat); 6767 6768 if (err == OK) { 6769 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6770 err = mCodec->mOMX->setInputSurface( 6771 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6772 &mCodec->mInputMetadataType); 6773 // framework uses ANW buffers internally instead of gralloc handles 6774 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6775 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6776 } 6777 } 6778 6779 if (err == OK) { 6780 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6781 err = setupInputSurface(); 6782 } 6783 6784 if (err != OK) { 6785 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6786 // the error through because it's in the "configured" state. We 6787 // send a kWhatInputSurfaceAccepted with an error value instead. 
6788 ALOGE("[%s] onSetInputSurface returning error %d", 6789 mCodec->mComponentName.c_str(), err); 6790 notify->setInt32("err", err); 6791 } 6792 notify->post(); 6793} 6794 6795void ACodec::LoadedState::onStart() { 6796 ALOGV("onStart"); 6797 6798 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6799 if (err != OK) { 6800 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6801 } else { 6802 mCodec->changeState(mCodec->mLoadedToIdleState); 6803 } 6804} 6805 6806//////////////////////////////////////////////////////////////////////////////// 6807 6808ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6809 : BaseState(codec) { 6810} 6811 6812void ACodec::LoadedToIdleState::stateEntered() { 6813 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6814 6815 status_t err; 6816 if ((err = allocateBuffers()) != OK) { 6817 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6818 "(error 0x%08x)", 6819 err); 6820 6821 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6822 6823 mCodec->mOMX->sendCommand( 6824 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6825 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6826 mCodec->freeBuffersOnPort(kPortIndexInput); 6827 } 6828 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6829 mCodec->freeBuffersOnPort(kPortIndexOutput); 6830 } 6831 6832 mCodec->changeState(mCodec->mLoadedState); 6833 } 6834} 6835 6836status_t ACodec::LoadedToIdleState::allocateBuffers() { 6837 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6838 6839 if (err != OK) { 6840 return err; 6841 } 6842 6843 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6844} 6845 6846bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6847 switch (msg->what()) { 6848 case kWhatSetParameters: 6849 case kWhatShutdown: 6850 { 6851 mCodec->deferMessage(msg); 6852 return true; 6853 } 6854 6855 
case kWhatSignalEndOfInputStream: 6856 { 6857 mCodec->onSignalEndOfInputStream(); 6858 return true; 6859 } 6860 6861 case kWhatResume: 6862 { 6863 // We'll be active soon enough. 6864 return true; 6865 } 6866 6867 case kWhatFlush: 6868 { 6869 // We haven't even started yet, so we're flushed alright... 6870 sp<AMessage> notify = mCodec->mNotify->dup(); 6871 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6872 notify->post(); 6873 return true; 6874 } 6875 6876 default: 6877 return BaseState::onMessageReceived(msg); 6878 } 6879} 6880 6881bool ACodec::LoadedToIdleState::onOMXEvent( 6882 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6883 switch (event) { 6884 case OMX_EventCmdComplete: 6885 { 6886 status_t err = OK; 6887 if (data1 != (OMX_U32)OMX_CommandStateSet 6888 || data2 != (OMX_U32)OMX_StateIdle) { 6889 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6890 asString((OMX_COMMANDTYPE)data1), data1, 6891 asString((OMX_STATETYPE)data2), data2); 6892 err = FAILED_TRANSACTION; 6893 } 6894 6895 if (err == OK) { 6896 err = mCodec->mOMX->sendCommand( 6897 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6898 } 6899 6900 if (err != OK) { 6901 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6902 } else { 6903 mCodec->changeState(mCodec->mIdleToExecutingState); 6904 } 6905 6906 return true; 6907 } 6908 6909 default: 6910 return BaseState::onOMXEvent(event, data1, data2); 6911 } 6912} 6913 6914//////////////////////////////////////////////////////////////////////////////// 6915 6916ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6917 : BaseState(codec) { 6918} 6919 6920void ACodec::IdleToExecutingState::stateEntered() { 6921 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6922} 6923 6924bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6925 switch (msg->what()) { 6926 case kWhatSetParameters: 6927 case kWhatShutdown: 6928 { 6929 
mCodec->deferMessage(msg); 6930 return true; 6931 } 6932 6933 case kWhatResume: 6934 { 6935 // We'll be active soon enough. 6936 return true; 6937 } 6938 6939 case kWhatFlush: 6940 { 6941 // We haven't even started yet, so we're flushed alright... 6942 sp<AMessage> notify = mCodec->mNotify->dup(); 6943 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6944 notify->post(); 6945 6946 return true; 6947 } 6948 6949 case kWhatSignalEndOfInputStream: 6950 { 6951 mCodec->onSignalEndOfInputStream(); 6952 return true; 6953 } 6954 6955 default: 6956 return BaseState::onMessageReceived(msg); 6957 } 6958} 6959 6960bool ACodec::IdleToExecutingState::onOMXEvent( 6961 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6962 switch (event) { 6963 case OMX_EventCmdComplete: 6964 { 6965 if (data1 != (OMX_U32)OMX_CommandStateSet 6966 || data2 != (OMX_U32)OMX_StateExecuting) { 6967 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6968 asString((OMX_COMMANDTYPE)data1), data1, 6969 asString((OMX_STATETYPE)data2), data2); 6970 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6971 return true; 6972 } 6973 6974 mCodec->mExecutingState->resume(); 6975 mCodec->changeState(mCodec->mExecutingState); 6976 6977 return true; 6978 } 6979 6980 default: 6981 return BaseState::onOMXEvent(event, data1, data2); 6982 } 6983} 6984 6985//////////////////////////////////////////////////////////////////////////////// 6986 6987ACodec::ExecutingState::ExecutingState(ACodec *codec) 6988 : BaseState(codec), 6989 mActive(false) { 6990} 6991 6992ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6993 OMX_U32 /* portIndex */) { 6994 return RESUBMIT_BUFFERS; 6995} 6996 6997void ACodec::ExecutingState::submitOutputMetaBuffers() { 6998 // submit as many buffers as there are input buffers with the codec 6999 // in case we are in port reconfiguring 7000 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 7001 BufferInfo *info = 
                &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            // One metadata output buffer per input buffer held by the component.
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Queue every output buffer we own with the component via fillBuffer() so it
// can produce output into them. Buffers currently held by the native window
// are left there (the surface returns them later); any other ownership state
// is unexpected and raises a fatal codec error.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // Surface still holds this one; skip it.
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // The fence fd is handed to the component along with the buffer;
        // clear our copy regardless of the call's outcome.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

// Submit all regular output buffers, plus metadata buffers when the codec is
// storing metadata in decoded buffers.
void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)activate the component: hand output buffers to the component and post
// all input buffers we own to the client for filling. No-op if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    // Reset render tracking and replay messages deferred during the
    // transition into this state.
    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Control-message dispatch while Executing: shutdown/flush move us to the
// corresponding transitional state; other requests are applied in place.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                // Wait for the component to reach Idle in ExecutingToIdleState.
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            // Flush both ports at once; completion arrives in FlushingState.
            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                // Best effort: failure is logged but not fatal.
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep feeding output metadata buffers until output EOS is seen.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters (from MediaCodec::setParameters). Recognized keys
// are forwarded to the component; unrecognized keys are silently ignored.
// Returns the first hard failure, OK otherwise.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Any non-zero value means "suspend input".
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        // The value is ignored; presence of the key requests a sync frame.
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
                setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Deliberately swallowed: this key is optional.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Asks the component to mark end-of-stream on the input port and notifies
// the client; a failure code is attached to the notification.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// OMX event dispatch while Executing. A port-definition change on the output
// port triggers the full port-reconfiguration dance (disable port, free
// buffers, switch to OutputPortSettingsChangedState); crop/intra-refresh-only
// changes were already absorbed by onOutputFormatChanged() above.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            // EOS flags are handled via the buffer path; nothing to do here.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// During reconfiguration, drained output buffers are freed (the port is being
// rebuilt) while input buffers keep flowing as usual.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defer all state-changing requests until the port reconfiguration completes;
// they are replayed when we re-enter ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the two-step port rebuild: after PortDisable completes we re-enable
// the port and allocate fresh buffers; after PortEnable completes we resubmit
// buffers (if still active) and return to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    // Release the memory dealer backing the old buffers.
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// While transitioning to Idle, flush requests are ignored and duplicate
// shutdown requests are absorbed.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the StateSet->Idle command to complete; port/EOS events are
// irrelevant during teardown and are swallowed.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is Idle AND every buffer has been returned to us,
// request the Loaded state, free all port buffers, and move on to
// IdleToLoadedState. Called after each event that may return a buffer.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                // Report the output-port failure only if input freed cleanly.
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.  This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    // The returned buffer may have been the last one outstanding.
    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    // The returned buffer may have been the last one outstanding.
    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// While transitioning to Loaded, duplicate shutdowns are absorbed and flush
// requests are rejected (they are invalid after a shutdown was requested).
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completes the shutdown leg: when StateSet->Loaded finishes, return to
// LoadedState. Anything else at this point is an error.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Track per-port flush completion; both must finish before we leave.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// Shutdown requests are deferred until the flush finishes; a second flush
// request is a no-op since one is already in flight.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Collects flush-complete notifications: components may report per-port
// completions (data2 == port index) and/or a single OMX_ALL completion.
// Port-settings changes arriving mid-flush are re-posted for later handling.
bool ACodec::FlushingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    ALOGV("[%s] FlushingState onOMXEvent(%u,%d)",
            mCodec->mComponentName.c_str(), event, (OMX_S32)data1);

    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandFlush) {
                ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState",
                        asString((OMX_COMMANDTYPE)data1), data1, data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
                if (mFlushComplete[data2]) {
                    ALOGW("Flush already completed for %s port",
                            data2 == kPortIndexInput ? "input" : "output");
                    return true;
                }
                mFlushComplete[data2] = true;

                if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
                    changeStateIfWeOwnAllBuffers();
                }
            } else if (data2 == OMX_ALL) {
                if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
                    ALOGW("received flush complete event for OMX_ALL before ports have been"
                            "flushed (%d/%d)",
                            mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
                    return false;
                }

                changeStateIfWeOwnAllBuffers();
            } else {
                ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
            }

            return true;
        }

        case OMX_EventPortSettingsChanged:
        {
            // Re-wrap the event as a message and defer it until after the
            // flush completes and we are back in ExecutingState.
            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
            msg->setInt32("type", omx_message::EVENT);
            msg->setInt32("node", mCodec->mNode);
            msg->setInt32("event", event);
            msg->setInt32("data1", data1);
            msg->setInt32("data2", data2);

            ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
                    mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;  // not reached: every case above returns.
}

void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    // The returned buffer may have been the last one outstanding.
    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    // The returned buffer may have been the last one outstanding.
    changeStateIfWeOwnAllBuffers();
}

// Once both ports report flush-complete and we own all buffers, reclaim any
// buffers still queued for rendering, reset EOS/render bookkeeping, notify
// the client, and return to ExecutingState.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Queries a component's capabilities (profiles/levels, color formats, feature
// flags) by temporarily allocating it on a fresh OMX node. The node is freed
// and the OMX client disconnected on every exit path. On success *caps holds
// the populated capabilities; errors from connect/allocate/role-setup are
// returned as-is.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs until the component runs
        // out (or we hit the kMaxIndicesToCheck safety cap).
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Emit the flexible equivalent once, ahead of the first
                // concrete format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe decoder feature flags by attempting to enable each feature.
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7968//static 7969status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7970 switch (numChannels) { 7971 case 1: 7972 map[0] = OMX_AUDIO_ChannelCF; 7973 break; 7974 case 2: 7975 map[0] = OMX_AUDIO_ChannelLF; 7976 map[1] = OMX_AUDIO_ChannelRF; 7977 break; 7978 case 3: 7979 map[0] = OMX_AUDIO_ChannelLF; 7980 map[1] = OMX_AUDIO_ChannelRF; 7981 map[2] = OMX_AUDIO_ChannelCF; 7982 break; 7983 case 4: 7984 map[0] = OMX_AUDIO_ChannelLF; 7985 map[1] = OMX_AUDIO_ChannelRF; 7986 map[2] = OMX_AUDIO_ChannelLR; 7987 map[3] = OMX_AUDIO_ChannelRR; 7988 break; 7989 case 5: 7990 map[0] = OMX_AUDIO_ChannelLF; 7991 map[1] = OMX_AUDIO_ChannelRF; 7992 map[2] = OMX_AUDIO_ChannelCF; 7993 map[3] = OMX_AUDIO_ChannelLR; 7994 map[4] = OMX_AUDIO_ChannelRR; 7995 break; 7996 case 6: 7997 map[0] = OMX_AUDIO_ChannelLF; 7998 map[1] = OMX_AUDIO_ChannelRF; 7999 map[2] = OMX_AUDIO_ChannelCF; 8000 map[3] = OMX_AUDIO_ChannelLFE; 8001 map[4] = OMX_AUDIO_ChannelLR; 8002 map[5] = OMX_AUDIO_ChannelRR; 8003 break; 8004 case 7: 8005 map[0] = OMX_AUDIO_ChannelLF; 8006 map[1] = OMX_AUDIO_ChannelRF; 8007 map[2] = OMX_AUDIO_ChannelCF; 8008 map[3] = OMX_AUDIO_ChannelLFE; 8009 map[4] = OMX_AUDIO_ChannelLR; 8010 map[5] = OMX_AUDIO_ChannelRR; 8011 map[6] = OMX_AUDIO_ChannelCS; 8012 break; 8013 case 8: 8014 map[0] = OMX_AUDIO_ChannelLF; 8015 map[1] = OMX_AUDIO_ChannelRF; 8016 map[2] = OMX_AUDIO_ChannelCF; 8017 map[3] = OMX_AUDIO_ChannelLFE; 8018 map[4] = OMX_AUDIO_ChannelLR; 8019 map[5] = OMX_AUDIO_ChannelRR; 8020 map[6] = OMX_AUDIO_ChannelLS; 8021 map[7] = OMX_AUDIO_ChannelRS; 8022 break; 8023 default: 8024 return -EINVAL; 8025 } 8026 8027 return OK; 8028} 8029 8030} // namespace android 8031