ACodec.cpp revision 5048de32f603c00926d61b928616e679ce88d693
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "include/DataConverter.h"
#include "omx/OMXUtils.h"

namespace android {

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into
// status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
// (continuation of makeNoSideEffectStatus' switch)
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Simple refcounted container so a whole batch of OMX messages can be
// attached to a single AMessage as one object.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide copy DataConverter, created exactly once
// (thread-safely) via pthread_once.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter;        // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Receives OMX callbacks and forwards each batch to ACodec as a single
// notification message carrying a MessageList.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // node id is taken from the first message of the batch
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all ACodec state-machine states; dispatches OMX messages
// and buffer traffic shared by every state.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
// (continuation of ACodec::BaseState's private section)
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts mNotify when the remote binder (the OMX service) dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
// (continuation of ACodec::ExecutingState)
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
// (continuation of ACodec::IdleToLoadedState's protected section)
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // flush completion is tracked per port (input and output)
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records a write fence on the buffer; warns if an unconsumed fence is
// being overwritten (that fence would be leaked).
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records a read fence on the buffer; warns if an unconsumed fence is
// being overwritten.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// Constructs all state objects of the codec state machine and enters the
// Uninitialized state.
ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronously asks the state machine to switch the output surface;
// returns the error delivered in the response message, if any.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port.  For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the codec's output to a new surface, migrating already-allocated
// buffers when the codec has started. Fails if the change is unsupported
// (tunneled mode, unset, new usage bits, or too few undequeued buffers).
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
// (continuation of ACodec::handleSetSurface)
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer());
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates (or registers) all buffers on the given port and notifies the
// client via kWhatBuffersAllocated. Output buffers may come from the native
// window; otherwise memory is carved out of a single MemoryDealer heap.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeGrallocSource) {
                bufSize = sizeof(VideoGrallocMetadata);
            } else if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    mem.clear();

                    void *ptr = NULL;
                    native_handle_t *native_handle = NULL;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize);
                    info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */);
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

status_t
ACodec::setupNativeWindowSizeFormatAndUsage( 943 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) { 944 OMX_PARAM_PORTDEFINITIONTYPE def; 945 InitOMXParams(&def); 946 def.nPortIndex = kPortIndexOutput; 947 948 status_t err = mOMX->getParameter( 949 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 950 951 if (err != OK) { 952 return err; 953 } 954 955 OMX_U32 usage = 0; 956 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 957 if (err != 0) { 958 ALOGW("querying usage flags from OMX IL component failed: %d", err); 959 // XXX: Currently this error is logged, but not fatal. 960 usage = 0; 961 } 962 int omxUsage = usage; 963 964 if (mFlags & kFlagIsGrallocUsageProtected) { 965 usage |= GRALLOC_USAGE_PROTECTED; 966 } 967 968 usage |= kVideoGrallocUsage; 969 *finalUsage = usage; 970 971 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 972 return setNativeWindowSizeFormatAndUsage( 973 nativeWindow, 974 def.format.video.nFrameWidth, 975 def.format.video.nFrameHeight, 976 def.format.video.eColorFormat, 977 mRotationDegrees, 978 usage); 979} 980 981status_t ACodec::configureOutputBuffersFromNativeWindow( 982 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 983 OMX_U32 *minUndequeuedBuffers) { 984 OMX_PARAM_PORTDEFINITIONTYPE def; 985 InitOMXParams(&def); 986 def.nPortIndex = kPortIndexOutput; 987 988 status_t err = mOMX->getParameter( 989 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 990 991 if (err == OK) { 992 err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits); 993 } 994 if (err != OK) { 995 mNativeWindowUsageBits = 0; 996 return err; 997 } 998 999 // Exits here for tunneled video playback codecs -- i.e. skips native window 1000 // buffer allocation step as this is managed by the tunneled OMX omponent 1001 // itself and explicitly sets def.nBufferCountActual to 0. 
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        // Tunneled components manage their own buffers; report zero to caller.
        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
    //
    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        // The component rejected this count; retry with fewer extra buffers.
        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates full-size output buffers: dequeues graphic buffers from the
// native window, registers each with the OMX component, then cancels the
// minimum-undequeued set back to the window.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf,
                &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            if (err == 0) {
                // Preserve the first error encountered while cancelling.
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata) on the output port instead of full frame buffers;
// graphic buffers are dequeued later, at fill time.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Initialize the embedded fence fd to "no fence pending".
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    // Preserve the first error encountered while cancelling.
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and submits it to the component
// via fillBuffer; decrements mMetadataBuffersToSubmit on each call.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
         mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // The fence fd is handed to the component; clear our reference to it.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Synchronously waits for fence |fd| to signal (when valid), bounded by the
// standard OMX fence timeout; logs a warning if the wait fails or times out.
// NOTE(review): the Fence object appears to adopt |fd| -- confirm against
// the android::Fence contract.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Human-readable name for a BufferInfo ownership state, used in logs/dumps.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs every buffer slot on |portIndex| (id, graphic buffer pointers,
// ownership state, dequeue generation) for debugging.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns an OWNED_BY_US buffer to the native window via cancelBuffer,
// handing over any pending write fence. Ownership is marked
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Associates the freshly dequeued |buf| with render-tracking info for this
// slot, then flushes any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback when a frame was rendered; dumps the render queue if the tracker
// could not match the frame.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose fences have signaled (up to |until|, optionally
// dropping incomplete ones), unlinks their render info from our buffer
// slots, and posts a kWhatOutputFramesRendered notification if any rendered.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index <
                mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and maps it back to one of our
// BufferInfo slots. Known-but-stale and unrecognized buffers are discarded
// and the dequeue retried; in metadata mode an unknown (new) buffer replaces
// the least-recently-dequeued slot still owned by the window.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
            " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on |portIndex| (newest first) and clears the port's
// memory dealer; returns the first error encountered, if any.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees only the output buffers that are neither held by the component nor
// being drained downstream; returns the first error encountered, if any.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the buffer at slot |i| on |portIndex|: cancels it back to the native
// window when we own it, releases it from the OMX component, closes any
// pending fence fd, and removes the slot from mBuffers.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the slot whose OMX buffer id matches |bufferID|;
// optionally reports the slot index through |index|. Returns NULL (and logs
// an error) when the id is unknown.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Looks up the standard OMX role string for |mime| and applies it to this
// component; returns BAD_VALUE for unknown MIME types and logs (but still
// propagates) any failure to set the role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a MIME type to its standard OMX component role (decoder or encoder
// variant); returns NULL for MIME types not in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ?
kMimeToRole[i].encoderRole 1676 : kMimeToRole[i].decoderRole; 1677} 1678 1679//static 1680status_t ACodec::setComponentRole( 1681 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1682 OMX_PARAM_COMPONENTROLETYPE roleParams; 1683 InitOMXParams(&roleParams); 1684 1685 strncpy((char *)roleParams.cRole, 1686 role, OMX_MAX_STRINGNAME_SIZE - 1); 1687 1688 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1689 1690 return omx->setParameter( 1691 node, OMX_IndexParamStandardComponentRole, 1692 &roleParams, sizeof(roleParams)); 1693} 1694 1695status_t ACodec::configureCodec( 1696 const char *mime, const sp<AMessage> &msg) { 1697 int32_t encoder; 1698 if (!msg->findInt32("encoder", &encoder)) { 1699 encoder = false; 1700 } 1701 1702 sp<AMessage> inputFormat = new AMessage; 1703 sp<AMessage> outputFormat = new AMessage; 1704 mConfigFormat = msg; 1705 1706 mIsEncoder = encoder; 1707 1708 mInputMetadataType = kMetadataBufferTypeInvalid; 1709 mOutputMetadataType = kMetadataBufferTypeInvalid; 1710 1711 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1712 1713 if (err != OK) { 1714 return err; 1715 } 1716 1717 int32_t bitRate = 0; 1718 // FLAC encoder doesn't need a bitrate, other encoders do 1719 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1720 && !msg->findInt32("bitrate", &bitRate)) { 1721 return INVALID_OPERATION; 1722 } 1723 1724 int32_t storeMeta; 1725 if (encoder 1726 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1727 && storeMeta != 0) { 1728 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1729 if (err != OK) { 1730 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1731 mComponentName.c_str(), err); 1732 1733 return err; 1734 } 1735 // For this specific case we could be using camera source even if storeMetaDataInBuffers 1736 // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. 
1737 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1738 mInputMetadataType = kMetadataBufferTypeCameraSource; 1739 } 1740 1741 uint32_t usageBits; 1742 if (mOMX->getParameter( 1743 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1744 &usageBits, sizeof(usageBits)) == OK) { 1745 inputFormat->setInt32( 1746 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1747 } 1748 } 1749 1750 int32_t prependSPSPPS = 0; 1751 if (encoder 1752 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1753 && prependSPSPPS != 0) { 1754 OMX_INDEXTYPE index; 1755 err = mOMX->getExtensionIndex( 1756 mNode, 1757 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1758 &index); 1759 1760 if (err == OK) { 1761 PrependSPSPPSToIDRFramesParams params; 1762 InitOMXParams(¶ms); 1763 params.bEnable = OMX_TRUE; 1764 1765 err = mOMX->setParameter( 1766 mNode, index, ¶ms, sizeof(params)); 1767 } 1768 1769 if (err != OK) { 1770 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1771 "IDR frames. (err %d)", err); 1772 1773 return err; 1774 } 1775 } 1776 1777 // Only enable metadata mode on encoder output if encoder can prepend 1778 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1779 // opaque handle, to which we don't have access. 
1780 int32_t video = !strncasecmp(mime, "video/", 6); 1781 mIsVideo = video; 1782 if (encoder && video) { 1783 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1784 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1785 && storeMeta != 0); 1786 1787 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1788 if (err != OK) { 1789 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1790 mComponentName.c_str(), err); 1791 } 1792 1793 if (!msg->findInt64( 1794 "repeat-previous-frame-after", 1795 &mRepeatFrameDelayUs)) { 1796 mRepeatFrameDelayUs = -1ll; 1797 } 1798 1799 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1800 mMaxPtsGapUs = -1ll; 1801 } 1802 1803 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1804 mMaxFps = -1; 1805 } 1806 1807 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1808 mTimePerCaptureUs = -1ll; 1809 } 1810 1811 if (!msg->findInt32( 1812 "create-input-buffers-suspended", 1813 (int32_t*)&mCreateInputBuffersSuspended)) { 1814 mCreateInputBuffersSuspended = false; 1815 } 1816 } 1817 1818 // NOTE: we only use native window for video decoders 1819 sp<RefBase> obj; 1820 bool haveNativeWindow = msg->findObject("native-window", &obj) 1821 && obj != NULL && video && !encoder; 1822 mUsingNativeWindow = haveNativeWindow; 1823 mLegacyAdaptiveExperiment = false; 1824 if (video && !encoder) { 1825 inputFormat->setInt32("adaptive-playback", false); 1826 1827 int32_t usageProtected; 1828 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1829 if (!haveNativeWindow) { 1830 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1831 return PERMISSION_DENIED; 1832 } 1833 mFlags |= kFlagIsGrallocUsageProtected; 1834 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1835 } 1836 1837 if (mFlags & kFlagIsSecure) { 1838 // use native_handles for secure input buffers 1839 err = mOMX->enableNativeBuffers( 1840 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1841 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1842 err = OK; // ignore error for now 1843 } 1844 } 1845 if (haveNativeWindow) { 1846 sp<ANativeWindow> nativeWindow = 1847 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1848 1849 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1850 int32_t autoFrc; 1851 if (msg->findInt32("auto-frc", &autoFrc)) { 1852 bool enabled = autoFrc; 1853 OMX_CONFIG_BOOLEANTYPE config; 1854 InitOMXParams(&config); 1855 config.bEnabled = (OMX_BOOL)enabled; 1856 status_t temp = mOMX->setConfig( 1857 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1858 &config, sizeof(config)); 1859 if (temp == OK) { 1860 outputFormat->setInt32("auto-frc", enabled); 1861 } else if (enabled) { 1862 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1863 } 1864 } 1865 // END of temporary support for automatic FRC 1866 1867 int32_t tunneled; 1868 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1869 tunneled != 0) { 1870 ALOGI("Configuring TUNNELED video playback."); 1871 mTunneled = true; 1872 1873 int32_t audioHwSync = 0; 1874 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1875 ALOGW("No Audio HW Sync provided for video tunnel"); 1876 } 1877 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1878 if (err != OK) { 1879 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1880 audioHwSync, nativeWindow.get()); 1881 return err; 1882 } 1883 1884 int32_t maxWidth = 0, maxHeight = 0; 1885 if (msg->findInt32("max-width", &maxWidth) && 1886 msg->findInt32("max-height", &maxHeight)) { 1887 1888 err = mOMX->prepareForAdaptivePlayback( 1889 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1890 if (err != OK) { 1891 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1892 mComponentName.c_str(), err); 1893 // allow failure 1894 err = OK; 1895 } else { 1896 inputFormat->setInt32("max-width", maxWidth); 1897 
inputFormat->setInt32("max-height", maxHeight); 1898 inputFormat->setInt32("adaptive-playback", true); 1899 } 1900 } 1901 } else { 1902 ALOGV("Configuring CPU controlled video playback."); 1903 mTunneled = false; 1904 1905 // Explicity reset the sideband handle of the window for 1906 // non-tunneled video in case the window was previously used 1907 // for a tunneled video playback. 1908 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1909 if (err != OK) { 1910 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1911 return err; 1912 } 1913 1914 // Always try to enable dynamic output buffers on native surface 1915 err = mOMX->storeMetaDataInBuffers( 1916 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1917 if (err != OK) { 1918 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1919 mComponentName.c_str(), err); 1920 1921 // if adaptive playback has been requested, try JB fallback 1922 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1923 // LARGE MEMORY REQUIREMENT 1924 1925 // we will not do adaptive playback on software accessed 1926 // surfaces as they never had to respond to changes in the 1927 // crop window, and we don't trust that they will be able to. 
1928 int usageBits = 0; 1929 bool canDoAdaptivePlayback; 1930 1931 if (nativeWindow->query( 1932 nativeWindow.get(), 1933 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1934 &usageBits) != OK) { 1935 canDoAdaptivePlayback = false; 1936 } else { 1937 canDoAdaptivePlayback = 1938 (usageBits & 1939 (GRALLOC_USAGE_SW_READ_MASK | 1940 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1941 } 1942 1943 int32_t maxWidth = 0, maxHeight = 0; 1944 if (canDoAdaptivePlayback && 1945 msg->findInt32("max-width", &maxWidth) && 1946 msg->findInt32("max-height", &maxHeight)) { 1947 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1948 mComponentName.c_str(), maxWidth, maxHeight); 1949 1950 err = mOMX->prepareForAdaptivePlayback( 1951 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1952 maxHeight); 1953 ALOGW_IF(err != OK, 1954 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1955 mComponentName.c_str(), err); 1956 1957 if (err == OK) { 1958 inputFormat->setInt32("max-width", maxWidth); 1959 inputFormat->setInt32("max-height", maxHeight); 1960 inputFormat->setInt32("adaptive-playback", true); 1961 } 1962 } 1963 // allow failure 1964 err = OK; 1965 } else { 1966 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1967 mComponentName.c_str()); 1968 CHECK(storingMetadataInDecodedBuffers()); 1969 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1970 "legacy-adaptive", !msg->contains("no-experiments")); 1971 1972 inputFormat->setInt32("adaptive-playback", true); 1973 } 1974 1975 int32_t push; 1976 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1977 && push != 0) { 1978 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1979 } 1980 } 1981 1982 int32_t rotationDegrees; 1983 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1984 mRotationDegrees = rotationDegrees; 1985 } else { 1986 mRotationDegrees = 0; 1987 } 1988 } 1989 1990 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1991 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1992 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1993 1994 if (video) { 1995 // determine need for software renderer 1996 bool usingSwRenderer = false; 1997 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1998 usingSwRenderer = true; 1999 haveNativeWindow = false; 2000 } 2001 2002 if (encoder) { 2003 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2004 } else { 2005 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2006 } 2007 2008 if (err != OK) { 2009 return err; 2010 } 2011 2012 if (haveNativeWindow) { 2013 mNativeWindow = static_cast<Surface *>(obj.get()); 2014 } 2015 2016 // initialize native window now to get actual output format 2017 // TODO: this is needed for some encoders even though they don't use native window 2018 err = initNativeWindow(); 2019 if (err != OK) { 2020 return err; 2021 } 2022 2023 // fallback for devices that do not handle flex-YUV for native buffers 2024 if (haveNativeWindow) { 2025 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2026 if (msg->findInt32("color-format", &requestedColorFormat) && 2027 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2028 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2029 if (err != OK) { 2030 return err; 2031 } 2032 int32_t colorFormat = OMX_COLOR_FormatUnused; 2033 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2034 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2035 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2036 return BAD_VALUE; 2037 } 2038 ALOGD("[%s] Requested output format %#x and got %#x.", 2039 mComponentName.c_str(), requestedColorFormat, colorFormat); 2040 if (!isFlexibleColorFormat( 2041 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2042 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2043 // device did not handle flex-YUV request for native window, fall back 2044 // to SW renderer 2045 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2046 mNativeWindow.clear(); 2047 mNativeWindowUsageBits = 0; 2048 haveNativeWindow = false; 2049 usingSwRenderer = true; 2050 if (storingMetadataInDecodedBuffers()) { 2051 err = mOMX->storeMetaDataInBuffers( 2052 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2053 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2054 // TODO: implement adaptive-playback support for bytebuffer mode. 2055 // This is done by SW codecs, but most HW codecs don't support it. 2056 inputFormat->setInt32("adaptive-playback", false); 2057 } 2058 if (err == OK) { 2059 err = mOMX->enableNativeBuffers( 2060 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2061 } 2062 if (mFlags & kFlagIsGrallocUsageProtected) { 2063 // fallback is not supported for protected playback 2064 err = PERMISSION_DENIED; 2065 } else if (err == OK) { 2066 err = setupVideoDecoder( 2067 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2068 } 2069 } 2070 } 2071 } 2072 2073 if (usingSwRenderer) { 2074 outputFormat->setInt32("using-sw-renderer", 1); 2075 } 2076 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2077 int32_t numChannels, sampleRate; 2078 if (!msg->findInt32("channel-count", &numChannels) 2079 || !msg->findInt32("sample-rate", &sampleRate)) { 2080 // Since we did not always check for these, leave them optional 2081 // and have the decoder figure it all out. 2082 err = OK; 2083 } else { 2084 err = setupRawAudioFormat( 2085 encoder ? 
kPortIndexInput : kPortIndexOutput, 2086 sampleRate, 2087 numChannels); 2088 } 2089 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2090 int32_t numChannels, sampleRate; 2091 if (!msg->findInt32("channel-count", &numChannels) 2092 || !msg->findInt32("sample-rate", &sampleRate)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t isADTS, aacProfile; 2096 int32_t sbrMode; 2097 int32_t maxOutputChannelCount; 2098 int32_t pcmLimiterEnable; 2099 drcParams_t drc; 2100 if (!msg->findInt32("is-adts", &isADTS)) { 2101 isADTS = 0; 2102 } 2103 if (!msg->findInt32("aac-profile", &aacProfile)) { 2104 aacProfile = OMX_AUDIO_AACObjectNull; 2105 } 2106 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2107 sbrMode = -1; 2108 } 2109 2110 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2111 maxOutputChannelCount = -1; 2112 } 2113 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2114 // value is unknown 2115 pcmLimiterEnable = -1; 2116 } 2117 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2118 // value is unknown 2119 drc.encodedTargetLevel = -1; 2120 } 2121 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2122 // value is unknown 2123 drc.drcCut = -1; 2124 } 2125 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2126 // value is unknown 2127 drc.drcBoost = -1; 2128 } 2129 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2130 // value is unknown 2131 drc.heavyCompression = -1; 2132 } 2133 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2134 // value is unknown 2135 drc.targetRefLevel = -1; 2136 } 2137 2138 err = setupAACCodec( 2139 encoder, numChannels, sampleRate, bitRate, aacProfile, 2140 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2141 pcmLimiterEnable); 2142 } 2143 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2144 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2145 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2146 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2147 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2148 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2149 // These are PCM-like formats with a fixed sample rate but 2150 // a variable number of channels. 2151 2152 int32_t numChannels; 2153 if (!msg->findInt32("channel-count", &numChannels)) { 2154 err = INVALID_OPERATION; 2155 } else { 2156 int32_t sampleRate; 2157 if (!msg->findInt32("sample-rate", &sampleRate)) { 2158 sampleRate = 8000; 2159 } 2160 err = setupG711Codec(encoder, sampleRate, numChannels); 2161 } 2162 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2163 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2164 if (encoder && 2165 (!msg->findInt32("channel-count", &numChannels) 2166 || !msg->findInt32("sample-rate", &sampleRate))) { 2167 ALOGE("missing channel count or sample rate for FLAC encoder"); 2168 err = INVALID_OPERATION; 2169 } else { 2170 if (encoder) { 2171 if (!msg->findInt32( 2172 "complexity", &compressionLevel) && 2173 !msg->findInt32( 2174 "flac-compression-level", &compressionLevel)) { 2175 compressionLevel = 5; // default FLAC compression level 2176 } else if (compressionLevel < 0) { 2177 ALOGW("compression level %d outside [0..8] range, " 2178 "using 0", 2179 compressionLevel); 2180 compressionLevel = 0; 2181 } else if (compressionLevel > 8) { 2182 ALOGW("compression level %d outside [0..8] range, " 2183 "using 8", 2184 compressionLevel); 2185 compressionLevel = 8; 2186 } 2187 } 2188 err = setupFlacCodec( 2189 encoder, numChannels, sampleRate, compressionLevel); 2190 } 2191 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2192 int32_t numChannels, sampleRate; 2193 if (encoder 2194 || !msg->findInt32("channel-count", &numChannels) 2195 || !msg->findInt32("sample-rate", &sampleRate)) { 2196 err = INVALID_OPERATION; 2197 } else { 2198 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2199 } 2200 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2201 int32_t numChannels; 2202 int32_t sampleRate; 2203 if (!msg->findInt32("channel-count", &numChannels) 2204 || !msg->findInt32("sample-rate", &sampleRate)) { 2205 err = INVALID_OPERATION; 2206 } else { 2207 err = setupAC3Codec(encoder, numChannels, sampleRate); 2208 } 2209 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2210 int32_t numChannels; 2211 int32_t sampleRate; 2212 if (!msg->findInt32("channel-count", &numChannels) 2213 || !msg->findInt32("sample-rate", &sampleRate)) { 2214 err = INVALID_OPERATION; 2215 } else { 2216 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2217 } 2218 } 2219 2220 if (err != OK) { 2221 return err; 2222 } 2223 2224 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2225 mEncoderDelay = 0; 2226 } 2227 2228 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2229 mEncoderPadding = 0; 2230 } 2231 2232 if (msg->findInt32("channel-mask", &mChannelMask)) { 2233 mChannelMaskPresent = true; 2234 } else { 2235 mChannelMaskPresent = false; 2236 } 2237 2238 int32_t maxInputSize; 2239 if (msg->findInt32("max-input-size", &maxInputSize)) { 2240 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2241 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2242 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2243 } 2244 2245 int32_t priority; 2246 if (msg->findInt32("priority", &priority)) { 2247 err = setPriority(priority); 2248 } 2249 2250 int32_t rateInt = -1; 2251 float rateFloat = -1; 2252 if (!msg->findFloat("operating-rate", &rateFloat)) { 2253 msg->findInt32("operating-rate", &rateInt); 2254 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    // Read back the formats the component actually settled on; only publish
    // them to mInputFormat/mOutputFormat if both port queries succeed.
    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    // For audio: if the PCM encoding requested by the client (pcmEncoding)
    // differs from the one the codec reports, install an AudioConverter on the
    // relevant port and advertise the client's encoding in the published format.
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // encoder: convert client encoding -> codec encoding on the input port
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // decoder: convert codec encoding -> client encoding on the output port
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Communicates the requested codec priority via the OMX_IndexConfigPriority
// extension. Best-effort: a component that rejects the config only triggers a
// log line and the function still returns OK. Negative priorities are invalid.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        // unsupported priority config is not a configuration failure
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Communicates the "operating-rate" to the component via
// OMX_IndexConfigOperatingRate. Video rates are converted to Q16 fixed point
// (hence the 65535 cap and the *65536 below); audio rates are passed as plain
// integers. Like setPriority, an unsupported config is logged and tolerated.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        // Q16 fixed-point; +0.5f rounds to nearest
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        // best-effort, mirror setPriority behavior
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's intra-refresh period (in frames). Tries the Android
// vendor config first; if that fails, falls back to the standard OMX cyclic
// intra-refresh parameter and derives the period from nCirMBs and the output
// frame size. Reports 0 (meaning "no intra refresh") when neither is available.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // no cyclic refresh configured -> report "disabled"
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    // period = total MBs per frame / MBs refreshed per frame (rounded up)
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra-refresh period (frames). Tries the Android vendor config
// first; if the component rejects it and we are still configuring (the only
// state where setParameter is allowed), falls back to the standard OMX cyclic
// intra-refresh parameter, translating the period into MBs-per-frame.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        // MBs per frame to refresh = total MBs / desired period (rounded up)
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Grows the port's nBufferSize to at least |size| (no-op if already large
// enough), then reads the definition back to verify the component honored it.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // read back to verify: components may silently clamp/ignore the request
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Walks the port's enumerated audio formats looking for |desiredFormat| and,
// if found within kMaxIndicesToCheck entries, selects it via setParameter.
// Returns ERROR_UNSUPPORTED when the format is not offered by the component.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            // |format| now holds the matching entry; fall through to set it
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // give up after a bounded number of entries
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC.
// Encoder path: raw PCM on the input port, AAC coding plus an
// OMX_AUDIO_PARAM_AACPROFILETYPE (channels, rate, bitrate, profile, SBR tools)
// on the output port. ADTS output is not supported for encoding (-EINVAL).
// Decoder path: raw PCM on the output port, AAC profile (MP4FF or ADTS stream
// format) on the input port, plus optional Android AAC presentation parameters
// (DRC, max output channels, PCM limiter) that are applied best-effort.
// sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // PCM side of the codec: input port for encoders, output port for decoders
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // decoder path: AAC profile on the input port
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    // -1 values mean "unknown / leave to component" per the caller's defaults
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

// Configures AC3 decoding: raw PCM on the output port, then Android AC3
// parameters (channels, sample rate) on the input port. Encoding is rejected.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
                kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures E-AC3 decoding; mirrors setupAC3Codec but uses the Android EAC3
// parameter index. Encoding is rejected.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a target bitrate (bits/sec) to the closest AMR band mode: the lowest
// mode whose nominal bitrate is >= |bps|, capped at the highest mode
// (WB8 = 23850 bps for wideband, NB7 = 12200 bps for narrowband).
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR (narrowband or wideband): sets frame format and band mode on
// the coded port (output for encoders, input for decoders), then raw mono PCM
// on the opposite port at the fixed AMR sample rate (16 kHz WB / 8 kHz NB).
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 decoding: raw PCM on the input port. The sample rate comes
// from the caller (8000 Hz default is applied by the caller when absent).
// Encoding is not supported.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC: for encoders, programs the compression level (0..8, already
// clamped by the caller) on the output port; then raw PCM on the input port
// (encoder) or output port (decoder).
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Programs linear PCM on |portIndex|: sets the port's coding to PCM, then
// fills in channel count, sample rate, and sample layout derived from
// |encoding| (8-bit unsigned, 16-bit signed, or 32-bit float), interleaved.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone == 0 == OK, so this silently skips
        // programming the PCM params when the channel count has no OMX
        // mapping -- looks like deliberate best-effort, but the OMX error
        // enum as a status_t is suspect; confirm intent before changing.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
            OK && encoding != kAudioEncodingPcm16bit) {
        // fall back to signed 16-bit PCM, the encoding every codec handles
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Puts the output port into tunneled playback mode: asks the component to
// create a sideband handle (synchronized to |audioHwSync|) and attaches that
// handle to the native window as its sideband stream.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the port's supported (compression, color) format pairs and
// selects the first entry matching the requested pair. A requested flexible
// color format is substituted with the component's concrete equivalent when
// one exists. Contains a workaround for OMX.TI.Video.encoder, which reports
// inconsistent values in the field not relevant to the port. Enumeration is
// bounded by kMaxIndicesToCheck; UNKNOWN_ERROR if no match is found.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            // this format is flexible
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
3013// 3014// Suggested preference order: 3015// - optimal format for texture rendering (mediaplayer behavior) 3016// - optimal SW readable & texture renderable format (flex-YUV support) 3017// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3018// - legacy "usable" standard formats 3019// 3020// For legacy support, we prefer a standard format, but will settle for a SW readable 3021// flex-YUV format. 3022status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3023 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3024 InitOMXParams(&format); 3025 format.nPortIndex = kPortIndexOutput; 3026 3027 InitOMXParams(&legacyFormat); 3028 // this field will change when we find a suitable legacy format 3029 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3030 3031 for (OMX_U32 index = 0; ; ++index) { 3032 format.nIndex = index; 3033 status_t err = mOMX->getParameter( 3034 mNode, OMX_IndexParamVideoPortFormat, 3035 &format, sizeof(format)); 3036 if (err != OK) { 3037 // no more formats, pick legacy format if found 3038 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3039 memcpy(&format, &legacyFormat, sizeof(format)); 3040 break; 3041 } 3042 return err; 3043 } 3044 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3045 return OMX_ErrorBadParameter; 3046 } 3047 if (!getLegacyFlexibleFormat) { 3048 break; 3049 } 3050 // standard formats that were exposed to users before 3051 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3052 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3053 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3054 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3055 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3056 break; 3057 } 3058 // find best legacy non-standard format 3059 OMX_U32 flexibleEquivalent; 3060 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3061 && isFlexibleColorFormat( 3062 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3063 &flexibleEquivalent) 3064 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3065 memcpy(&legacyFormat, &format, sizeof(format)); 3066 } 3067 } 3068 return mOMX->setParameter( 3069 mNode, OMX_IndexParamVideoPortFormat, 3070 &format, sizeof(format)); 3071} 3072 3073static const struct VideoCodingMapEntry { 3074 const char *mMime; 3075 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3076} kVideoCodingMapEntry[] = { 3077 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3078 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3079 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3080 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3081 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3082 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3083 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3084 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3085}; 3086 3087static status_t GetVideoCodingTypeFromMime( 3088 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3089 for (size_t i = 0; 3090 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3091 ++i) { 3092 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3093 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3094 return OK; 3095 } 3096 } 3097 3098 *codingType = OMX_VIDEO_CodingUnused; 3099 3100 return ERROR_UNSUPPORTED; 3101} 3102 3103static status_t GetMimeTypeForVideoCoding( 3104 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3105 for (size_t i = 0; 3106 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3107 ++i) { 3108 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3109 *mime = kVideoCodingMapEntry[i].mMime; 3110 return OK; 3111 } 3112 } 3113 3114 mime->clear(); 3115 3116 return ERROR_UNSUPPORTED; 3117} 3118 3119status_t ACodec::setupVideoDecoder( 3120 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3121 bool usingSwRenderer, 
sp<AMessage> &outputFormat) { 3122 int32_t width, height; 3123 if (!msg->findInt32("width", &width) 3124 || !msg->findInt32("height", &height)) { 3125 return INVALID_OPERATION; 3126 } 3127 3128 OMX_VIDEO_CODINGTYPE compressionFormat; 3129 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3130 3131 if (err != OK) { 3132 return err; 3133 } 3134 3135 err = setVideoPortFormatType( 3136 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3137 3138 if (err != OK) { 3139 return err; 3140 } 3141 3142 int32_t tmp; 3143 if (msg->findInt32("color-format", &tmp)) { 3144 OMX_COLOR_FORMATTYPE colorFormat = 3145 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3146 err = setVideoPortFormatType( 3147 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3148 if (err != OK) { 3149 ALOGW("[%s] does not support color format %d", 3150 mComponentName.c_str(), colorFormat); 3151 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3152 } 3153 } else { 3154 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3155 } 3156 3157 if (err != OK) { 3158 return err; 3159 } 3160 3161 int32_t frameRateInt; 3162 float frameRateFloat; 3163 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3164 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3165 frameRateInt = -1; 3166 } 3167 frameRateFloat = (float)frameRateInt; 3168 } 3169 3170 err = setVideoFormatOnPort( 3171 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3172 3173 if (err != OK) { 3174 return err; 3175 } 3176 3177 err = setVideoFormatOnPort( 3178 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3179 3180 if (err != OK) { 3181 return err; 3182 } 3183 3184 err = setColorAspectsForVideoDecoder( 3185 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3186 if (err == ERROR_UNSUPPORTED) { // support is optional 3187 err = OK; 3188 } 3189 return err; 3190} 3191 3192status_t 
ACodec::initDescribeColorAspectsIndex() { 3193 status_t err = mOMX->getExtensionIndex( 3194 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3195 if (err != OK) { 3196 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3197 } 3198 return err; 3199} 3200 3201status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3202 status_t err = ERROR_UNSUPPORTED; 3203 if (mDescribeColorAspectsIndex) { 3204 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3205 } 3206 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3207 mComponentName.c_str(), 3208 params.sAspects.mRange, asString(params.sAspects.mRange), 3209 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3210 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3211 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3212 err, asString(err)); 3213 3214 if (verify && err == OK) { 3215 err = getCodecColorAspects(params); 3216 } 3217 3218 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3219 "[%s] getting color aspects failed even though codec advertises support", 3220 mComponentName.c_str()); 3221 return err; 3222} 3223 3224status_t ACodec::setColorAspectsForVideoDecoder( 3225 int32_t width, int32_t height, bool usingNativeWindow, 3226 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3227 DescribeColorAspectsParams params; 3228 InitOMXParams(¶ms); 3229 params.nPortIndex = kPortIndexOutput; 3230 3231 getColorAspectsFromFormat(configFormat, params.sAspects); 3232 if (usingNativeWindow) { 3233 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3234 // The default aspects will be set back to the output format during the 3235 // getFormat phase of configure(). Set non-Unspecified values back into the 3236 // format, in case component does not support this enumeration. 
3237 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3238 } 3239 3240 (void)initDescribeColorAspectsIndex(); 3241 3242 // communicate color aspects to codec 3243 return setCodecColorAspects(params); 3244} 3245 3246status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3247 status_t err = ERROR_UNSUPPORTED; 3248 if (mDescribeColorAspectsIndex) { 3249 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3250 } 3251 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3252 mComponentName.c_str(), 3253 params.sAspects.mRange, asString(params.sAspects.mRange), 3254 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3255 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3256 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3257 err, asString(err)); 3258 if (params.bRequestingDataSpace) { 3259 ALOGV("for dataspace %#x", params.nDataSpace); 3260 } 3261 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3262 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3263 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3264 mComponentName.c_str()); 3265 } 3266 return err; 3267} 3268 3269status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3270 DescribeColorAspectsParams params; 3271 InitOMXParams(¶ms); 3272 params.nPortIndex = kPortIndexInput; 3273 status_t err = getCodecColorAspects(params); 3274 if (err == OK) { 3275 // we only set encoder input aspects if codec supports them 3276 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3277 } 3278 return err; 3279} 3280 3281status_t ACodec::getDataSpace( 3282 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3283 bool tryCodec) { 3284 status_t err = OK; 3285 if (tryCodec) { 3286 // request dataspace guidance from codec. 
3287 params.bRequestingDataSpace = OMX_TRUE; 3288 err = getCodecColorAspects(params); 3289 params.bRequestingDataSpace = OMX_FALSE; 3290 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3291 *dataSpace = (android_dataspace)params.nDataSpace; 3292 return err; 3293 } else if (err == ERROR_UNSUPPORTED) { 3294 // ignore not-implemented error for dataspace requests 3295 err = OK; 3296 } 3297 } 3298 3299 // this returns legacy versions if available 3300 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3301 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3302 "and dataspace %#x", 3303 mComponentName.c_str(), 3304 params.sAspects.mRange, asString(params.sAspects.mRange), 3305 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3306 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3307 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3308 *dataSpace); 3309 return err; 3310} 3311 3312 3313status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3314 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3315 android_dataspace *dataSpace) { 3316 DescribeColorAspectsParams params; 3317 InitOMXParams(¶ms); 3318 params.nPortIndex = kPortIndexOutput; 3319 3320 // reset default format and get resulting format 3321 getColorAspectsFromFormat(configFormat, params.sAspects); 3322 if (dataSpace != NULL) { 3323 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3324 } 3325 status_t err = setCodecColorAspects(params, true /* readBack */); 3326 3327 // we always set specified aspects for decoders 3328 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3329 3330 if (dataSpace != NULL) { 3331 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3332 if (err == OK) { 3333 err = res; 3334 } 3335 } 3336 3337 return err; 3338} 3339 3340// initial video encoder setup for bytebuffer mode 
3341status_t ACodec::setColorAspectsForVideoEncoder( 3342 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3343 // copy config to output format as this is not exposed via getFormat 3344 copyColorConfig(configFormat, outputFormat); 3345 3346 DescribeColorAspectsParams params; 3347 InitOMXParams(¶ms); 3348 params.nPortIndex = kPortIndexInput; 3349 getColorAspectsFromFormat(configFormat, params.sAspects); 3350 3351 (void)initDescribeColorAspectsIndex(); 3352 3353 int32_t usingRecorder; 3354 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3355 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3356 int32_t width, height; 3357 if (configFormat->findInt32("width", &width) 3358 && configFormat->findInt32("height", &height)) { 3359 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3360 status_t err = getDataSpace( 3361 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3362 if (err != OK) { 3363 return err; 3364 } 3365 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3366 } 3367 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3368 } 3369 3370 // communicate color aspects to codec, but do not allow change of the platform aspects 3371 ColorAspects origAspects = params.sAspects; 3372 for (int triesLeft = 2; --triesLeft >= 0; ) { 3373 status_t err = setCodecColorAspects(params, true /* readBack */); 3374 if (err != OK 3375 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3376 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3377 return err; 3378 } 3379 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3380 mComponentName.c_str()); 3381 } 3382 return OK; 3383} 3384 3385// subsequent initial video encoder setup for surface mode 3386status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3387 android_dataspace *dataSpace /* nonnull */) { 3388 
DescribeColorAspectsParams params; 3389 InitOMXParams(¶ms); 3390 params.nPortIndex = kPortIndexInput; 3391 ColorAspects &aspects = params.sAspects; 3392 3393 // reset default format and store resulting format into both input and output formats 3394 getColorAspectsFromFormat(mConfigFormat, aspects); 3395 int32_t width, height; 3396 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3397 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3398 } 3399 setColorAspectsIntoFormat(aspects, mInputFormat); 3400 setColorAspectsIntoFormat(aspects, mOutputFormat); 3401 3402 // communicate color aspects to codec, but do not allow any change 3403 ColorAspects origAspects = aspects; 3404 status_t err = OK; 3405 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3406 status_t err = setCodecColorAspects(params, true /* readBack */); 3407 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3408 break; 3409 } 3410 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3411 mComponentName.c_str()); 3412 } 3413 3414 *dataSpace = HAL_DATASPACE_BT709; 3415 aspects = origAspects; // restore desired color aspects 3416 status_t res = getDataSpace( 3417 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3418 if (err == OK) { 3419 err = res; 3420 } 3421 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3422 mInputFormat->setBuffer( 3423 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3424 3425 // update input format with codec supported color aspects (basically set unsupported 3426 // aspects to Unspecified) 3427 if (err == OK) { 3428 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3429 } 3430 3431 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3432 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3433 3434 
return err; 3435} 3436 3437status_t ACodec::setupVideoEncoder( 3438 const char *mime, const sp<AMessage> &msg, 3439 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3440 int32_t tmp; 3441 if (!msg->findInt32("color-format", &tmp)) { 3442 return INVALID_OPERATION; 3443 } 3444 3445 OMX_COLOR_FORMATTYPE colorFormat = 3446 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3447 3448 status_t err = setVideoPortFormatType( 3449 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3450 3451 if (err != OK) { 3452 ALOGE("[%s] does not support color format %d", 3453 mComponentName.c_str(), colorFormat); 3454 3455 return err; 3456 } 3457 3458 /* Input port configuration */ 3459 3460 OMX_PARAM_PORTDEFINITIONTYPE def; 3461 InitOMXParams(&def); 3462 3463 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3464 3465 def.nPortIndex = kPortIndexInput; 3466 3467 err = mOMX->getParameter( 3468 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3469 3470 if (err != OK) { 3471 return err; 3472 } 3473 3474 int32_t width, height, bitrate; 3475 if (!msg->findInt32("width", &width) 3476 || !msg->findInt32("height", &height) 3477 || !msg->findInt32("bitrate", &bitrate)) { 3478 return INVALID_OPERATION; 3479 } 3480 3481 video_def->nFrameWidth = width; 3482 video_def->nFrameHeight = height; 3483 3484 int32_t stride; 3485 if (!msg->findInt32("stride", &stride)) { 3486 stride = width; 3487 } 3488 3489 video_def->nStride = stride; 3490 3491 int32_t sliceHeight; 3492 if (!msg->findInt32("slice-height", &sliceHeight)) { 3493 sliceHeight = height; 3494 } 3495 3496 video_def->nSliceHeight = sliceHeight; 3497 3498 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3499 3500 float frameRate; 3501 if (!msg->findFloat("frame-rate", &frameRate)) { 3502 int32_t tmp; 3503 if (!msg->findInt32("frame-rate", &tmp)) { 3504 return INVALID_OPERATION; 3505 } 3506 frameRate = (float)tmp; 3507 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3508 } 3509 3510 
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3511 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3512 // this is redundant as it was already set up in setVideoPortFormatType 3513 // FIXME for now skip this only for flexible YUV formats 3514 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3515 video_def->eColorFormat = colorFormat; 3516 } 3517 3518 err = mOMX->setParameter( 3519 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3520 3521 if (err != OK) { 3522 ALOGE("[%s] failed to set input port definition parameters.", 3523 mComponentName.c_str()); 3524 3525 return err; 3526 } 3527 3528 /* Output port configuration */ 3529 3530 OMX_VIDEO_CODINGTYPE compressionFormat; 3531 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3532 3533 if (err != OK) { 3534 return err; 3535 } 3536 3537 err = setVideoPortFormatType( 3538 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3539 3540 if (err != OK) { 3541 ALOGE("[%s] does not support compression format %d", 3542 mComponentName.c_str(), compressionFormat); 3543 3544 return err; 3545 } 3546 3547 def.nPortIndex = kPortIndexOutput; 3548 3549 err = mOMX->getParameter( 3550 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3551 3552 if (err != OK) { 3553 return err; 3554 } 3555 3556 video_def->nFrameWidth = width; 3557 video_def->nFrameHeight = height; 3558 video_def->xFramerate = 0; 3559 video_def->nBitrate = bitrate; 3560 video_def->eCompressionFormat = compressionFormat; 3561 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3562 3563 err = mOMX->setParameter( 3564 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3565 3566 if (err != OK) { 3567 ALOGE("[%s] failed to set output port definition parameters.", 3568 mComponentName.c_str()); 3569 3570 return err; 3571 } 3572 3573 int32_t intraRefreshPeriod = 0; 3574 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3575 && intraRefreshPeriod >= 0) { 3576 err = 
setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3577 if (err != OK) { 3578 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3579 mComponentName.c_str()); 3580 err = OK; 3581 } 3582 } 3583 3584 switch (compressionFormat) { 3585 case OMX_VIDEO_CodingMPEG4: 3586 err = setupMPEG4EncoderParameters(msg); 3587 break; 3588 3589 case OMX_VIDEO_CodingH263: 3590 err = setupH263EncoderParameters(msg); 3591 break; 3592 3593 case OMX_VIDEO_CodingAVC: 3594 err = setupAVCEncoderParameters(msg); 3595 break; 3596 3597 case OMX_VIDEO_CodingHEVC: 3598 err = setupHEVCEncoderParameters(msg); 3599 break; 3600 3601 case OMX_VIDEO_CodingVP8: 3602 case OMX_VIDEO_CodingVP9: 3603 err = setupVPXEncoderParameters(msg); 3604 break; 3605 3606 default: 3607 break; 3608 } 3609 3610 // Set up color aspects on input, but propagate them to the output format, as they will 3611 // not be read back from encoder. 3612 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3613 if (err == ERROR_UNSUPPORTED) { 3614 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3615 err = OK; 3616 } 3617 3618 if (err == OK) { 3619 ALOGI("setupVideoEncoder succeeded"); 3620 } 3621 3622 return err; 3623} 3624 3625status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3626 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3627 InitOMXParams(¶ms); 3628 params.nPortIndex = kPortIndexOutput; 3629 3630 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3631 3632 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3633 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3634 int32_t mbs; 3635 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3636 return INVALID_OPERATION; 3637 } 3638 params.nCirMBs = mbs; 3639 } 3640 3641 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3642 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3643 int32_t mbs; 3644 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3645 return INVALID_OPERATION; 3646 } 3647 params.nAirMBs = mbs; 3648 3649 int32_t ref; 3650 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3651 return INVALID_OPERATION; 3652 } 3653 params.nAirRef = ref; 3654 } 3655 3656 status_t err = mOMX->setParameter( 3657 mNode, OMX_IndexParamVideoIntraRefresh, 3658 ¶ms, sizeof(params)); 3659 return err; 3660} 3661 3662static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3663 if (iFramesInterval < 0) { 3664 return 0xFFFFFFFF; 3665 } else if (iFramesInterval == 0) { 3666 return 0; 3667 } 3668 OMX_U32 ret = frameRate * iFramesInterval; 3669 return ret; 3670} 3671 3672static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3673 int32_t tmp; 3674 if (!msg->findInt32("bitrate-mode", &tmp)) { 3675 return OMX_Video_ControlRateVariable; 3676 } 3677 3678 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3679} 3680 3681status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3682 int32_t bitrate, iFrameInterval; 3683 if (!msg->findInt32("bitrate", 
&bitrate) 3684 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3685 return INVALID_OPERATION; 3686 } 3687 3688 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3689 3690 float frameRate; 3691 if (!msg->findFloat("frame-rate", &frameRate)) { 3692 int32_t tmp; 3693 if (!msg->findInt32("frame-rate", &tmp)) { 3694 return INVALID_OPERATION; 3695 } 3696 frameRate = (float)tmp; 3697 } 3698 3699 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3700 InitOMXParams(&mpeg4type); 3701 mpeg4type.nPortIndex = kPortIndexOutput; 3702 3703 status_t err = mOMX->getParameter( 3704 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3705 3706 if (err != OK) { 3707 return err; 3708 } 3709 3710 mpeg4type.nSliceHeaderSpacing = 0; 3711 mpeg4type.bSVH = OMX_FALSE; 3712 mpeg4type.bGov = OMX_FALSE; 3713 3714 mpeg4type.nAllowedPictureTypes = 3715 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3716 3717 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3718 if (mpeg4type.nPFrames == 0) { 3719 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3720 } 3721 mpeg4type.nBFrames = 0; 3722 mpeg4type.nIDCVLCThreshold = 0; 3723 mpeg4type.bACPred = OMX_TRUE; 3724 mpeg4type.nMaxPacketSize = 256; 3725 mpeg4type.nTimeIncRes = 1000; 3726 mpeg4type.nHeaderExtension = 0; 3727 mpeg4type.bReversibleVLC = OMX_FALSE; 3728 3729 int32_t profile; 3730 if (msg->findInt32("profile", &profile)) { 3731 int32_t level; 3732 if (!msg->findInt32("level", &level)) { 3733 return INVALID_OPERATION; 3734 } 3735 3736 err = verifySupportForProfileAndLevel(profile, level); 3737 3738 if (err != OK) { 3739 return err; 3740 } 3741 3742 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3743 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3744 } 3745 3746 err = mOMX->setParameter( 3747 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3748 3749 if (err != OK) { 3750 return err; 3751 } 3752 3753 err = configureBitrate(bitrate, 
bitrateMode); 3754 3755 if (err != OK) { 3756 return err; 3757 } 3758 3759 return setupErrorCorrectionParameters(); 3760} 3761 3762status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3763 int32_t bitrate, iFrameInterval; 3764 if (!msg->findInt32("bitrate", &bitrate) 3765 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3766 return INVALID_OPERATION; 3767 } 3768 3769 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3770 3771 float frameRate; 3772 if (!msg->findFloat("frame-rate", &frameRate)) { 3773 int32_t tmp; 3774 if (!msg->findInt32("frame-rate", &tmp)) { 3775 return INVALID_OPERATION; 3776 } 3777 frameRate = (float)tmp; 3778 } 3779 3780 OMX_VIDEO_PARAM_H263TYPE h263type; 3781 InitOMXParams(&h263type); 3782 h263type.nPortIndex = kPortIndexOutput; 3783 3784 status_t err = mOMX->getParameter( 3785 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3786 3787 if (err != OK) { 3788 return err; 3789 } 3790 3791 h263type.nAllowedPictureTypes = 3792 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3793 3794 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3795 if (h263type.nPFrames == 0) { 3796 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3797 } 3798 h263type.nBFrames = 0; 3799 3800 int32_t profile; 3801 if (msg->findInt32("profile", &profile)) { 3802 int32_t level; 3803 if (!msg->findInt32("level", &level)) { 3804 return INVALID_OPERATION; 3805 } 3806 3807 err = verifySupportForProfileAndLevel(profile, level); 3808 3809 if (err != OK) { 3810 return err; 3811 } 3812 3813 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3814 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3815 } 3816 3817 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3818 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3819 h263type.nPictureHeaderRepetition = 0; 3820 h263type.nGOBHeaderInterval = 0; 3821 3822 err = mOMX->setParameter( 3823 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3824 3825 if (err != OK) { 3826 return err; 3827 } 3828 3829 err = configureBitrate(bitrate, bitrateMode); 3830 3831 if (err != OK) { 3832 return err; 3833 } 3834 3835 return setupErrorCorrectionParameters(); 3836} 3837 3838// static 3839int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3840 int width, int height, int rate, int bitrate, 3841 OMX_VIDEO_AVCPROFILETYPE profile) { 3842 // convert bitrate to main/baseline profile kbps equivalent 3843 switch (profile) { 3844 case OMX_VIDEO_AVCProfileHigh10: 3845 bitrate = divUp(bitrate, 3000); break; 3846 case OMX_VIDEO_AVCProfileHigh: 3847 bitrate = divUp(bitrate, 1250); break; 3848 default: 3849 bitrate = divUp(bitrate, 1000); break; 3850 } 3851 3852 // convert size and rate to MBs 3853 width = divUp(width, 16); 3854 height = divUp(height, 16); 3855 int mbs = width * height; 3856 rate *= mbs; 3857 int maxDimension = max(width, height); 3858 3859 static const int limits[][5] = { 3860 /* MBps MB dim bitrate level */ 3861 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3862 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3863 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3864 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3865 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3866 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3867 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3868 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3869 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 3870 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 3871 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 3872 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 3873 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 3874 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 3875 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 3876 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 3877 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 3878 }; 3879 3880 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
3881 const int (&limit)[5] = limits[i]; 3882 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3883 && bitrate <= limit[3]) { 3884 return limit[4]; 3885 } 3886 } 3887 return 0; 3888} 3889 3890status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3891 int32_t bitrate, iFrameInterval; 3892 if (!msg->findInt32("bitrate", &bitrate) 3893 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3894 return INVALID_OPERATION; 3895 } 3896 3897 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3898 3899 float frameRate; 3900 if (!msg->findFloat("frame-rate", &frameRate)) { 3901 int32_t tmp; 3902 if (!msg->findInt32("frame-rate", &tmp)) { 3903 return INVALID_OPERATION; 3904 } 3905 frameRate = (float)tmp; 3906 } 3907 3908 status_t err = OK; 3909 int32_t intraRefreshMode = 0; 3910 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3911 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3912 if (err != OK) { 3913 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3914 err, intraRefreshMode); 3915 return err; 3916 } 3917 } 3918 3919 OMX_VIDEO_PARAM_AVCTYPE h264type; 3920 InitOMXParams(&h264type); 3921 h264type.nPortIndex = kPortIndexOutput; 3922 3923 err = mOMX->getParameter( 3924 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3925 3926 if (err != OK) { 3927 return err; 3928 } 3929 3930 h264type.nAllowedPictureTypes = 3931 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3932 3933 int32_t profile; 3934 if (msg->findInt32("profile", &profile)) { 3935 int32_t level; 3936 if (!msg->findInt32("level", &level)) { 3937 return INVALID_OPERATION; 3938 } 3939 3940 err = verifySupportForProfileAndLevel(profile, level); 3941 3942 if (err != OK) { 3943 return err; 3944 } 3945 3946 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 3947 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 3948 } 3949 3950 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 3951 
h264type.nSliceHeaderSpacing = 0; 3952 h264type.bUseHadamard = OMX_TRUE; 3953 h264type.nRefFrames = 1; 3954 h264type.nBFrames = 0; 3955 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3956 if (h264type.nPFrames == 0) { 3957 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3958 } 3959 h264type.nRefIdx10ActiveMinus1 = 0; 3960 h264type.nRefIdx11ActiveMinus1 = 0; 3961 h264type.bEntropyCodingCABAC = OMX_FALSE; 3962 h264type.bWeightedPPrediction = OMX_FALSE; 3963 h264type.bconstIpred = OMX_FALSE; 3964 h264type.bDirect8x8Inference = OMX_FALSE; 3965 h264type.bDirectSpatialTemporal = OMX_FALSE; 3966 h264type.nCabacInitIdc = 0; 3967 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 3968 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 3969 h264type.nSliceHeaderSpacing = 0; 3970 h264type.bUseHadamard = OMX_TRUE; 3971 h264type.nRefFrames = 2; 3972 h264type.nBFrames = 1; 3973 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3974 h264type.nAllowedPictureTypes = 3975 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 3976 h264type.nRefIdx10ActiveMinus1 = 0; 3977 h264type.nRefIdx11ActiveMinus1 = 0; 3978 h264type.bEntropyCodingCABAC = OMX_TRUE; 3979 h264type.bWeightedPPrediction = OMX_TRUE; 3980 h264type.bconstIpred = OMX_TRUE; 3981 h264type.bDirect8x8Inference = OMX_TRUE; 3982 h264type.bDirectSpatialTemporal = OMX_TRUE; 3983 h264type.nCabacInitIdc = 1; 3984 } 3985 3986 if (h264type.nBFrames != 0) { 3987 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 3988 } 3989 3990 h264type.bEnableUEP = OMX_FALSE; 3991 h264type.bEnableFMO = OMX_FALSE; 3992 h264type.bEnableASO = OMX_FALSE; 3993 h264type.bEnableRS = OMX_FALSE; 3994 h264type.bFrameMBsOnly = OMX_TRUE; 3995 h264type.bMBAFF = OMX_FALSE; 3996 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 3997 3998 err = mOMX->setParameter( 3999 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4000 4001 if (err != OK) { 4002 return err; 4003 } 
4004 4005 return configureBitrate(bitrate, bitrateMode); 4006} 4007 4008status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4009 int32_t bitrate, iFrameInterval; 4010 if (!msg->findInt32("bitrate", &bitrate) 4011 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4012 return INVALID_OPERATION; 4013 } 4014 4015 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4016 4017 float frameRate; 4018 if (!msg->findFloat("frame-rate", &frameRate)) { 4019 int32_t tmp; 4020 if (!msg->findInt32("frame-rate", &tmp)) { 4021 return INVALID_OPERATION; 4022 } 4023 frameRate = (float)tmp; 4024 } 4025 4026 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4027 InitOMXParams(&hevcType); 4028 hevcType.nPortIndex = kPortIndexOutput; 4029 4030 status_t err = OK; 4031 err = mOMX->getParameter( 4032 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4033 if (err != OK) { 4034 return err; 4035 } 4036 4037 int32_t profile; 4038 if (msg->findInt32("profile", &profile)) { 4039 int32_t level; 4040 if (!msg->findInt32("level", &level)) { 4041 return INVALID_OPERATION; 4042 } 4043 4044 err = verifySupportForProfileAndLevel(profile, level); 4045 if (err != OK) { 4046 return err; 4047 } 4048 4049 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4050 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4051 } 4052 // TODO: finer control? 
4053 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4054 4055 err = mOMX->setParameter( 4056 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4057 if (err != OK) { 4058 return err; 4059 } 4060 4061 return configureBitrate(bitrate, bitrateMode); 4062} 4063 4064status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4065 int32_t bitrate; 4066 int32_t iFrameInterval = 0; 4067 size_t tsLayers = 0; 4068 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4069 OMX_VIDEO_VPXTemporalLayerPatternNone; 4070 static const uint32_t kVp8LayerRateAlloction 4071 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4072 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4073 {100, 100, 100}, // 1 layer 4074 { 60, 100, 100}, // 2 layers {60%, 40%} 4075 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4076 }; 4077 if (!msg->findInt32("bitrate", &bitrate)) { 4078 return INVALID_OPERATION; 4079 } 4080 msg->findInt32("i-frame-interval", &iFrameInterval); 4081 4082 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4083 4084 float frameRate; 4085 if (!msg->findFloat("frame-rate", &frameRate)) { 4086 int32_t tmp; 4087 if (!msg->findInt32("frame-rate", &tmp)) { 4088 return INVALID_OPERATION; 4089 } 4090 frameRate = (float)tmp; 4091 } 4092 4093 AString tsSchema; 4094 if (msg->findString("ts-schema", &tsSchema)) { 4095 if (tsSchema == "webrtc.vp8.1-layer") { 4096 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4097 tsLayers = 1; 4098 } else if (tsSchema == "webrtc.vp8.2-layer") { 4099 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4100 tsLayers = 2; 4101 } else if (tsSchema == "webrtc.vp8.3-layer") { 4102 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4103 tsLayers = 3; 4104 } else { 4105 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4106 } 4107 } 4108 4109 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4110 InitOMXParams(&vp8type); 4111 vp8type.nPortIndex = kPortIndexOutput; 4112 status_t err = 
mOMX->getParameter( 4113 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4114 &vp8type, sizeof(vp8type)); 4115 4116 if (err == OK) { 4117 if (iFrameInterval > 0) { 4118 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4119 } 4120 vp8type.eTemporalPattern = pattern; 4121 vp8type.nTemporalLayerCount = tsLayers; 4122 if (tsLayers > 0) { 4123 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4124 vp8type.nTemporalLayerBitrateRatio[i] = 4125 kVp8LayerRateAlloction[tsLayers - 1][i]; 4126 } 4127 } 4128 if (bitrateMode == OMX_Video_ControlRateConstant) { 4129 vp8type.nMinQuantizer = 2; 4130 vp8type.nMaxQuantizer = 63; 4131 } 4132 4133 err = mOMX->setParameter( 4134 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4135 &vp8type, sizeof(vp8type)); 4136 if (err != OK) { 4137 ALOGW("Extended VP8 parameters set failed: %d", err); 4138 } 4139 } 4140 4141 return configureBitrate(bitrate, bitrateMode); 4142} 4143 4144status_t ACodec::verifySupportForProfileAndLevel( 4145 int32_t profile, int32_t level) { 4146 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4147 InitOMXParams(¶ms); 4148 params.nPortIndex = kPortIndexOutput; 4149 4150 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4151 params.nProfileIndex = index; 4152 status_t err = mOMX->getParameter( 4153 mNode, 4154 OMX_IndexParamVideoProfileLevelQuerySupported, 4155 ¶ms, 4156 sizeof(params)); 4157 4158 if (err != OK) { 4159 return err; 4160 } 4161 4162 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4163 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4164 4165 if (profile == supportedProfile && level <= supportedLevel) { 4166 return OK; 4167 } 4168 4169 if (index == kMaxIndicesToCheck) { 4170 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4171 mComponentName.c_str(), index, 4172 params.eProfile, params.eLevel); 4173 } 4174 } 4175 return ERROR_UNSUPPORTED; 4176} 4177 4178status_t ACodec::configureBitrate( 
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    // Read-modify-write the output port's bitrate parameters: only the
    // control-rate mode and target bitrate are changed.
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (spacing 256) and disables HEC, data partitioning
// and RVLC on the output port. Error correction is an optional OMX feature:
// if the component does not support the parameter at all, this silently
// succeeds.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Sets frame size (and, for the input port, compression format and frame
// rate) on the given video port via the port definition. frameRate < 0 means
// "leave the component's xFramerate unchanged".
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        // Floor the compressed input buffer size at 64 KiB.
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is a Q16 fixed-point value
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables (when a native window is attached) or disables graphic-buffer use
// on the output port. The disable case is best-effort: its result is ignored.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on the given port currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently queued to / held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// undequeued count remains there (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on the port is owned by us or by the native
// window (i.e. the component holds none of them).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: checks both the input and the output port.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message for redelivery once the current state can handle it.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in FIFO order. The queue is swapped out
// first so that messages deferred again during processing are not re-run
// in this pass.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills params.sMediaImage with a plane layout derived purely from the color
// format, stride and slice height, for components that do not implement the
// describeColorFormat extension. Only 8-bit YUV420 variants are supported;
// returns false for anything else (leaving mType = UNKNOWN).
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format: full-resolution 8-bit luma plane is common to all
    // supported layouts
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-aligned strides, V plane before U plane
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:  // used for YV12 as well
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // planar I420: U plane follows Y, V plane follows U
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved chroma, colInc 2, V is U offset by one byte
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // unreachable: format was validated above
            TRESPASS();
    }
    return true;
}

// static
// Queries the component's color-format description, preferring the v1
// describeColorFormat extension (converted to v2), then the v2 extension,
// then falling back to the built-in default layout.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
            node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Determines whether colorFormat has a flexible-YUV420 equivalent (8-bit,
// 3-plane, 4:2:0 subsampling); on success stores the equivalent in
// *flexibleEquivalent and returns true.
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                &&
img.mPlane[img.V].mVertSubsampling == 2) { 4529 // possible flexible YUV420 format 4530 if (img.mBitDepth <= 8) { 4531 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4532 return true; 4533 } 4534 } 4535 } 4536 return false; 4537} 4538 4539status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4540 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4541 OMX_PARAM_PORTDEFINITIONTYPE def; 4542 InitOMXParams(&def); 4543 def.nPortIndex = portIndex; 4544 4545 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4546 if (err != OK) { 4547 return err; 4548 } 4549 4550 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4551 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4552 return BAD_VALUE; 4553 } 4554 4555 switch (def.eDomain) { 4556 case OMX_PortDomainVideo: 4557 { 4558 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4559 switch ((int)videoDef->eCompressionFormat) { 4560 case OMX_VIDEO_CodingUnused: 4561 { 4562 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4563 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4564 4565 notify->setInt32("stride", videoDef->nStride); 4566 notify->setInt32("slice-height", videoDef->nSliceHeight); 4567 notify->setInt32("color-format", videoDef->eColorFormat); 4568 4569 if (mNativeWindow == NULL) { 4570 DescribeColorFormat2Params describeParams; 4571 InitOMXParams(&describeParams); 4572 describeParams.eColorFormat = videoDef->eColorFormat; 4573 describeParams.nFrameWidth = videoDef->nFrameWidth; 4574 describeParams.nFrameHeight = videoDef->nFrameHeight; 4575 describeParams.nStride = videoDef->nStride; 4576 describeParams.nSliceHeight = videoDef->nSliceHeight; 4577 describeParams.bUsingNativeBuffers = OMX_FALSE; 4578 4579 if (describeColorFormat(mOMX, mNode, describeParams)) { 4580 notify->setBuffer( 4581 "image-data", 4582 ABuffer::CreateAsCopy( 4583 
&describeParams.sMediaImage, 4584 sizeof(describeParams.sMediaImage))); 4585 4586 MediaImage2 &img = describeParams.sMediaImage; 4587 MediaImage2::PlaneInfo *plane = img.mPlane; 4588 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4589 mComponentName.c_str(), img.mWidth, img.mHeight, 4590 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4591 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4592 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4593 } 4594 } 4595 4596 int32_t width = (int32_t)videoDef->nFrameWidth; 4597 int32_t height = (int32_t)videoDef->nFrameHeight; 4598 4599 if (portIndex == kPortIndexOutput) { 4600 OMX_CONFIG_RECTTYPE rect; 4601 InitOMXParams(&rect); 4602 rect.nPortIndex = portIndex; 4603 4604 if (mOMX->getConfig( 4605 mNode, 4606 (portIndex == kPortIndexOutput ? 4607 OMX_IndexConfigCommonOutputCrop : 4608 OMX_IndexConfigCommonInputCrop), 4609 &rect, sizeof(rect)) != OK) { 4610 rect.nLeft = 0; 4611 rect.nTop = 0; 4612 rect.nWidth = videoDef->nFrameWidth; 4613 rect.nHeight = videoDef->nFrameHeight; 4614 } 4615 4616 if (rect.nLeft < 0 || 4617 rect.nTop < 0 || 4618 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4619 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4620 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4621 rect.nLeft, rect.nTop, 4622 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4623 videoDef->nFrameWidth, videoDef->nFrameHeight); 4624 return BAD_VALUE; 4625 } 4626 4627 notify->setRect( 4628 "crop", 4629 rect.nLeft, 4630 rect.nTop, 4631 rect.nLeft + rect.nWidth - 1, 4632 rect.nTop + rect.nHeight - 1); 4633 4634 width = rect.nWidth; 4635 height = rect.nHeight; 4636 4637 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4638 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4639 width, height, mConfigFormat, notify, 4640 mUsingNativeWindow ? 
&dataSpace : NULL); 4641 if (mUsingNativeWindow) { 4642 notify->setInt32("android._dataspace", dataSpace); 4643 } 4644 } else { 4645 (void)getInputColorAspectsForVideoEncoder(notify); 4646 } 4647 4648 break; 4649 } 4650 4651 case OMX_VIDEO_CodingVP8: 4652 case OMX_VIDEO_CodingVP9: 4653 { 4654 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4655 InitOMXParams(&vp8type); 4656 vp8type.nPortIndex = kPortIndexOutput; 4657 status_t err = mOMX->getParameter( 4658 mNode, 4659 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4660 &vp8type, 4661 sizeof(vp8type)); 4662 4663 if (err == OK) { 4664 AString tsSchema = "none"; 4665 if (vp8type.eTemporalPattern 4666 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4667 switch (vp8type.nTemporalLayerCount) { 4668 case 1: 4669 { 4670 tsSchema = "webrtc.vp8.1-layer"; 4671 break; 4672 } 4673 case 2: 4674 { 4675 tsSchema = "webrtc.vp8.2-layer"; 4676 break; 4677 } 4678 case 3: 4679 { 4680 tsSchema = "webrtc.vp8.3-layer"; 4681 break; 4682 } 4683 default: 4684 { 4685 break; 4686 } 4687 } 4688 } 4689 notify->setString("ts-schema", tsSchema); 4690 } 4691 // Fall through to set up mime. 
4692 } 4693 4694 default: 4695 { 4696 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4697 // should be CodingUnused 4698 ALOGE("Raw port video compression format is %s(%d)", 4699 asString(videoDef->eCompressionFormat), 4700 videoDef->eCompressionFormat); 4701 return BAD_VALUE; 4702 } 4703 AString mime; 4704 if (GetMimeTypeForVideoCoding( 4705 videoDef->eCompressionFormat, &mime) != OK) { 4706 notify->setString("mime", "application/octet-stream"); 4707 } else { 4708 notify->setString("mime", mime.c_str()); 4709 } 4710 uint32_t intraRefreshPeriod = 0; 4711 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4712 && intraRefreshPeriod > 0) { 4713 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4714 } 4715 break; 4716 } 4717 } 4718 notify->setInt32("width", videoDef->nFrameWidth); 4719 notify->setInt32("height", videoDef->nFrameHeight); 4720 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4721 portIndex == kPortIndexInput ? "input" : "output", 4722 notify->debugString().c_str()); 4723 4724 break; 4725 } 4726 4727 case OMX_PortDomainAudio: 4728 { 4729 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4730 4731 switch ((int)audioDef->eEncoding) { 4732 case OMX_AUDIO_CodingPCM: 4733 { 4734 OMX_AUDIO_PARAM_PCMMODETYPE params; 4735 InitOMXParams(¶ms); 4736 params.nPortIndex = portIndex; 4737 4738 err = mOMX->getParameter( 4739 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4740 if (err != OK) { 4741 return err; 4742 } 4743 4744 if (params.nChannels <= 0 4745 || (params.nChannels != 1 && !params.bInterleaved) 4746 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4747 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4748 params.nChannels, 4749 params.bInterleaved ? 
" interleaved" : "", 4750 params.nBitPerSample); 4751 return FAILED_TRANSACTION; 4752 } 4753 4754 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4755 notify->setInt32("channel-count", params.nChannels); 4756 notify->setInt32("sample-rate", params.nSamplingRate); 4757 4758 AudioEncoding encoding = kAudioEncodingPcm16bit; 4759 if (params.eNumData == OMX_NumericalDataUnsigned 4760 && params.nBitPerSample == 8u) { 4761 encoding = kAudioEncodingPcm8bit; 4762 } else if (params.eNumData == OMX_NumericalDataFloat 4763 && params.nBitPerSample == 32u) { 4764 encoding = kAudioEncodingPcmFloat; 4765 } else if (params.nBitPerSample != 16u 4766 || params.eNumData != OMX_NumericalDataSigned) { 4767 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4768 asString(params.eNumData), params.eNumData, 4769 asString(params.ePCMMode), params.ePCMMode); 4770 return FAILED_TRANSACTION; 4771 } 4772 notify->setInt32("pcm-encoding", encoding); 4773 4774 if (mChannelMaskPresent) { 4775 notify->setInt32("channel-mask", mChannelMask); 4776 } 4777 break; 4778 } 4779 4780 case OMX_AUDIO_CodingAAC: 4781 { 4782 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4783 InitOMXParams(¶ms); 4784 params.nPortIndex = portIndex; 4785 4786 err = mOMX->getParameter( 4787 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4788 if (err != OK) { 4789 return err; 4790 } 4791 4792 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4793 notify->setInt32("channel-count", params.nChannels); 4794 notify->setInt32("sample-rate", params.nSampleRate); 4795 break; 4796 } 4797 4798 case OMX_AUDIO_CodingAMR: 4799 { 4800 OMX_AUDIO_PARAM_AMRTYPE params; 4801 InitOMXParams(¶ms); 4802 params.nPortIndex = portIndex; 4803 4804 err = mOMX->getParameter( 4805 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4806 if (err != OK) { 4807 return err; 4808 } 4809 4810 notify->setInt32("channel-count", 1); 4811 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4812 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4813 
notify->setInt32("sample-rate", 16000); 4814 } else { 4815 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4816 notify->setInt32("sample-rate", 8000); 4817 } 4818 break; 4819 } 4820 4821 case OMX_AUDIO_CodingFLAC: 4822 { 4823 OMX_AUDIO_PARAM_FLACTYPE params; 4824 InitOMXParams(¶ms); 4825 params.nPortIndex = portIndex; 4826 4827 err = mOMX->getParameter( 4828 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4829 if (err != OK) { 4830 return err; 4831 } 4832 4833 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4834 notify->setInt32("channel-count", params.nChannels); 4835 notify->setInt32("sample-rate", params.nSampleRate); 4836 break; 4837 } 4838 4839 case OMX_AUDIO_CodingMP3: 4840 { 4841 OMX_AUDIO_PARAM_MP3TYPE params; 4842 InitOMXParams(¶ms); 4843 params.nPortIndex = portIndex; 4844 4845 err = mOMX->getParameter( 4846 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4847 if (err != OK) { 4848 return err; 4849 } 4850 4851 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4852 notify->setInt32("channel-count", params.nChannels); 4853 notify->setInt32("sample-rate", params.nSampleRate); 4854 break; 4855 } 4856 4857 case OMX_AUDIO_CodingVORBIS: 4858 { 4859 OMX_AUDIO_PARAM_VORBISTYPE params; 4860 InitOMXParams(¶ms); 4861 params.nPortIndex = portIndex; 4862 4863 err = mOMX->getParameter( 4864 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4865 if (err != OK) { 4866 return err; 4867 } 4868 4869 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4870 notify->setInt32("channel-count", params.nChannels); 4871 notify->setInt32("sample-rate", params.nSampleRate); 4872 break; 4873 } 4874 4875 case OMX_AUDIO_CodingAndroidAC3: 4876 { 4877 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 4878 InitOMXParams(¶ms); 4879 params.nPortIndex = portIndex; 4880 4881 err = mOMX->getParameter( 4882 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 4883 ¶ms, sizeof(params)); 4884 if (err != OK) { 4885 return err; 4886 } 4887 4888 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 4889 notify->setInt32("channel-count", params.nChannels); 4890 notify->setInt32("sample-rate", params.nSampleRate); 4891 break; 4892 } 4893 4894 case OMX_AUDIO_CodingAndroidEAC3: 4895 { 4896 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 4897 InitOMXParams(¶ms); 4898 params.nPortIndex = portIndex; 4899 4900 err = mOMX->getParameter( 4901 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 4902 ¶ms, sizeof(params)); 4903 if (err != OK) { 4904 return err; 4905 } 4906 4907 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 4908 notify->setInt32("channel-count", params.nChannels); 4909 notify->setInt32("sample-rate", params.nSampleRate); 4910 break; 4911 } 4912 4913 case OMX_AUDIO_CodingAndroidOPUS: 4914 { 4915 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 4916 InitOMXParams(¶ms); 4917 params.nPortIndex = portIndex; 4918 4919 err = mOMX->getParameter( 4920 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 4921 ¶ms, sizeof(params)); 4922 if (err != OK) { 4923 return err; 4924 } 4925 4926 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 4927 notify->setInt32("channel-count", params.nChannels); 4928 notify->setInt32("sample-rate", params.nSampleRate); 4929 break; 4930 } 4931 4932 case OMX_AUDIO_CodingG711: 4933 { 4934 OMX_AUDIO_PARAM_PCMMODETYPE params; 4935 InitOMXParams(¶ms); 4936 params.nPortIndex = portIndex; 4937 4938 err = mOMX->getParameter( 4939 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4940 if (err != OK) { 4941 return err; 4942 } 4943 4944 const char *mime = NULL; 4945 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 4946 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 4947 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 4948 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 4949 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 4950 mime = MEDIA_MIMETYPE_AUDIO_RAW; 4951 } 4952 notify->setString("mime", mime); 4953 notify->setInt32("channel-count", params.nChannels); 4954 notify->setInt32("sample-rate", 
params.nSamplingRate); 4955 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 4956 break; 4957 } 4958 4959 case OMX_AUDIO_CodingGSMFR: 4960 { 4961 OMX_AUDIO_PARAM_PCMMODETYPE params; 4962 InitOMXParams(¶ms); 4963 params.nPortIndex = portIndex; 4964 4965 err = mOMX->getParameter( 4966 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4967 if (err != OK) { 4968 return err; 4969 } 4970 4971 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 4972 notify->setInt32("channel-count", params.nChannels); 4973 notify->setInt32("sample-rate", params.nSamplingRate); 4974 break; 4975 } 4976 4977 default: 4978 ALOGE("Unsupported audio coding: %s(%d)\n", 4979 asString(audioDef->eEncoding), audioDef->eEncoding); 4980 return BAD_TYPE; 4981 } 4982 break; 4983 } 4984 4985 default: 4986 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4987 return BAD_TYPE; 4988 } 4989 4990 return OK; 4991} 4992 4993void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 4994 // aspects are normally communicated in ColorAspects 4995 int32_t range, standard, transfer; 4996 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 4997 4998 // if some aspects are unspecified, use dataspace fields 4999 if (range != 0) { 5000 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5001 } 5002 if (standard != 0) { 5003 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5004 } 5005 if (transfer != 0) { 5006 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5007 } 5008 5009 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5010 if (range != 0) { 5011 mOutputFormat->setInt32("color-range", range); 5012 } 5013 if (standard != 0) { 5014 mOutputFormat->setInt32("color-standard", standard); 5015 } 5016 if (transfer != 0) { 5017 mOutputFormat->setInt32("color-transfer", transfer); 5018 } 5019 5020 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5021 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5022 dataSpace, 5023 aspects.mRange, asString(aspects.mRange), 5024 aspects.mPrimaries, asString(aspects.mPrimaries), 5025 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5026 aspects.mTransfer, asString(aspects.mTransfer), 5027 range, asString((ColorRange)range), 5028 standard, asString((ColorStandard)standard), 5029 transfer, asString((ColorTransfer)transfer)); 5030} 5031 5032void ACodec::onOutputFormatChanged() { 5033 // store new output format, at the same time mark that this is no longer the first frame 5034 mOutputFormat = mBaseOutputFormat->dup(); 5035 5036 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5037 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5038 return; 5039 } 5040 5041 if (!mIsVideo && !mIsEncoder) { 5042 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5043 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5044 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5045 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5046 5047 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5048 if (mConverter[kPortIndexOutput] != NULL) { 5049 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5050 } 5051 } 5052 5053 if (mTunneled) { 5054 sendFormatChange(); 5055 } 5056} 5057 5058void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5059 AString mime; 5060 CHECK(mOutputFormat->findString("mime", &mime)); 5061 5062 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5063 // notify renderer of the crop change and dataspace change 5064 // NOTE: native window uses extended right-bottom coordinate 5065 int32_t left, top, right, bottom; 5066 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5067 notify->setRect("crop", left, top, right + 1, 
bottom + 1); 5068 } 5069 5070 int32_t dataSpace; 5071 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5072 notify->setInt32("dataspace", dataSpace); 5073 } 5074 } 5075} 5076 5077void ACodec::sendFormatChange() { 5078 AString mime; 5079 CHECK(mOutputFormat->findString("mime", &mime)); 5080 5081 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5082 int32_t channelCount; 5083 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5084 if (mSkipCutBuffer != NULL) { 5085 size_t prevbufsize = mSkipCutBuffer->size(); 5086 if (prevbufsize != 0) { 5087 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5088 } 5089 } 5090 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5091 } 5092 5093 sp<AMessage> notify = mNotify->dup(); 5094 notify->setInt32("what", kWhatOutputFormatChanged); 5095 notify->setMessage("format", mOutputFormat); 5096 notify->post(); 5097 5098 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5099 mLastOutputFormat = mOutputFormat; 5100} 5101 5102void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5103 sp<AMessage> notify = mNotify->dup(); 5104 notify->setInt32("what", CodecBase::kWhatError); 5105 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5106 5107 if (internalError == UNKNOWN_ERROR) { // find better error code 5108 const status_t omxStatus = statusFromOMXError(error); 5109 if (omxStatus != 0) { 5110 internalError = omxStatus; 5111 } else { 5112 ALOGW("Invalid OMX error %#x", error); 5113 } 5114 } 5115 5116 mFatalError = true; 5117 5118 notify->setInt32("err", internalError); 5119 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 
5120 notify->post(); 5121} 5122 5123//////////////////////////////////////////////////////////////////////////////// 5124 5125ACodec::PortDescription::PortDescription() { 5126} 5127 5128status_t ACodec::requestIDRFrame() { 5129 if (!mIsEncoder) { 5130 return ERROR_UNSUPPORTED; 5131 } 5132 5133 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5134 InitOMXParams(¶ms); 5135 5136 params.nPortIndex = kPortIndexOutput; 5137 params.IntraRefreshVOP = OMX_TRUE; 5138 5139 return mOMX->setConfig( 5140 mNode, 5141 OMX_IndexConfigVideoIntraVOPRefresh, 5142 ¶ms, 5143 sizeof(params)); 5144} 5145 5146void ACodec::PortDescription::addBuffer( 5147 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5148 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5149 mBufferIDs.push_back(id); 5150 mBuffers.push_back(buffer); 5151 mHandles.push_back(handle); 5152 mMemRefs.push_back(memRef); 5153} 5154 5155size_t ACodec::PortDescription::countBuffers() { 5156 return mBufferIDs.size(); 5157} 5158 5159IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5160 return mBufferIDs.itemAt(index); 5161} 5162 5163sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5164 return mBuffers.itemAt(index); 5165} 5166 5167sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5168 return mHandles.itemAt(index); 5169} 5170 5171sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const { 5172 return mMemRefs.itemAt(index); 5173} 5174 5175//////////////////////////////////////////////////////////////////////////////// 5176 5177ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5178 : AState(parentState), 5179 mCodec(codec) { 5180} 5181 5182ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5183 OMX_U32 /* portIndex */) { 5184 return KEEP_BUFFERS; 5185} 5186 5187bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5188 switch (msg->what()) { 5189 case kWhatInputBufferFilled: 5190 { 5191 
onInputBufferFilled(msg); 5192 break; 5193 } 5194 5195 case kWhatOutputBufferDrained: 5196 { 5197 onOutputBufferDrained(msg); 5198 break; 5199 } 5200 5201 case ACodec::kWhatOMXMessageList: 5202 { 5203 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5204 } 5205 5206 case ACodec::kWhatOMXMessageItem: 5207 { 5208 // no need to check as we already did it for kWhatOMXMessageList 5209 return onOMXMessage(msg); 5210 } 5211 5212 case ACodec::kWhatOMXMessage: 5213 { 5214 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5215 } 5216 5217 case ACodec::kWhatSetSurface: 5218 { 5219 sp<AReplyToken> replyID; 5220 CHECK(msg->senderAwaitsResponse(&replyID)); 5221 5222 sp<RefBase> obj; 5223 CHECK(msg->findObject("surface", &obj)); 5224 5225 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5226 5227 sp<AMessage> response = new AMessage; 5228 response->setInt32("err", err); 5229 response->postReply(replyID); 5230 break; 5231 } 5232 5233 case ACodec::kWhatCreateInputSurface: 5234 case ACodec::kWhatSetInputSurface: 5235 case ACodec::kWhatSignalEndOfInputStream: 5236 { 5237 // This may result in an app illegal state exception. 5238 ALOGE("Message 0x%x was not handled", msg->what()); 5239 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5240 return true; 5241 } 5242 5243 case ACodec::kWhatOMXDied: 5244 { 5245 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5246 ALOGE("OMX/mediaserver died, signalling error!"); 5247 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5248 break; 5249 } 5250 5251 case ACodec::kWhatReleaseCodecInstance: 5252 { 5253 ALOGI("[%s] forcing the release of codec", 5254 mCodec->mComponentName.c_str()); 5255 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5256 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5257 mCodec->mComponentName.c_str(), err); 5258 sp<AMessage> notify = mCodec->mNotify->dup(); 5259 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5260 notify->post(); 5261 break; 5262 } 5263 5264 default: 5265 return false; 5266 } 5267 5268 return true; 5269} 5270 5271bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5272 // there is a possibility that this is an outstanding message for a 5273 // codec that we have already destroyed 5274 if (mCodec->mNode == 0) { 5275 ALOGI("ignoring message as already freed component: %s", 5276 msg->debugString().c_str()); 5277 return false; 5278 } 5279 5280 IOMX::node_id nodeID; 5281 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5282 if (nodeID != mCodec->mNode) { 5283 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5284 return false; 5285 } 5286 return true; 5287} 5288 5289bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5290 sp<RefBase> obj; 5291 CHECK(msg->findObject("messages", &obj)); 5292 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5293 5294 bool receivedRenderedEvents = false; 5295 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5296 it != msgList->getList().cend(); ++it) { 5297 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5298 mCodec->handleMessage(*it); 5299 int32_t type; 5300 CHECK((*it)->findInt32("type", &type)); 5301 if (type == omx_message::FRAME_RENDERED) { 5302 receivedRenderedEvents = true; 5303 } 5304 } 5305 5306 if (receivedRenderedEvents) { 5307 // NOTE: all buffers are 
rendered in this case 5308 mCodec->notifyOfRenderedFrames(); 5309 } 5310 return true; 5311} 5312 5313bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5314 int32_t type; 5315 CHECK(msg->findInt32("type", &type)); 5316 5317 switch (type) { 5318 case omx_message::EVENT: 5319 { 5320 int32_t event, data1, data2; 5321 CHECK(msg->findInt32("event", &event)); 5322 CHECK(msg->findInt32("data1", &data1)); 5323 CHECK(msg->findInt32("data2", &data2)); 5324 5325 if (event == OMX_EventCmdComplete 5326 && data1 == OMX_CommandFlush 5327 && data2 == (int32_t)OMX_ALL) { 5328 // Use of this notification is not consistent across 5329 // implementations. We'll drop this notification and rely 5330 // on flush-complete notifications on the individual port 5331 // indices instead. 5332 5333 return true; 5334 } 5335 5336 return onOMXEvent( 5337 static_cast<OMX_EVENTTYPE>(event), 5338 static_cast<OMX_U32>(data1), 5339 static_cast<OMX_U32>(data2)); 5340 } 5341 5342 case omx_message::EMPTY_BUFFER_DONE: 5343 { 5344 IOMX::buffer_id bufferID; 5345 int32_t fenceFd; 5346 5347 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5348 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5349 5350 return onOMXEmptyBufferDone(bufferID, fenceFd); 5351 } 5352 5353 case omx_message::FILL_BUFFER_DONE: 5354 { 5355 IOMX::buffer_id bufferID; 5356 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5357 5358 int32_t rangeOffset, rangeLength, flags, fenceFd; 5359 int64_t timeUs; 5360 5361 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5362 CHECK(msg->findInt32("range_length", &rangeLength)); 5363 CHECK(msg->findInt32("flags", &flags)); 5364 CHECK(msg->findInt64("timestamp", &timeUs)); 5365 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5366 5367 return onOMXFillBufferDone( 5368 bufferID, 5369 (size_t)rangeOffset, (size_t)rangeLength, 5370 (OMX_U32)flags, 5371 timeUs, 5372 fenceFd); 5373 } 5374 5375 case omx_message::FRAME_RENDERED: 5376 { 5377 int64_t mediaTimeUs, systemNano; 5378 5379 
CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5380 CHECK(msg->findInt64("system_nano", &systemNano)); 5381 5382 return onOMXFrameRendered( 5383 mediaTimeUs, systemNano); 5384 } 5385 5386 default: 5387 ALOGE("Unexpected message type: %d", type); 5388 return false; 5389 } 5390} 5391 5392bool ACodec::BaseState::onOMXFrameRendered( 5393 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5394 // ignore outside of Executing and PortSettingsChanged states 5395 return true; 5396} 5397 5398bool ACodec::BaseState::onOMXEvent( 5399 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5400 if (event == OMX_EventDataSpaceChanged) { 5401 ColorAspects aspects; 5402 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5403 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5404 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5405 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5406 5407 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5408 return true; 5409 } 5410 5411 if (event != OMX_EventError) { 5412 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5413 mCodec->mComponentName.c_str(), event, data1, data2); 5414 5415 return false; 5416 } 5417 5418 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5419 5420 // verify OMX component sends back an error we expect. 
5421 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5422 if (!isOMXError(omxError)) { 5423 ALOGW("Invalid OMX error %#x", omxError); 5424 omxError = OMX_ErrorUndefined; 5425 } 5426 mCodec->signalError(omxError); 5427 5428 return true; 5429} 5430 5431bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5432 ALOGV("[%s] onOMXEmptyBufferDone %u", 5433 mCodec->mComponentName.c_str(), bufferID); 5434 5435 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5436 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5437 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5438 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5439 mCodec->dumpBuffers(kPortIndexInput); 5440 if (fenceFd >= 0) { 5441 ::close(fenceFd); 5442 } 5443 return false; 5444 } 5445 info->mStatus = BufferInfo::OWNED_BY_US; 5446 5447 // input buffers cannot take fences, so wait for any fence now 5448 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5449 fenceFd = -1; 5450 5451 // still save fence for completeness 5452 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5453 5454 // We're in "store-metadata-in-buffers" mode, the underlying 5455 // OMX component had access to data that's implicitly refcounted 5456 // by this "MediaBuffer" object. Now that the OMX component has 5457 // told us that it's done with the input buffer, we can decrement 5458 // the mediaBuffer's reference count. 
5459 info->mData->setMediaBufferBase(NULL); 5460 5461 PortMode mode = getPortMode(kPortIndexInput); 5462 5463 switch (mode) { 5464 case KEEP_BUFFERS: 5465 break; 5466 5467 case RESUBMIT_BUFFERS: 5468 postFillThisBuffer(info); 5469 break; 5470 5471 case FREE_BUFFERS: 5472 default: 5473 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5474 return false; 5475 } 5476 5477 return true; 5478} 5479 5480void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5481 if (mCodec->mPortEOS[kPortIndexInput]) { 5482 return; 5483 } 5484 5485 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5486 5487 sp<AMessage> notify = mCodec->mNotify->dup(); 5488 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5489 notify->setInt32("buffer-id", info->mBufferID); 5490 5491 info->mData->meta()->clear(); 5492 notify->setBuffer("buffer", info->mData); 5493 5494 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5495 reply->setInt32("buffer-id", info->mBufferID); 5496 5497 notify->setMessage("reply", reply); 5498 5499 notify->post(); 5500 5501 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5502} 5503 5504void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5505 IOMX::buffer_id bufferID; 5506 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5507 sp<ABuffer> buffer; 5508 int32_t err = OK; 5509 bool eos = false; 5510 PortMode mode = getPortMode(kPortIndexInput); 5511 5512 if (!msg->findBuffer("buffer", &buffer)) { 5513 /* these are unfilled buffers returned by client */ 5514 CHECK(msg->findInt32("err", &err)); 5515 5516 if (err == OK) { 5517 /* buffers with no errors are returned on MediaCodec.flush */ 5518 mode = KEEP_BUFFERS; 5519 } else { 5520 ALOGV("[%s] saw error %d instead of an input buffer", 5521 mCodec->mComponentName.c_str(), err); 5522 eos = true; 5523 } 5524 5525 buffer.clear(); 5526 } 5527 5528 int32_t tmp; 5529 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5530 eos = true; 5531 
err = ERROR_END_OF_STREAM; 5532 } 5533 5534 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5535 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5536 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5537 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5538 mCodec->dumpBuffers(kPortIndexInput); 5539 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5540 return; 5541 } 5542 5543 info->mStatus = BufferInfo::OWNED_BY_US; 5544 5545 switch (mode) { 5546 case KEEP_BUFFERS: 5547 { 5548 if (eos) { 5549 if (!mCodec->mPortEOS[kPortIndexInput]) { 5550 mCodec->mPortEOS[kPortIndexInput] = true; 5551 mCodec->mInputEOSResult = err; 5552 } 5553 } 5554 break; 5555 } 5556 5557 case RESUBMIT_BUFFERS: 5558 { 5559 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5560 // Do not send empty input buffer w/o EOS to the component. 5561 if (buffer->size() == 0 && !eos) { 5562 postFillThisBuffer(info); 5563 break; 5564 } 5565 5566 int64_t timeUs; 5567 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5568 5569 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5570 5571 int32_t isCSD; 5572 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5573 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5574 } 5575 5576 if (eos) { 5577 flags |= OMX_BUFFERFLAG_EOS; 5578 } 5579 5580 if (buffer != info->mCodecData) { 5581 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5582 mCodec->mComponentName.c_str(), 5583 bufferID, 5584 buffer.get(), info->mCodecData.get()); 5585 5586 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5587 if (converter == NULL) { 5588 converter = getCopyConverter(); 5589 } 5590 status_t err = converter->convert(buffer, info->mCodecData); 5591 if (err != OK) { 5592 mCodec->signalError(OMX_ErrorUndefined, err); 5593 return; 5594 } 5595 } 5596 5597 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5598 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5599 mCodec->mComponentName.c_str(), bufferID); 5600 } else if (flags & OMX_BUFFERFLAG_EOS) { 5601 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5602 mCodec->mComponentName.c_str(), bufferID); 5603 } else { 5604#if TRACK_BUFFER_TIMING 5605 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5606 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5607#else 5608 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5609 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5610#endif 5611 } 5612 5613#if TRACK_BUFFER_TIMING 5614 ACodec::BufferStats stats; 5615 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5616 stats.mFillBufferDoneTimeUs = -1ll; 5617 mCodec->mBufferStats.add(timeUs, stats); 5618#endif 5619 5620 if (mCodec->storingMetadataInDecodedBuffers()) { 5621 // try to submit an output buffer for each input buffer 5622 PortMode outputMode = getPortMode(kPortIndexOutput); 5623 5624 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5625 mCodec->mMetadataBuffersToSubmit, 5626 (outputMode == FREE_BUFFERS ? "FREE" : 5627 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5628 if (outputMode == RESUBMIT_BUFFERS) { 5629 mCodec->submitOutputMetadataBuffer(); 5630 } 5631 } 5632 info->checkReadFence("onInputBufferFilled"); 5633 status_t err2 = mCodec->mOMX->emptyBuffer( 5634 mCodec->mNode, 5635 bufferID, 5636 0, 5637 info->mCodecData->size(), 5638 flags, 5639 timeUs, 5640 info->mFenceFd); 5641 info->mFenceFd = -1; 5642 if (err2 != OK) { 5643 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5644 return; 5645 } 5646 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5647 5648 if (!eos && err == OK) { 5649 getMoreInputDataIfPossible(); 5650 } else { 5651 ALOGV("[%s] Signalled EOS (%d) on the input port", 5652 mCodec->mComponentName.c_str(), err); 5653 5654 mCodec->mPortEOS[kPortIndexInput] = true; 5655 mCodec->mInputEOSResult = err; 5656 } 5657 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5658 if (err != OK && err != ERROR_END_OF_STREAM) { 5659 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5660 mCodec->mComponentName.c_str(), err); 5661 } else { 5662 ALOGV("[%s] Signalling EOS on the input port", 5663 mCodec->mComponentName.c_str()); 5664 } 5665 5666 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5667 mCodec->mComponentName.c_str(), bufferID); 5668 5669 info->checkReadFence("onInputBufferFilled"); 5670 status_t err2 = mCodec->mOMX->emptyBuffer( 5671 mCodec->mNode, 5672 bufferID, 5673 0, 5674 0, 5675 OMX_BUFFERFLAG_EOS, 5676 0, 5677 info->mFenceFd); 5678 info->mFenceFd = -1; 5679 if (err2 != OK) { 5680 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5681 return; 5682 } 5683 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5684 5685 mCodec->mPortEOS[kPortIndexInput] = true; 5686 mCodec->mInputEOSResult = err; 5687 } 5688 break; 5689 } 5690 5691 case FREE_BUFFERS: 5692 break; 5693 5694 default: 5695 ALOGE("invalid port mode: %d", mode); 5696 break; 5697 } 5698} 5699 5700void ACodec::BaseState::getMoreInputDataIfPossible() { 5701 if 
(mCodec->mPortEOS[kPortIndexInput]) { 5702 return; 5703 } 5704 5705 BufferInfo *eligible = NULL; 5706 5707 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5708 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5709 5710#if 0 5711 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5712 // There's already a "read" pending. 5713 return; 5714 } 5715#endif 5716 5717 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5718 eligible = info; 5719 } 5720 } 5721 5722 if (eligible == NULL) { 5723 return; 5724 } 5725 5726 postFillThisBuffer(eligible); 5727} 5728 5729bool ACodec::BaseState::onOMXFillBufferDone( 5730 IOMX::buffer_id bufferID, 5731 size_t rangeOffset, size_t rangeLength, 5732 OMX_U32 flags, 5733 int64_t timeUs, 5734 int fenceFd) { 5735 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5736 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5737 5738 ssize_t index; 5739 status_t err= OK; 5740 5741#if TRACK_BUFFER_TIMING 5742 index = mCodec->mBufferStats.indexOfKey(timeUs); 5743 if (index >= 0) { 5744 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5745 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5746 5747 ALOGI("frame PTS %lld: %lld", 5748 timeUs, 5749 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5750 5751 mCodec->mBufferStats.removeItemsAt(index); 5752 stats = NULL; 5753 } 5754#endif 5755 5756 BufferInfo *info = 5757 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5758 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5759 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5760 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5761 mCodec->dumpBuffers(kPortIndexOutput); 5762 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5763 if (fenceFd >= 0) { 5764 ::close(fenceFd); 5765 } 5766 return true; 5767 } 5768 5769 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5770 info->mStatus = 
BufferInfo::OWNED_BY_US; 5771 5772 if (info->mRenderInfo != NULL) { 5773 // The fence for an emptied buffer must have signaled, but there still could be queued 5774 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5775 // as we will soon requeue this buffer to the surface. While in theory we could still keep 5776 // track of buffers that are requeued to the surface, it is better to add support to the 5777 // buffer-queue to notify us of released buffers and their fences (in the future). 5778 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5779 } 5780 5781 // byte buffers cannot take fences, so wait for any fence now 5782 if (mCodec->mNativeWindow == NULL) { 5783 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5784 fenceFd = -1; 5785 } 5786 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5787 5788 PortMode mode = getPortMode(kPortIndexOutput); 5789 5790 switch (mode) { 5791 case KEEP_BUFFERS: 5792 break; 5793 5794 case RESUBMIT_BUFFERS: 5795 { 5796 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5797 || mCodec->mPortEOS[kPortIndexOutput])) { 5798 ALOGV("[%s] calling fillBuffer %u", 5799 mCodec->mComponentName.c_str(), info->mBufferID); 5800 5801 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 5802 info->mFenceFd = -1; 5803 if (err != OK) { 5804 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5805 return true; 5806 } 5807 5808 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5809 break; 5810 } 5811 5812 sp<AMessage> reply = 5813 new AMessage(kWhatOutputBufferDrained, mCodec); 5814 5815 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5816 // pretend that output format has changed on the first frame (we used to do this) 5817 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 5818 mCodec->onOutputFormatChanged(); 5819 } 5820 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5821 
mCodec->sendFormatChange(); 5822 } 5823 5824 if (mCodec->usingMetadataOnEncoderOutput()) { 5825 native_handle_t *handle = NULL; 5826 VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data(); 5827 VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data(); 5828 if (info->mData->size() >= sizeof(grallocMeta) 5829 && grallocMeta.eType == kMetadataBufferTypeGrallocSource) { 5830 handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle; 5831 } else if (info->mData->size() >= sizeof(nativeMeta) 5832 && nativeMeta.eType == kMetadataBufferTypeANWBuffer) { 5833#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5834 // ANativeWindowBuffer is only valid on 32-bit/mediaserver process 5835 handle = NULL; 5836#else 5837 handle = (native_handle_t *)nativeMeta.pBuffer->handle; 5838#endif 5839 } 5840 info->mData->meta()->setPointer("handle", handle); 5841 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 5842 info->mData->meta()->setInt32("rangeLength", rangeLength); 5843 } else if (info->mData == info->mCodecData) { 5844 info->mData->setRange(rangeOffset, rangeLength); 5845 } else { 5846 info->mCodecData->setRange(rangeOffset, rangeLength); 5847 // in this case we know that mConverter is not null 5848 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5849 info->mCodecData, info->mData); 5850 if (err != OK) { 5851 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5852 return true; 5853 } 5854 } 5855#if 0 5856 if (mCodec->mNativeWindow == NULL) { 5857 if (IsIDR(info->mData)) { 5858 ALOGI("IDR frame"); 5859 } 5860 } 5861#endif 5862 5863 if (mCodec->mSkipCutBuffer != NULL) { 5864 mCodec->mSkipCutBuffer->submit(info->mData); 5865 } 5866 info->mData->meta()->setInt64("timeUs", timeUs); 5867 5868 sp<AMessage> notify = mCodec->mNotify->dup(); 5869 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 5870 notify->setInt32("buffer-id", info->mBufferID); 5871 notify->setBuffer("buffer", 
info->mData); 5872 notify->setInt32("flags", flags); 5873 5874 reply->setInt32("buffer-id", info->mBufferID); 5875 5876 notify->setMessage("reply", reply); 5877 5878 notify->post(); 5879 5880 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 5881 5882 if (flags & OMX_BUFFERFLAG_EOS) { 5883 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 5884 5885 sp<AMessage> notify = mCodec->mNotify->dup(); 5886 notify->setInt32("what", CodecBase::kWhatEOS); 5887 notify->setInt32("err", mCodec->mInputEOSResult); 5888 notify->post(); 5889 5890 mCodec->mPortEOS[kPortIndexOutput] = true; 5891 } 5892 break; 5893 } 5894 5895 case FREE_BUFFERS: 5896 err = mCodec->freeBuffer(kPortIndexOutput, index); 5897 if (err != OK) { 5898 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5899 return true; 5900 } 5901 break; 5902 5903 default: 5904 ALOGE("Invalid port mode: %d", mode); 5905 return false; 5906 } 5907 5908 return true; 5909} 5910 5911void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 5912 IOMX::buffer_id bufferID; 5913 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5914 ssize_t index; 5915 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5916 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5917 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 5918 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5919 mCodec->dumpBuffers(kPortIndexOutput); 5920 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5921 return; 5922 } 5923 5924 android_native_rect_t crop; 5925 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { 5926 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 5927 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 5928 } 5929 5930 int32_t dataSpace; 5931 if (msg->findInt32("dataspace", &dataSpace)) { 5932 status_t err = native_window_set_buffers_data_space( 5933 
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    // If the client asked for this output buffer to be rendered (and the
    // buffer actually carries data), queue it to the native window;
    // otherwise just reclaim it.
    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            // Duplicate the fence fd: the tracker needs its own reference,
            // queueBuffer below consumes the original.
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;  // convert us -> ns
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // queueBuffer takes ownership of the fence fd, so clear ours
        // regardless of the outcome.
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    // Decide what happens to the reclaimed/rendered buffer based on the
    // current output port mode.
    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    // fillBuffer takes ownership of the fence fd.
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

// UninitializedState: no OMX component has been allocated yet.
ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    // Drop the death-notification link to the (now gone) OMX node binder,
    // then reset all per-component state so the codec can be re-allocated.
    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Handles the small set of messages that are legal before a component
// exists; everything else falls through to BaseState.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            // Nothing to tear down; just acknowledge the shutdown.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers exist yet, so a flush trivially completes.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Convenience path: allocate, configure and start the component in one step.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Allocates an OMX node either by explicit component name or by matching a
// mime type, transitioning to LoadedState on success.  Returns false (after
// signalling an error) if no component could be instantiated.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Caller named a specific component; use it if the codec list
        // knows about it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        // Otherwise match candidates by mime type / encoder flag.
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost this thread's priority while the node is being
        // allocated, then restore it.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    // No candidate could be allocated: report and signal the error.
    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    // Watch the remote node binder so we learn if the OMX process dies.
    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    // Secure components need protected gralloc buffers and blank-frame
    // push-out on shutdown.
    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        // Tell the client which component was actually allocated.
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

// LoadedState: a component exists but has no buffers allocated.
ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    // Reset per-session bookkeeping for a fresh start/configure cycle.
    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    // A shutdown requested while we were transitioning here is completed now.
    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the node (unless the client wants it kept) and acknowledges an
// explicit shutdown request.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing is running yet, so a flush completes immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component for the given mime type.  On success
// notifies the client with the resolved input/output formats; on failure
// signals an error and returns false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Applies the encoder-surface options that were gathered during configure
// (frame-repeat delay, PTS gap, max fps, time lapse, suspension, color
// aspects) to the input port.  Returns the first error encountered, OK
// otherwise.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Packed as [timePerFrameUs, timePerCaptureUs] — layout expected by
        // the internal-option handler.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best effort: expose the consumer usage bits in the input format so
    // clients know whether software reads are expected.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if
            (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        // Forward the color aspects captured at configure time to the
        // input port.
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new input surface (buffer producer) for an encoder and hands it
// to the client.  Errors are reported via the notification message, not via
// signalError (see comment below).
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-provided persistent input surface to the encoder.
// Mirrors onCreateInputSurface, including its error-reporting convention.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle OMX state transition.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

// LoadedToIdleState: buffers are being allocated while the component moves
// to OMX_StateIdle.
ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        // Unwind the transition: go back to Loaded and free whatever was
        // allocated, but only if we still own all buffers on a port.
        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port first, then the output port.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Not safe mid-transition; replay once the transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// On Idle completion, immediately request the transition to Executing.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

// IdleToExecutingState: waiting for the component to reach
// OMX_StateExecuting.
ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Not safe mid-transition; replay once we reach Executing.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// On Executing completion, resume buffer flow and enter ExecutingState.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

// ExecutingState: the steady state in which buffers flow between client,
// component and (optionally) the native window.
ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

// Submits output metadata buffers, one per input buffer currently held by
// the component (used in metadata/surface output mode).
void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (still) own back to the component via
// fillBuffer; buffers held by the native window are skipped.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // fillBuffer takes ownership of the fence fd.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer flow: pushes output buffers to the component and offers
// all free input buffers to the client.  Idempotent while active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    // Reset render-time tracking for the new execution run, then replay
    // anything deferred during the transition.
    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            // Record the request; it is completed once we are back in
            // LoadedState (see LoadedState::stateEntered).
            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters while executing.  Each recognized key is
// applied independently; the first hard failure aborts with its error.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
            setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Signals input EOS to the component and reports the outcome to the client.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Full output reconfiguration: disable the output port, free
                // the buffers we can, and wait for the disable to complete.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

// OutputPortSettingsChangedState: the output port is being disabled,
// reconfigured and re-enabled; output buffers are freed as they drain.
ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            // Defer everything until the port reconfiguration finishes.
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                // All output buffers must already have been freed; then the
                // port can be re-enabled and buffers re-allocated.
                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
7240 mCodec->mShutdownInProgress = true; 7241 mCodec->mKeepComponentAllocated = false; 7242 mCodec->changeState(mCodec->mLoadedState); 7243 } 7244 7245 return true; 7246 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7247 if (data2 != (OMX_U32)kPortIndexOutput) { 7248 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7249 return false; 7250 } 7251 7252 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7253 7254 if (mCodec->mExecutingState->active()) { 7255 mCodec->mExecutingState->submitOutputBuffers(); 7256 } 7257 7258 mCodec->changeState(mCodec->mExecutingState); 7259 7260 return true; 7261 } 7262 7263 return false; 7264 } 7265 7266 default: 7267 return false; 7268 } 7269} 7270 7271//////////////////////////////////////////////////////////////////////////////// 7272 7273ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7274 : BaseState(codec), 7275 mComponentNowIdle(false) { 7276} 7277 7278bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7279 bool handled = false; 7280 7281 switch (msg->what()) { 7282 case kWhatFlush: 7283 { 7284 // Don't send me a flush request if you previously wanted me 7285 // to shutdown. 7286 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7287 break; 7288 } 7289 7290 case kWhatShutdown: 7291 { 7292 // We're already doing that... 
7293 7294 handled = true; 7295 break; 7296 } 7297 7298 default: 7299 handled = BaseState::onMessageReceived(msg); 7300 break; 7301 } 7302 7303 return handled; 7304} 7305 7306void ACodec::ExecutingToIdleState::stateEntered() { 7307 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7308 7309 mComponentNowIdle = false; 7310 mCodec->mLastOutputFormat.clear(); 7311} 7312 7313bool ACodec::ExecutingToIdleState::onOMXEvent( 7314 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7315 switch (event) { 7316 case OMX_EventCmdComplete: 7317 { 7318 if (data1 != (OMX_U32)OMX_CommandStateSet 7319 || data2 != (OMX_U32)OMX_StateIdle) { 7320 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7321 asString((OMX_COMMANDTYPE)data1), data1, 7322 asString((OMX_STATETYPE)data2), data2); 7323 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7324 return true; 7325 } 7326 7327 mComponentNowIdle = true; 7328 7329 changeStateIfWeOwnAllBuffers(); 7330 7331 return true; 7332 } 7333 7334 case OMX_EventPortSettingsChanged: 7335 case OMX_EventBufferFlag: 7336 { 7337 // We're shutting down and don't care about this anymore. 7338 return true; 7339 } 7340 7341 default: 7342 return BaseState::onOMXEvent(event, data1, data2); 7343 } 7344} 7345 7346void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7347 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7348 status_t err = mCodec->mOMX->sendCommand( 7349 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7350 if (err == OK) { 7351 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7352 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7353 if (err == OK) { 7354 err = err2; 7355 } 7356 } 7357 7358 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7359 && mCodec->mNativeWindow != NULL) { 7360 // We push enough 1x1 blank buffers to ensure that one of 7361 // them has made it to the display. 
This allows the OMX 7362 // component teardown to zero out any protected buffers 7363 // without the risk of scanning out one of those buffers. 7364 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7365 } 7366 7367 if (err != OK) { 7368 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7369 return; 7370 } 7371 7372 mCodec->changeState(mCodec->mIdleToLoadedState); 7373 } 7374} 7375 7376void ACodec::ExecutingToIdleState::onInputBufferFilled( 7377 const sp<AMessage> &msg) { 7378 BaseState::onInputBufferFilled(msg); 7379 7380 changeStateIfWeOwnAllBuffers(); 7381} 7382 7383void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7384 const sp<AMessage> &msg) { 7385 BaseState::onOutputBufferDrained(msg); 7386 7387 changeStateIfWeOwnAllBuffers(); 7388} 7389 7390//////////////////////////////////////////////////////////////////////////////// 7391 7392ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7393 : BaseState(codec) { 7394} 7395 7396bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7397 bool handled = false; 7398 7399 switch (msg->what()) { 7400 case kWhatShutdown: 7401 { 7402 // We're already doing that... 7403 7404 handled = true; 7405 break; 7406 } 7407 7408 case kWhatFlush: 7409 { 7410 // Don't send me a flush request if you previously wanted me 7411 // to shutdown. 
7412 ALOGE("Got flush request in IdleToLoadedState"); 7413 break; 7414 } 7415 7416 default: 7417 handled = BaseState::onMessageReceived(msg); 7418 break; 7419 } 7420 7421 return handled; 7422} 7423 7424void ACodec::IdleToLoadedState::stateEntered() { 7425 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7426} 7427 7428bool ACodec::IdleToLoadedState::onOMXEvent( 7429 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7430 switch (event) { 7431 case OMX_EventCmdComplete: 7432 { 7433 if (data1 != (OMX_U32)OMX_CommandStateSet 7434 || data2 != (OMX_U32)OMX_StateLoaded) { 7435 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7436 asString((OMX_COMMANDTYPE)data1), data1, 7437 asString((OMX_STATETYPE)data2), data2); 7438 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7439 return true; 7440 } 7441 7442 mCodec->changeState(mCodec->mLoadedState); 7443 7444 return true; 7445 } 7446 7447 default: 7448 return BaseState::onOMXEvent(event, data1, data2); 7449 } 7450} 7451 7452//////////////////////////////////////////////////////////////////////////////// 7453 7454ACodec::FlushingState::FlushingState(ACodec *codec) 7455 : BaseState(codec) { 7456} 7457 7458void ACodec::FlushingState::stateEntered() { 7459 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7460 7461 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7462} 7463 7464bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7465 bool handled = false; 7466 7467 switch (msg->what()) { 7468 case kWhatShutdown: 7469 { 7470 mCodec->deferMessage(msg); 7471 break; 7472 } 7473 7474 case kWhatFlush: 7475 { 7476 // We're already doing this right now. 
7477 handled = true; 7478 break; 7479 } 7480 7481 default: 7482 handled = BaseState::onMessageReceived(msg); 7483 break; 7484 } 7485 7486 return handled; 7487} 7488 7489bool ACodec::FlushingState::onOMXEvent( 7490 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7491 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7492 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7493 7494 switch (event) { 7495 case OMX_EventCmdComplete: 7496 { 7497 if (data1 != (OMX_U32)OMX_CommandFlush) { 7498 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7499 asString((OMX_COMMANDTYPE)data1), data1, data2); 7500 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7501 return true; 7502 } 7503 7504 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7505 if (mFlushComplete[data2]) { 7506 ALOGW("Flush already completed for %s port", 7507 data2 == kPortIndexInput ? "input" : "output"); 7508 return true; 7509 } 7510 mFlushComplete[data2] = true; 7511 7512 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7513 changeStateIfWeOwnAllBuffers(); 7514 } 7515 } else if (data2 == OMX_ALL) { 7516 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7517 ALOGW("received flush complete event for OMX_ALL before ports have been" 7518 "flushed (%d/%d)", 7519 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7520 return false; 7521 } 7522 7523 changeStateIfWeOwnAllBuffers(); 7524 } else { 7525 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7526 } 7527 7528 return true; 7529 } 7530 7531 case OMX_EventPortSettingsChanged: 7532 { 7533 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7534 msg->setInt32("type", omx_message::EVENT); 7535 msg->setInt32("node", mCodec->mNode); 7536 msg->setInt32("event", event); 7537 msg->setInt32("data1", data1); 7538 msg->setInt32("data2", data2); 7539 7540 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7541 
mCodec->mComponentName.c_str()); 7542 7543 mCodec->deferMessage(msg); 7544 7545 return true; 7546 } 7547 7548 default: 7549 return BaseState::onOMXEvent(event, data1, data2); 7550 } 7551 7552 return true; 7553} 7554 7555void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7556 BaseState::onOutputBufferDrained(msg); 7557 7558 changeStateIfWeOwnAllBuffers(); 7559} 7560 7561void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7562 BaseState::onInputBufferFilled(msg); 7563 7564 changeStateIfWeOwnAllBuffers(); 7565} 7566 7567void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7568 if (mFlushComplete[kPortIndexInput] 7569 && mFlushComplete[kPortIndexOutput] 7570 && mCodec->allYourBuffersAreBelongToUs()) { 7571 // We now own all buffers except possibly those still queued with 7572 // the native window for rendering. Let's get those back as well. 7573 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7574 7575 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7576 7577 sp<AMessage> notify = mCodec->mNotify->dup(); 7578 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7579 notify->post(); 7580 7581 mCodec->mPortEOS[kPortIndexInput] = 7582 mCodec->mPortEOS[kPortIndexOutput] = false; 7583 7584 mCodec->mInputEOSResult = OK; 7585 7586 if (mCodec->mSkipCutBuffer != NULL) { 7587 mCodec->mSkipCutBuffer->clear(); 7588 } 7589 7590 mCodec->changeState(mCodec->mExecutingState); 7591 } 7592} 7593 7594status_t ACodec::queryCapabilities( 7595 const AString &name, const AString &mime, bool isEncoder, 7596 sp<MediaCodecInfo::Capabilities> *caps) { 7597 (*caps).clear(); 7598 const char *role = getComponentRole(isEncoder, mime.c_str()); 7599 if (role == NULL) { 7600 return BAD_VALUE; 7601 } 7602 7603 OMXClient client; 7604 status_t err = client.connect(); 7605 if (err != OK) { 7606 return err; 7607 } 7608 7609 sp<IOMX> omx = client.interface(); 7610 sp<CodecObserver> observer = new CodecObserver; 7611 
IOMX::node_id node = 0; 7612 7613 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7614 if (err != OK) { 7615 client.disconnect(); 7616 return err; 7617 } 7618 7619 err = setComponentRole(omx, node, role); 7620 if (err != OK) { 7621 omx->freeNode(node); 7622 client.disconnect(); 7623 return err; 7624 } 7625 7626 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7627 bool isVideo = mime.startsWithIgnoreCase("video/"); 7628 7629 if (isVideo) { 7630 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7631 InitOMXParams(¶m); 7632 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7633 7634 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7635 param.nProfileIndex = index; 7636 status_t err = omx->getParameter( 7637 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7638 ¶m, sizeof(param)); 7639 if (err != OK) { 7640 break; 7641 } 7642 builder->addProfileLevel(param.eProfile, param.eLevel); 7643 7644 if (index == kMaxIndicesToCheck) { 7645 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7646 name.c_str(), index, 7647 param.eProfile, param.eLevel); 7648 } 7649 } 7650 7651 // Color format query 7652 // return colors in the order reported by the OMX component 7653 // prefix "flexible" standard ones with the flexible equivalent 7654 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7655 InitOMXParams(&portFormat); 7656 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7657 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7658 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7659 portFormat.nIndex = index; 7660 status_t err = omx->getParameter( 7661 node, OMX_IndexParamVideoPortFormat, 7662 &portFormat, sizeof(portFormat)); 7663 if (err != OK) { 7664 break; 7665 } 7666 7667 OMX_U32 flexibleEquivalent; 7668 if (isFlexibleColorFormat( 7669 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7670 &flexibleEquivalent)) { 7671 bool marked = false; 7672 for (size_t i = 0; i < supportedColors.size(); ++i) { 7673 if (supportedColors[i] == flexibleEquivalent) { 7674 marked = true; 7675 break; 7676 } 7677 } 7678 if (!marked) { 7679 supportedColors.push(flexibleEquivalent); 7680 builder->addColorFormat(flexibleEquivalent); 7681 } 7682 } 7683 supportedColors.push(portFormat.eColorFormat); 7684 builder->addColorFormat(portFormat.eColorFormat); 7685 7686 if (index == kMaxIndicesToCheck) { 7687 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7688 name.c_str(), index, 7689 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7690 } 7691 } 7692 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7693 // More audio codecs if they have profiles. 7694 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7695 InitOMXParams(¶m); 7696 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7697 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7698 param.nProfileIndex = index; 7699 status_t err = omx->getParameter( 7700 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7701 ¶m, sizeof(param)); 7702 if (err != OK) { 7703 break; 7704 } 7705 // For audio, level is ignored. 
7706 builder->addProfileLevel(param.eProfile, 0 /* level */); 7707 7708 if (index == kMaxIndicesToCheck) { 7709 ALOGW("[%s] stopping checking profiles after %u: %x", 7710 name.c_str(), index, 7711 param.eProfile); 7712 } 7713 } 7714 7715 // NOTE: Without Android extensions, OMX does not provide a way to query 7716 // AAC profile support 7717 if (param.nProfileIndex == 0) { 7718 ALOGW("component %s doesn't support profile query.", name.c_str()); 7719 } 7720 } 7721 7722 if (isVideo && !isEncoder) { 7723 native_handle_t *sidebandHandle = NULL; 7724 if (omx->configureVideoTunnelMode( 7725 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7726 // tunneled playback includes adaptive playback 7727 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7728 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7729 } else if (omx->storeMetaDataInBuffers( 7730 node, kPortIndexOutput, OMX_TRUE) == OK || 7731 omx->prepareForAdaptivePlayback( 7732 node, kPortIndexOutput, OMX_TRUE, 7733 1280 /* width */, 720 /* height */) == OK) { 7734 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7735 } 7736 } 7737 7738 if (isVideo && isEncoder) { 7739 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7740 InitOMXParams(¶ms); 7741 params.nPortIndex = kPortIndexOutput; 7742 // TODO: should we verify if fallback is supported? 7743 if (omx->getConfig( 7744 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7745 ¶ms, sizeof(params)) == OK) { 7746 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7747 } 7748 } 7749 7750 *caps = builder; 7751 omx->freeNode(node); 7752 client.disconnect(); 7753 return OK; 7754} 7755 7756// These are supposed be equivalent to the logic in 7757// "audio_channel_out_mask_from_count". 
7758//static 7759status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7760 switch (numChannels) { 7761 case 1: 7762 map[0] = OMX_AUDIO_ChannelCF; 7763 break; 7764 case 2: 7765 map[0] = OMX_AUDIO_ChannelLF; 7766 map[1] = OMX_AUDIO_ChannelRF; 7767 break; 7768 case 3: 7769 map[0] = OMX_AUDIO_ChannelLF; 7770 map[1] = OMX_AUDIO_ChannelRF; 7771 map[2] = OMX_AUDIO_ChannelCF; 7772 break; 7773 case 4: 7774 map[0] = OMX_AUDIO_ChannelLF; 7775 map[1] = OMX_AUDIO_ChannelRF; 7776 map[2] = OMX_AUDIO_ChannelLR; 7777 map[3] = OMX_AUDIO_ChannelRR; 7778 break; 7779 case 5: 7780 map[0] = OMX_AUDIO_ChannelLF; 7781 map[1] = OMX_AUDIO_ChannelRF; 7782 map[2] = OMX_AUDIO_ChannelCF; 7783 map[3] = OMX_AUDIO_ChannelLR; 7784 map[4] = OMX_AUDIO_ChannelRR; 7785 break; 7786 case 6: 7787 map[0] = OMX_AUDIO_ChannelLF; 7788 map[1] = OMX_AUDIO_ChannelRF; 7789 map[2] = OMX_AUDIO_ChannelCF; 7790 map[3] = OMX_AUDIO_ChannelLFE; 7791 map[4] = OMX_AUDIO_ChannelLR; 7792 map[5] = OMX_AUDIO_ChannelRR; 7793 break; 7794 case 7: 7795 map[0] = OMX_AUDIO_ChannelLF; 7796 map[1] = OMX_AUDIO_ChannelRF; 7797 map[2] = OMX_AUDIO_ChannelCF; 7798 map[3] = OMX_AUDIO_ChannelLFE; 7799 map[4] = OMX_AUDIO_ChannelLR; 7800 map[5] = OMX_AUDIO_ChannelRR; 7801 map[6] = OMX_AUDIO_ChannelCS; 7802 break; 7803 case 8: 7804 map[0] = OMX_AUDIO_ChannelLF; 7805 map[1] = OMX_AUDIO_ChannelRF; 7806 map[2] = OMX_AUDIO_ChannelCF; 7807 map[3] = OMX_AUDIO_ChannelLFE; 7808 map[4] = OMX_AUDIO_ChannelLR; 7809 map[5] = OMX_AUDIO_ChannelRR; 7810 map[6] = OMX_AUDIO_ChannelLS; 7811 map[7] = OMX_AUDIO_ChannelRS; 7812 break; 7813 default: 7814 return -EINVAL; 7815 } 7816 7817 return OK; 7818} 7819 7820} // namespace android 7821