// ACodec.cpp revision e98883f809d3eb7502f29d977a68179b3998e0d3
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void 
onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430protected: 431 
virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mIsVideo(false), 502 mIsEncoder(false), 503 mFatalError(false), 504 mShutdownInProgress(false), 505 mExplicitShutdown(false), 506 mEncoderDelay(0), 507 mEncoderPadding(0), 508 mRotationDegrees(0), 509 mChannelMaskPresent(false), 510 mChannelMask(0), 511 mDequeueCounter(0), 512 mInputMetadataType(kMetadataBufferTypeInvalid), 513 mOutputMetadataType(kMetadataBufferTypeInvalid), 514 mLegacyAdaptiveExperiment(false), 515 mMetadataBuffersToSubmit(0), 516 mNumUndequeuedBuffers(0), 517 mRepeatFrameDelayUs(-1ll), 518 mMaxPtsGapUs(-1ll), 519 mMaxFps(-1), 520 mTimePerFrameUs(-1ll), 521 mTimePerCaptureUs(-1ll), 522 mCreateInputBuffersSuspended(false), 523 mTunneled(false), 524 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) { 525 mUninitializedState = new UninitializedState(this); 526 mLoadedState = new LoadedState(this); 527 mLoadedToIdleState = new LoadedToIdleState(this); 528 mIdleToExecutingState = new IdleToExecutingState(this); 529 mExecutingState = new ExecutingState(this); 530 531 mOutputPortSettingsChangedState = 532 new OutputPortSettingsChangedState(this); 533 534 mExecutingToIdleState = new ExecutingToIdleState(this); 535 mIdleToLoadedState = new IdleToLoadedState(this); 536 mFlushingState = new FlushingState(this); 537 538 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = 
false; 539 mInputEOSResult = OK; 540 541 changeState(mUninitializedState); 542} 543 544ACodec::~ACodec() { 545} 546 547void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 548 mNotify = msg; 549} 550 551void ACodec::initiateSetup(const sp<AMessage> &msg) { 552 msg->setWhat(kWhatSetup); 553 msg->setTarget(this); 554 msg->post(); 555} 556 557void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 558 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 559 msg->setMessage("params", params); 560 msg->post(); 561} 562 563void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 564 msg->setWhat(kWhatAllocateComponent); 565 msg->setTarget(this); 566 msg->post(); 567} 568 569void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatConfigureComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575status_t ACodec::setSurface(const sp<Surface> &surface) { 576 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 577 msg->setObject("surface", surface); 578 579 sp<AMessage> response; 580 status_t err = msg->postAndAwaitResponse(&response); 581 582 if (err == OK) { 583 (void)response->findInt32("err", &err); 584 } 585 return err; 586} 587 588void ACodec::initiateCreateInputSurface() { 589 (new AMessage(kWhatCreateInputSurface, this))->post(); 590} 591 592void ACodec::initiateSetInputSurface( 593 const sp<PersistentSurface> &surface) { 594 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 595 msg->setObject("input-surface", surface); 596 msg->post(); 597} 598 599void ACodec::signalEndOfInputStream() { 600 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 601} 602 603void ACodec::initiateStart() { 604 (new AMessage(kWhatStart, this))->post(); 605} 606 607void ACodec::signalFlush() { 608 ALOGV("[%s] signalFlush", mComponentName.c_str()); 609 (new AMessage(kWhatFlush, this))->post(); 610} 611 612void ACodec::signalResume() { 613 (new AMessage(kWhatResume, this))->post(); 614} 615 616void 
ACodec::initiateShutdown(bool keepComponentAllocated) { 617 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 618 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 619 msg->post(); 620 if (!keepComponentAllocated) { 621 // ensure shutdown completes in 3 seconds 622 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 623 } 624} 625 626void ACodec::signalRequestIDRFrame() { 627 (new AMessage(kWhatRequestIDRFrame, this))->post(); 628} 629 630// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 631// Some codecs may return input buffers before having them processed. 632// This causes a halt if we already signaled an EOS on the input 633// port. For now keep submitting an output buffer if there was an 634// EOS on the input port, but not yet on the output port. 635void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 636 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 637 mMetadataBuffersToSubmit > 0) { 638 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 639 } 640} 641 642status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 643 // allow keeping unset surface 644 if (surface == NULL) { 645 if (mNativeWindow != NULL) { 646 ALOGW("cannot unset a surface"); 647 return INVALID_OPERATION; 648 } 649 return OK; 650 } 651 652 // cannot switch from bytebuffers to surface 653 if (mNativeWindow == NULL) { 654 ALOGW("component was not configured with a surface"); 655 return INVALID_OPERATION; 656 } 657 658 ANativeWindow *nativeWindow = surface.get(); 659 // if we have not yet started the codec, we can simply set the native window 660 if (mBuffers[kPortIndexInput].size() == 0) { 661 mNativeWindow = surface; 662 return OK; 663 } 664 665 // we do not support changing a tunneled surface after start 666 if (mTunneled) { 667 ALOGW("cannot change tunneled surface"); 668 return INVALID_OPERATION; 669 } 670 671 int usageBits = 0; 672 status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, 
&usageBits); 673 if (err != OK) { 674 return err; 675 } 676 677 int ignoredFlags = kVideoGrallocUsage; 678 // New output surface is not allowed to add new usage flag except ignored ones. 679 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 680 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 681 return BAD_VALUE; 682 } 683 684 // get min undequeued count. We cannot switch to a surface that has a higher 685 // undequeued count than we allocated. 686 int minUndequeuedBuffers = 0; 687 err = nativeWindow->query( 688 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 689 &minUndequeuedBuffers); 690 if (err != 0) { 691 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 692 strerror(-err), -err); 693 return err; 694 } 695 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 696 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 697 minUndequeuedBuffers, mNumUndequeuedBuffers); 698 return BAD_VALUE; 699 } 700 701 // we cannot change the number of output buffers while OMX is running 702 // set up surface to the same count 703 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 704 ALOGV("setting up surface for %zu buffers", buffers.size()); 705 706 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 707 if (err != 0) { 708 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 709 -err); 710 return err; 711 } 712 713 // need to enable allocation when attaching 714 surface->getIGraphicBufferProducer()->allowAllocation(true); 715 716 // for meta data mode, we move dequeud buffers to the new surface. 
717 // for non-meta mode, we must move all registered buffers 718 for (size_t i = 0; i < buffers.size(); ++i) { 719 const BufferInfo &info = buffers[i]; 720 // skip undequeued buffers for meta data mode 721 if (storingMetadataInDecodedBuffers() 722 && !mLegacyAdaptiveExperiment 723 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 724 ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer()); 725 continue; 726 } 727 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 728 729 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 730 if (err != OK) { 731 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 732 info.mGraphicBuffer->getNativeBuffer(), 733 strerror(-err), -err); 734 return err; 735 } 736 } 737 738 // cancel undequeued buffers to new surface 739 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 740 for (size_t i = 0; i < buffers.size(); ++i) { 741 BufferInfo &info = buffers.editItemAt(i); 742 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 743 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 744 err = nativeWindow->cancelBuffer( 745 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 746 info.mFenceFd = -1; 747 if (err != OK) { 748 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 749 info.mGraphicBuffer->getNativeBuffer(), 750 strerror(-err), -err); 751 return err; 752 } 753 } 754 } 755 // disallow further allocation 756 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 757 } 758 759 // push blank buffers to previous window if requested 760 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 761 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 762 } 763 764 mNativeWindow = nativeWindow; 765 mNativeWindowUsageBits = usageBits; 766 return OK; 767} 768 769status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 770 CHECK(portIndex == kPortIndexInput || portIndex == 
kPortIndexOutput); 771 772 CHECK(mDealer[portIndex] == NULL); 773 CHECK(mBuffers[portIndex].isEmpty()); 774 775 status_t err; 776 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 777 if (storingMetadataInDecodedBuffers()) { 778 err = allocateOutputMetadataBuffers(); 779 } else { 780 err = allocateOutputBuffersFromNativeWindow(); 781 } 782 } else { 783 OMX_PARAM_PORTDEFINITIONTYPE def; 784 InitOMXParams(&def); 785 def.nPortIndex = portIndex; 786 787 err = mOMX->getParameter( 788 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 789 790 if (err == OK) { 791 MetadataBufferType type = 792 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 793 size_t bufSize = def.nBufferSize; 794 if (type == kMetadataBufferTypeGrallocSource) { 795 bufSize = sizeof(VideoGrallocMetadata); 796 } else if (type == kMetadataBufferTypeANWBuffer) { 797 bufSize = sizeof(VideoNativeMetadata); 798 } 799 800 // If using gralloc or native source input metadata buffers, allocate largest 801 // metadata size as we prefer to generate native source metadata, but component 802 // may require gralloc source. For camera source, allocate at least enough 803 // size for native metadata buffers. 
804 size_t allottedSize = bufSize; 805 if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) { 806 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 807 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 808 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 809 } 810 811 size_t conversionBufferSize = 0; 812 813 sp<DataConverter> converter = mConverter[portIndex]; 814 if (converter != NULL) { 815 // here we assume sane conversions of max 4:1, so result fits in int32 816 if (portIndex == kPortIndexInput) { 817 conversionBufferSize = converter->sourceSize(bufSize); 818 } else { 819 conversionBufferSize = converter->targetSize(bufSize); 820 } 821 } 822 823 size_t alignment = MemoryDealer::getAllocationAlignment(); 824 825 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 826 mComponentName.c_str(), 827 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 828 portIndex == kPortIndexInput ? 
"input" : "output"); 829 830 // verify buffer sizes to avoid overflow in align() 831 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 832 ALOGE("b/22885421"); 833 return NO_MEMORY; 834 } 835 836 // don't modify bufSize as OMX may not expect it to increase after negotiation 837 size_t alignedSize = align(bufSize, alignment); 838 size_t alignedConvSize = align(conversionBufferSize, alignment); 839 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 840 ALOGE("b/22885421"); 841 return NO_MEMORY; 842 } 843 844 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 845 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 846 847 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 848 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 849 if (mem == NULL || mem->pointer() == NULL) { 850 return NO_MEMORY; 851 } 852 853 BufferInfo info; 854 info.mStatus = BufferInfo::OWNED_BY_US; 855 info.mFenceFd = -1; 856 info.mRenderInfo = NULL; 857 info.mNativeHandle = NULL; 858 859 uint32_t requiresAllocateBufferBit = 860 (portIndex == kPortIndexInput) 861 ? kRequiresAllocateBufferOnInputPorts 862 : kRequiresAllocateBufferOnOutputPorts; 863 864 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 865 mem.clear(); 866 867 void *ptr = NULL; 868 native_handle_t *native_handle = NULL; 869 err = mOMX->allocateSecureBuffer( 870 mNode, portIndex, bufSize, &info.mBufferID, 871 &ptr, &native_handle); 872 873 // TRICKY: this representation is unorthodox, but ACodec requires 874 // an ABuffer with a proper size to validate range offsets and lengths. 875 // Since mData is never referenced for secure input, it is used to store 876 // either the pointer to the secure buffer, or the opaque handle as on 877 // some devices ptr is actually an opaque handle, not a pointer. 
878 879 // TRICKY2: use native handle as the base of the ABuffer if received one, 880 // because Widevine source only receives these base addresses. 881 info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize); 882 info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */); 883 info.mCodecData = info.mData; 884 } else if (mQuirks & requiresAllocateBufferBit) { 885 err = mOMX->allocateBufferWithBackup( 886 mNode, portIndex, mem, &info.mBufferID, allottedSize); 887 } else { 888 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 889 } 890 891 if (mem != NULL) { 892 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 893 info.mCodecRef = mem; 894 895 if (type == kMetadataBufferTypeANWBuffer) { 896 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 897 } 898 899 // if we require conversion, allocate conversion buffer for client use; 900 // otherwise, reuse codec buffer 901 if (mConverter[portIndex] != NULL) { 902 CHECK_GT(conversionBufferSize, (size_t)0); 903 mem = mDealer[portIndex]->allocate(conversionBufferSize); 904 if (mem == NULL|| mem->pointer() == NULL) { 905 return NO_MEMORY; 906 } 907 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 908 info.mMemRef = mem; 909 } else { 910 info.mData = info.mCodecData; 911 info.mMemRef = info.mCodecRef; 912 } 913 } 914 915 mBuffers[portIndex].push(info); 916 } 917 } 918 } 919 920 if (err != OK) { 921 return err; 922 } 923 924 sp<AMessage> notify = mNotify->dup(); 925 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 926 927 notify->setInt32("portIndex", portIndex); 928 929 sp<PortDescription> desc = new PortDescription; 930 931 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 932 const BufferInfo &info = mBuffers[portIndex][i]; 933 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 934 } 935 936 notify->setObject("portDesc", desc); 937 notify->post(); 938 939 return OK; 940} 941 942status_t 
ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    // Reads the output-port definition from the OMX component and configures
    // the native window's geometry, pixel format, transform and gralloc usage
    // to match. The combined usage bits are also returned via |finalUsage|.
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;  // keep the component-requested bits for logging below

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    // always add the framework-mandated video usage bits on top of what the
    // component asked for
    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates the output buffer count between the OMX component and the native
// window and returns the agreed |bufferCount|, |bufferSize| and the window's
// |minUndequeuedBuffers| requirement. For tunneled playback no window buffers
// are allocated and all three outputs are zero.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        // this also stores the computed usage bits in mNativeWindowUsageBits
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // If the component rejects a count, retry with progressively fewer extra
    // buffers until it accepts one or we run out (extraBuffers drops to 0).
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // report the extra headroom as part of the undequeued requirement
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates real (non-metadata) output buffers by dequeuing gralloc buffers
// from the native window and registering each with the OMX component via
// useGraphicBuffer(). The window's minimum-undequeued quota (or, on error,
// everything we dequeued) is cancelled back to the window before returning.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        // we will be allocating the actual gralloc buffers here, so let the
        // producer allocate on dequeue
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // no CPU-visible backing for window buffers; only capacity is tracked
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            if (err == 0) {
                // preserve the first error encountered
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata, depending on mOutputMetadataType) from a MemoryDealer
// and registers them with the component; the actual gralloc buffers are
// dequeued lazily. Under the legacy-adaptive experiment the gralloc buffers
// are additionally preallocated/preregistered up front and then cancelled.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // mark fence slot as "no fence" so a stale fd is never closed
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and hands its metadata buffer to
// the component via fillBuffer(). No-op once mMetadataBuffersToSubmit is 0.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence fd; clear ours so it is not double-closed
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks (up to IOMX::kFenceTimeoutMs) until the sync fence |fd| signals.
// |dbg| identifies the caller in the timeout warning. fd < 0 means "no fence"
// and returns OK immediately.
status_t ACodec::waitForFence(int fd, const char *dbg) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}
1283 1284// static 1285const char *ACodec::_asString(BufferInfo::Status s) { 1286 switch (s) { 1287 case BufferInfo::OWNED_BY_US: return "OUR"; 1288 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1289 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1290 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1291 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1292 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1293 default: return "?"; 1294 } 1295} 1296 1297void ACodec::dumpBuffers(OMX_U32 portIndex) { 1298 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1299 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1300 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1301 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1302 const BufferInfo &info = mBuffers[portIndex][i]; 1303 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1304 i, info.mBufferID, info.mGraphicBuffer.get(), 1305 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1306 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1307 } 1308} 1309 1310status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1311 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1312 1313 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1314 mComponentName.c_str(), info->mBufferID); 1315 1316 info->checkWriteFence("cancelBufferToNativeWindow"); 1317 int err = mNativeWindow->cancelBuffer( 1318 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1319 info->mFenceFd = -1; 1320 1321 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1322 mComponentName.c_str(), info->mBufferID); 1323 // change ownership even if cancelBuffer fails 1324 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1325 1326 return err; 1327} 1328 1329void ACodec::updateRenderInfoForDequeuedBuffer( 1330 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1331 1332 info->mRenderInfo = 1333 mRenderTracker.updateInfoForDequeuedBuffer( 1334 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1335 1336 // check for any fences already signaled 1337 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1338} 1339 1340void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1341 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1342 mRenderTracker.dumpRenderQueue(); 1343 } 1344} 1345 1346void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1347 sp<AMessage> msg = mNotify->dup(); 1348 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1349 std::list<FrameRenderTracker::Info> done = 1350 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1351 1352 // unlink untracked frames 1353 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1354 it != done.cend(); ++it) { 1355 ssize_t index = it->getIndex(); 1356 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1357 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1358 } else if (index >= 0) { 1359 // THIS SHOULD NEVER HAPPEN 1360 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1361 } 1362 } 1363 1364 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1365 msg->post(); 1366 } 1367} 1368 1369ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1370 ANativeWindowBuffer *buf; 1371 CHECK(mNativeWindow.get() != NULL); 1372 1373 if (mTunneled) { 1374 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1375 " video playback mode mode!"); 1376 return NULL; 1377 } 1378 1379 if (mFatalError) { 1380 ALOGW("not dequeuing from native window due to fatal error"); 1381 return NULL; 1382 } 1383 1384 int fenceFd = -1; 1385 do { 1386 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1387 if (err != 0) { 1388 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1389 return NULL; 1390 } 1391 1392 bool stale = false; 1393 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1394 i--; 1395 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1396 1397 if (info->mGraphicBuffer != NULL && 1398 info->mGraphicBuffer->handle == buf->handle) { 1399 // Since consumers can attach buffers to BufferQueues, it is possible 1400 // that a known yet stale buffer can return from a surface that we 1401 // once used. We can simply ignore this as we have already dequeued 1402 // this buffer properly. NOTE: this does not eliminate all cases, 1403 // e.g. it is possible that we have queued the valid buffer to the 1404 // NW, and a stale copy of the same buffer gets dequeued - which will 1405 // be treated as the valid buffer by ACodec. 1406 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1407 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1408 stale = true; 1409 break; 1410 } 1411 1412 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1413 info->mStatus = BufferInfo::OWNED_BY_US; 1414 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1415 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1416 return info; 1417 } 1418 } 1419 1420 // It is also possible to receive a previously unregistered buffer 1421 // in non-meta mode. These should be treated as stale buffers. The 1422 // same is possible in meta mode, in which case, it will be treated 1423 // as a normal buffer, which is not desirable. 1424 // TODO: fix this. 1425 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1426 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1427 stale = true; 1428 } 1429 if (stale) { 1430 // TODO: detach stale buffer, but there is no API yet to do it. 1431 buf = NULL; 1432 } 1433 } while (buf == NULL); 1434 1435 // get oldest undequeued buffer 1436 BufferInfo *oldest = NULL; 1437 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1438 i--; 1439 BufferInfo *info = 1440 &mBuffers[kPortIndexOutput].editItemAt(i); 1441 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1442 (oldest == NULL || 1443 // avoid potential issues from counter rolling over 1444 mDequeueCounter - info->mDequeuedAt > 1445 mDequeueCounter - oldest->mDequeuedAt)) { 1446 oldest = info; 1447 } 1448 } 1449 1450 // it is impossible dequeue a buffer when there are no buffers with ANW 1451 CHECK(oldest != NULL); 1452 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1453 // while loop above does not complete 1454 CHECK(storingMetadataInDecodedBuffers()); 1455 1456 // discard buffer in LRU info and replace with new buffer 1457 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1458 oldest->mStatus = BufferInfo::OWNED_BY_US; 1459 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1460 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1461 oldest->mRenderInfo = NULL; 1462 1463 mOMX->updateGraphicBufferInMeta( 1464 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1465 oldest->mBufferID); 1466 1467 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1468 VideoGrallocMetadata *grallocMeta = 1469 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1470 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1471 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1472 mDequeueCounter - oldest->mDequeuedAt, 1473 (void *)(uintptr_t)grallocMeta->pHandle, 1474 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1475 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1476 VideoNativeMetadata *nativeMeta = 1477 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1478 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1479 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1480 mDequeueCounter - oldest->mDequeuedAt, 1481 (void *)(uintptr_t)nativeMeta->pBuffer, 1482 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1483 } 1484 1485 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1486 return oldest; 1487} 1488 1489status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1490 status_t err = OK; 1491 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1492 i--; 1493 status_t err2 = freeBuffer(portIndex, i); 1494 if (err == OK) { 1495 err = err2; 1496 } 1497 } 1498 1499 // clear mDealer even on an error 1500 mDealer[portIndex].clear(); 1501 return err; 1502} 1503 1504status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1505 status_t err = OK; 1506 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1507 i--; 1508 BufferInfo *info = 1509 &mBuffers[kPortIndexOutput].editItemAt(i); 1510 1511 // At this time some buffers may still be with the component 1512 // or being drained. 
1513 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1514 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1515 status_t err2 = freeBuffer(kPortIndexOutput, i); 1516 if (err == OK) { 1517 err = err2; 1518 } 1519 } 1520 } 1521 1522 return err; 1523} 1524 1525status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1526 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1527 status_t err = OK; 1528 1529 // there should not be any fences in the metadata 1530 MetadataBufferType type = 1531 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1532 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1533 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1534 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1535 if (fenceFd >= 0) { 1536 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1537 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1538 } 1539 } 1540 1541 switch (info->mStatus) { 1542 case BufferInfo::OWNED_BY_US: 1543 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1544 (void)cancelBufferToNativeWindow(info); 1545 } 1546 // fall through 1547 1548 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1549 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1550 break; 1551 1552 default: 1553 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1554 err = FAILED_TRANSACTION; 1555 break; 1556 } 1557 1558 if (info->mFenceFd >= 0) { 1559 ::close(info->mFenceFd); 1560 } 1561 1562 if (portIndex == kPortIndexOutput) { 1563 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1564 info->mRenderInfo = NULL; 1565 } 1566 1567 // remove buffer even if mOMX->freeBuffer fails 1568 mBuffers[portIndex].removeAt(i); 1569 return err; 1570} 1571 1572ACodec::BufferInfo *ACodec::findBufferByID( 1573 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1574 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1575 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1576 1577 if (info->mBufferID == bufferID) { 1578 if (index != NULL) { 1579 *index = i; 1580 } 1581 return info; 1582 } 1583 } 1584 1585 ALOGE("Could not find buffer with ID %u", bufferID); 1586 return NULL; 1587} 1588 1589status_t ACodec::setComponentRole( 1590 bool isEncoder, const char *mime) { 1591 const char *role = getComponentRole(isEncoder, mime); 1592 if (role == NULL) { 1593 return BAD_VALUE; 1594 } 1595 status_t err = setComponentRole(mOMX, mNode, role); 1596 if (err != OK) { 1597 ALOGW("[%s] Failed to set standard component role '%s'.", 1598 mComponentName.c_str(), role); 1599 } 1600 return err; 1601} 1602 1603//static 1604const char *ACodec::getComponentRole( 1605 bool isEncoder, const char *mime) { 1606 struct MimeToRole { 1607 const char *mime; 1608 const char *decoderRole; 1609 const char *encoderRole; 1610 }; 1611 1612 static const MimeToRole kMimeToRole[] = { 1613 { MEDIA_MIMETYPE_AUDIO_MPEG, 1614 "audio_decoder.mp3", "audio_encoder.mp3" }, 1615 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1616 "audio_decoder.mp1", "audio_encoder.mp1" }, 1617 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1618 "audio_decoder.mp2", "audio_encoder.mp2" }, 1619 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1620 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1621 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1622 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1623 { MEDIA_MIMETYPE_AUDIO_AAC, 1624 "audio_decoder.aac", "audio_encoder.aac" }, 1625 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1626 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1627 { MEDIA_MIMETYPE_AUDIO_OPUS, 1628 "audio_decoder.opus", "audio_encoder.opus" }, 1629 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1630 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1631 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1632 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1633 { MEDIA_MIMETYPE_VIDEO_AVC, 1634 "video_decoder.avc", "video_encoder.avc" }, 1635 { MEDIA_MIMETYPE_VIDEO_HEVC, 1636 "video_decoder.hevc", "video_encoder.hevc" }, 1637 { 
MEDIA_MIMETYPE_VIDEO_MPEG4, 1638 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1639 { MEDIA_MIMETYPE_VIDEO_H263, 1640 "video_decoder.h263", "video_encoder.h263" }, 1641 { MEDIA_MIMETYPE_VIDEO_VP8, 1642 "video_decoder.vp8", "video_encoder.vp8" }, 1643 { MEDIA_MIMETYPE_VIDEO_VP9, 1644 "video_decoder.vp9", "video_encoder.vp9" }, 1645 { MEDIA_MIMETYPE_AUDIO_RAW, 1646 "audio_decoder.raw", "audio_encoder.raw" }, 1647 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1648 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1649 { MEDIA_MIMETYPE_AUDIO_FLAC, 1650 "audio_decoder.flac", "audio_encoder.flac" }, 1651 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1652 "audio_decoder.gsm", "audio_encoder.gsm" }, 1653 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1654 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1655 { MEDIA_MIMETYPE_AUDIO_AC3, 1656 "audio_decoder.ac3", "audio_encoder.ac3" }, 1657 { MEDIA_MIMETYPE_AUDIO_EAC3, 1658 "audio_decoder.eac3", "audio_encoder.eac3" }, 1659 }; 1660 1661 static const size_t kNumMimeToRole = 1662 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1663 1664 size_t i; 1665 for (i = 0; i < kNumMimeToRole; ++i) { 1666 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1667 break; 1668 } 1669 } 1670 1671 if (i == kNumMimeToRole) { 1672 return NULL; 1673 } 1674 1675 return isEncoder ? 
            kMimeToRole[i].encoderRole
          : kMimeToRole[i].decoderRole;
}

//static
// Writes |role| into an OMX_PARAM_COMPONENTROLETYPE and applies it to |node|
// via OMX_IndexParamStandardComponentRole.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // strncpy does not NUL-terminate when the source fills the buffer
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    int32_t storeMeta;
    if (encoder
            && msg->findInt32("store-metadata-in-buffers", &storeMeta)
            && storeMeta != 0) {
        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                    mComponentName.c_str(), err);

            return err;
        }
        // For this specific case we could be using camera source even if storeMetaDataInBuffers
        // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize.
1737 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1738 mInputMetadataType = kMetadataBufferTypeCameraSource; 1739 } 1740 1741 uint32_t usageBits; 1742 if (mOMX->getParameter( 1743 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1744 &usageBits, sizeof(usageBits)) == OK) { 1745 inputFormat->setInt32( 1746 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1747 } 1748 } 1749 1750 int32_t prependSPSPPS = 0; 1751 if (encoder 1752 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1753 && prependSPSPPS != 0) { 1754 OMX_INDEXTYPE index; 1755 err = mOMX->getExtensionIndex( 1756 mNode, 1757 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1758 &index); 1759 1760 if (err == OK) { 1761 PrependSPSPPSToIDRFramesParams params; 1762 InitOMXParams(¶ms); 1763 params.bEnable = OMX_TRUE; 1764 1765 err = mOMX->setParameter( 1766 mNode, index, ¶ms, sizeof(params)); 1767 } 1768 1769 if (err != OK) { 1770 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1771 "IDR frames. (err %d)", err); 1772 1773 return err; 1774 } 1775 } 1776 1777 // Only enable metadata mode on encoder output if encoder can prepend 1778 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1779 // opaque handle, to which we don't have access. 
1780 int32_t video = !strncasecmp(mime, "video/", 6); 1781 mIsVideo = video; 1782 if (encoder && video) { 1783 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1784 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1785 && storeMeta != 0); 1786 1787 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1788 if (err != OK) { 1789 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1790 mComponentName.c_str(), err); 1791 } 1792 1793 if (!msg->findInt64( 1794 "repeat-previous-frame-after", 1795 &mRepeatFrameDelayUs)) { 1796 mRepeatFrameDelayUs = -1ll; 1797 } 1798 1799 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1800 mMaxPtsGapUs = -1ll; 1801 } 1802 1803 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1804 mMaxFps = -1; 1805 } 1806 1807 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1808 mTimePerCaptureUs = -1ll; 1809 } 1810 1811 if (!msg->findInt32( 1812 "create-input-buffers-suspended", 1813 (int32_t*)&mCreateInputBuffersSuspended)) { 1814 mCreateInputBuffersSuspended = false; 1815 } 1816 } 1817 1818 // NOTE: we only use native window for video decoders 1819 sp<RefBase> obj; 1820 bool haveNativeWindow = msg->findObject("native-window", &obj) 1821 && obj != NULL && video && !encoder; 1822 mUsingNativeWindow = haveNativeWindow; 1823 mLegacyAdaptiveExperiment = false; 1824 if (video && !encoder) { 1825 inputFormat->setInt32("adaptive-playback", false); 1826 1827 int32_t usageProtected; 1828 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1829 if (!haveNativeWindow) { 1830 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1831 return PERMISSION_DENIED; 1832 } 1833 mFlags |= kFlagIsGrallocUsageProtected; 1834 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1835 } 1836 1837 if (mFlags & kFlagIsSecure) { 1838 // use native_handles for secure input buffers 1839 err = mOMX->enableNativeBuffers( 1840 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1841 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1842 err = OK; // ignore error for now 1843 } 1844 } 1845 if (haveNativeWindow) { 1846 sp<ANativeWindow> nativeWindow = 1847 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1848 1849 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1850 int32_t autoFrc; 1851 if (msg->findInt32("auto-frc", &autoFrc)) { 1852 bool enabled = autoFrc; 1853 OMX_CONFIG_BOOLEANTYPE config; 1854 InitOMXParams(&config); 1855 config.bEnabled = (OMX_BOOL)enabled; 1856 status_t temp = mOMX->setConfig( 1857 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1858 &config, sizeof(config)); 1859 if (temp == OK) { 1860 outputFormat->setInt32("auto-frc", enabled); 1861 } else if (enabled) { 1862 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1863 } 1864 } 1865 // END of temporary support for automatic FRC 1866 1867 int32_t tunneled; 1868 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1869 tunneled != 0) { 1870 ALOGI("Configuring TUNNELED video playback."); 1871 mTunneled = true; 1872 1873 int32_t audioHwSync = 0; 1874 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1875 ALOGW("No Audio HW Sync provided for video tunnel"); 1876 } 1877 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1878 if (err != OK) { 1879 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1880 audioHwSync, nativeWindow.get()); 1881 return err; 1882 } 1883 1884 int32_t maxWidth = 0, maxHeight = 0; 1885 if (msg->findInt32("max-width", &maxWidth) && 1886 msg->findInt32("max-height", &maxHeight)) { 1887 1888 err = mOMX->prepareForAdaptivePlayback( 1889 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1890 if (err != OK) { 1891 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1892 mComponentName.c_str(), err); 1893 // allow failure 1894 err = OK; 1895 } else { 1896 inputFormat->setInt32("max-width", maxWidth); 1897 
inputFormat->setInt32("max-height", maxHeight); 1898 inputFormat->setInt32("adaptive-playback", true); 1899 } 1900 } 1901 } else { 1902 ALOGV("Configuring CPU controlled video playback."); 1903 mTunneled = false; 1904 1905 // Explicity reset the sideband handle of the window for 1906 // non-tunneled video in case the window was previously used 1907 // for a tunneled video playback. 1908 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1909 if (err != OK) { 1910 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1911 return err; 1912 } 1913 1914 // Always try to enable dynamic output buffers on native surface 1915 err = mOMX->storeMetaDataInBuffers( 1916 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1917 if (err != OK) { 1918 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1919 mComponentName.c_str(), err); 1920 1921 // if adaptive playback has been requested, try JB fallback 1922 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1923 // LARGE MEMORY REQUIREMENT 1924 1925 // we will not do adaptive playback on software accessed 1926 // surfaces as they never had to respond to changes in the 1927 // crop window, and we don't trust that they will be able to. 
1928 int usageBits = 0; 1929 bool canDoAdaptivePlayback; 1930 1931 if (nativeWindow->query( 1932 nativeWindow.get(), 1933 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1934 &usageBits) != OK) { 1935 canDoAdaptivePlayback = false; 1936 } else { 1937 canDoAdaptivePlayback = 1938 (usageBits & 1939 (GRALLOC_USAGE_SW_READ_MASK | 1940 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1941 } 1942 1943 int32_t maxWidth = 0, maxHeight = 0; 1944 if (canDoAdaptivePlayback && 1945 msg->findInt32("max-width", &maxWidth) && 1946 msg->findInt32("max-height", &maxHeight)) { 1947 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1948 mComponentName.c_str(), maxWidth, maxHeight); 1949 1950 err = mOMX->prepareForAdaptivePlayback( 1951 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1952 maxHeight); 1953 ALOGW_IF(err != OK, 1954 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1955 mComponentName.c_str(), err); 1956 1957 if (err == OK) { 1958 inputFormat->setInt32("max-width", maxWidth); 1959 inputFormat->setInt32("max-height", maxHeight); 1960 inputFormat->setInt32("adaptive-playback", true); 1961 } 1962 } 1963 // allow failure 1964 err = OK; 1965 } else { 1966 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1967 mComponentName.c_str()); 1968 CHECK(storingMetadataInDecodedBuffers()); 1969 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1970 "legacy-adaptive", !msg->contains("no-experiments")); 1971 1972 inputFormat->setInt32("adaptive-playback", true); 1973 } 1974 1975 int32_t push; 1976 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1977 && push != 0) { 1978 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1979 } 1980 } 1981 1982 int32_t rotationDegrees; 1983 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1984 mRotationDegrees = rotationDegrees; 1985 } else { 1986 mRotationDegrees = 0; 1987 } 1988 } 1989 1990 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1991 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1992 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1993 1994 if (video) { 1995 // determine need for software renderer 1996 bool usingSwRenderer = false; 1997 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1998 usingSwRenderer = true; 1999 haveNativeWindow = false; 2000 } 2001 2002 if (encoder) { 2003 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2004 } else { 2005 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2006 } 2007 2008 if (err != OK) { 2009 return err; 2010 } 2011 2012 if (haveNativeWindow) { 2013 mNativeWindow = static_cast<Surface *>(obj.get()); 2014 } 2015 2016 // initialize native window now to get actual output format 2017 // TODO: this is needed for some encoders even though they don't use native window 2018 err = initNativeWindow(); 2019 if (err != OK) { 2020 return err; 2021 } 2022 2023 // fallback for devices that do not handle flex-YUV for native buffers 2024 if (haveNativeWindow) { 2025 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2026 if (msg->findInt32("color-format", &requestedColorFormat) && 2027 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2028 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2029 if (err != OK) { 2030 return err; 2031 } 2032 int32_t colorFormat = OMX_COLOR_FormatUnused; 2033 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2034 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2035 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2036 return BAD_VALUE; 2037 } 2038 ALOGD("[%s] Requested output format %#x and got %#x.", 2039 mComponentName.c_str(), requestedColorFormat, colorFormat); 2040 if (!isFlexibleColorFormat( 2041 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2042 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2043 // device did not handle flex-YUV request for native window, fall back 2044 // to SW renderer 2045 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2046 mNativeWindow.clear(); 2047 mNativeWindowUsageBits = 0; 2048 haveNativeWindow = false; 2049 usingSwRenderer = true; 2050 if (storingMetadataInDecodedBuffers()) { 2051 err = mOMX->storeMetaDataInBuffers( 2052 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2053 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2054 // TODO: implement adaptive-playback support for bytebuffer mode. 2055 // This is done by SW codecs, but most HW codecs don't support it. 2056 inputFormat->setInt32("adaptive-playback", false); 2057 } 2058 if (err == OK) { 2059 err = mOMX->enableNativeBuffers( 2060 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2061 } 2062 if (mFlags & kFlagIsGrallocUsageProtected) { 2063 // fallback is not supported for protected playback 2064 err = PERMISSION_DENIED; 2065 } else if (err == OK) { 2066 err = setupVideoDecoder( 2067 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2068 } 2069 } 2070 } 2071 } 2072 2073 if (usingSwRenderer) { 2074 outputFormat->setInt32("using-sw-renderer", 1); 2075 } 2076 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2077 int32_t numChannels, sampleRate; 2078 if (!msg->findInt32("channel-count", &numChannels) 2079 || !msg->findInt32("sample-rate", &sampleRate)) { 2080 // Since we did not always check for these, leave them optional 2081 // and have the decoder figure it all out. 2082 err = OK; 2083 } else { 2084 err = setupRawAudioFormat( 2085 encoder ? 
kPortIndexInput : kPortIndexOutput, 2086 sampleRate, 2087 numChannels); 2088 } 2089 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2090 int32_t numChannels, sampleRate; 2091 if (!msg->findInt32("channel-count", &numChannels) 2092 || !msg->findInt32("sample-rate", &sampleRate)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t isADTS, aacProfile; 2096 int32_t sbrMode; 2097 int32_t maxOutputChannelCount; 2098 int32_t pcmLimiterEnable; 2099 drcParams_t drc; 2100 if (!msg->findInt32("is-adts", &isADTS)) { 2101 isADTS = 0; 2102 } 2103 if (!msg->findInt32("aac-profile", &aacProfile)) { 2104 aacProfile = OMX_AUDIO_AACObjectNull; 2105 } 2106 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2107 sbrMode = -1; 2108 } 2109 2110 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2111 maxOutputChannelCount = -1; 2112 } 2113 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2114 // value is unknown 2115 pcmLimiterEnable = -1; 2116 } 2117 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2118 // value is unknown 2119 drc.encodedTargetLevel = -1; 2120 } 2121 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2122 // value is unknown 2123 drc.drcCut = -1; 2124 } 2125 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2126 // value is unknown 2127 drc.drcBoost = -1; 2128 } 2129 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2130 // value is unknown 2131 drc.heavyCompression = -1; 2132 } 2133 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2134 // value is unknown 2135 drc.targetRefLevel = -1; 2136 } 2137 2138 err = setupAACCodec( 2139 encoder, numChannels, sampleRate, bitRate, aacProfile, 2140 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2141 pcmLimiterEnable); 2142 } 2143 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2144 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2145 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2146 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2147 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2148 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2149 // These are PCM-like formats with a fixed sample rate but 2150 // a variable number of channels. 2151 2152 int32_t numChannels; 2153 if (!msg->findInt32("channel-count", &numChannels)) { 2154 err = INVALID_OPERATION; 2155 } else { 2156 int32_t sampleRate; 2157 if (!msg->findInt32("sample-rate", &sampleRate)) { 2158 sampleRate = 8000; 2159 } 2160 err = setupG711Codec(encoder, sampleRate, numChannels); 2161 } 2162 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2163 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2164 if (encoder && 2165 (!msg->findInt32("channel-count", &numChannels) 2166 || !msg->findInt32("sample-rate", &sampleRate))) { 2167 ALOGE("missing channel count or sample rate for FLAC encoder"); 2168 err = INVALID_OPERATION; 2169 } else { 2170 if (encoder) { 2171 if (!msg->findInt32( 2172 "complexity", &compressionLevel) && 2173 !msg->findInt32( 2174 "flac-compression-level", &compressionLevel)) { 2175 compressionLevel = 5; // default FLAC compression level 2176 } else if (compressionLevel < 0) { 2177 ALOGW("compression level %d outside [0..8] range, " 2178 "using 0", 2179 compressionLevel); 2180 compressionLevel = 0; 2181 } else if (compressionLevel > 8) { 2182 ALOGW("compression level %d outside [0..8] range, " 2183 "using 8", 2184 compressionLevel); 2185 compressionLevel = 8; 2186 } 2187 } 2188 err = setupFlacCodec( 2189 encoder, numChannels, sampleRate, compressionLevel); 2190 } 2191 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2192 int32_t numChannels, sampleRate; 2193 if (encoder 2194 || !msg->findInt32("channel-count", &numChannels) 2195 || !msg->findInt32("sample-rate", &sampleRate)) { 2196 err = INVALID_OPERATION; 2197 } else { 2198 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2199 } 2200 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2201 int32_t numChannels; 2202 int32_t sampleRate; 2203 if (!msg->findInt32("channel-count", &numChannels) 2204 || !msg->findInt32("sample-rate", &sampleRate)) { 2205 err = INVALID_OPERATION; 2206 } else { 2207 err = setupAC3Codec(encoder, numChannels, sampleRate); 2208 } 2209 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2210 int32_t numChannels; 2211 int32_t sampleRate; 2212 if (!msg->findInt32("channel-count", &numChannels) 2213 || !msg->findInt32("sample-rate", &sampleRate)) { 2214 err = INVALID_OPERATION; 2215 } else { 2216 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2217 } 2218 } 2219 2220 if (err != OK) { 2221 return err; 2222 } 2223 2224 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2225 mEncoderDelay = 0; 2226 } 2227 2228 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2229 mEncoderPadding = 0; 2230 } 2231 2232 if (msg->findInt32("channel-mask", &mChannelMask)) { 2233 mChannelMaskPresent = true; 2234 } else { 2235 mChannelMaskPresent = false; 2236 } 2237 2238 int32_t maxInputSize; 2239 if (msg->findInt32("max-input-size", &maxInputSize)) { 2240 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2241 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2242 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2243 } 2244 2245 int32_t priority; 2246 if (msg->findInt32("priority", &priority)) { 2247 err = setPriority(priority); 2248 } 2249 2250 int32_t rateInt = -1; 2251 float rateFloat = -1; 2252 if (!msg->findFloat("operating-rate", &rateFloat)) { 2253 msg->findInt32("operating-rate", &rateInt); 2254 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2255 } 2256 if (rateFloat > 0) { 2257 err = setOperatingRate(rateFloat, video); 2258 } 2259 2260 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2261 mBaseOutputFormat = outputFormat; 2262 // trigger a kWhatOutputFormatChanged msg on first buffer 2263 mLastOutputFormat.clear(); 2264 2265 err = getPortFormat(kPortIndexInput, inputFormat); 2266 if (err == OK) { 2267 err = getPortFormat(kPortIndexOutput, outputFormat); 2268 if (err == OK) { 2269 mInputFormat = inputFormat; 2270 mOutputFormat = outputFormat; 2271 } 2272 } 2273 2274 // create data converters if needed 2275 if (!video && err == OK) { 2276 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2277 if (encoder) { 2278 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2279 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2280 if (mConverter[kPortIndexInput] != NULL) { 2281 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2282 } 2283 } else { 2284 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2285 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2286 if (mConverter[kPortIndexOutput] != NULL) { 2287 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2288 } 2289 } 2290 } 2291 2292 return err; 2293} 2294 2295status_t ACodec::setPriority(int32_t priority) { 2296 if (priority < 0) { 2297 return BAD_VALUE; 2298 } 2299 OMX_PARAM_U32TYPE config; 2300 InitOMXParams(&config); 2301 config.nU32 = (OMX_U32)priority; 2302 status_t temp = mOMX->setConfig( 2303 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2304 &config, sizeof(config)); 2305 if (temp != OK) { 2306 ALOGI("codec does not support config priority (err %d)", temp); 2307 } 2308 return OK; 2309} 2310 2311status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2312 if (rateFloat < 0) { 2313 return BAD_VALUE; 2314 } 2315 OMX_U32 rate; 2316 if (isVideo) { 2317 if (rateFloat > 
65535) { 2318 return BAD_VALUE; 2319 } 2320 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2321 } else { 2322 if (rateFloat > UINT_MAX) { 2323 return BAD_VALUE; 2324 } 2325 rate = (OMX_U32)(rateFloat); 2326 } 2327 OMX_PARAM_U32TYPE config; 2328 InitOMXParams(&config); 2329 config.nU32 = rate; 2330 status_t err = mOMX->setConfig( 2331 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2332 &config, sizeof(config)); 2333 if (err != OK) { 2334 ALOGI("codec does not support config operating rate (err %d)", err); 2335 } 2336 return OK; 2337} 2338 2339status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2340 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2341 InitOMXParams(¶ms); 2342 params.nPortIndex = kPortIndexOutput; 2343 status_t err = mOMX->getConfig( 2344 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2345 if (err == OK) { 2346 *intraRefreshPeriod = params.nRefreshPeriod; 2347 return OK; 2348 } 2349 2350 // Fallback to query through standard OMX index. 
2351 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2352 InitOMXParams(&refreshParams); 2353 refreshParams.nPortIndex = kPortIndexOutput; 2354 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2355 err = mOMX->getParameter( 2356 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2357 if (err != OK || refreshParams.nCirMBs == 0) { 2358 *intraRefreshPeriod = 0; 2359 return OK; 2360 } 2361 2362 // Calculate period based on width and height 2363 uint32_t width, height; 2364 OMX_PARAM_PORTDEFINITIONTYPE def; 2365 InitOMXParams(&def); 2366 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2367 def.nPortIndex = kPortIndexOutput; 2368 err = mOMX->getParameter( 2369 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2370 if (err != OK) { 2371 *intraRefreshPeriod = 0; 2372 return err; 2373 } 2374 width = video_def->nFrameWidth; 2375 height = video_def->nFrameHeight; 2376 // Use H.264/AVC MacroBlock size 16x16 2377 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2378 2379 return OK; 2380} 2381 2382status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2383 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2384 InitOMXParams(¶ms); 2385 params.nPortIndex = kPortIndexOutput; 2386 params.nRefreshPeriod = intraRefreshPeriod; 2387 status_t err = mOMX->setConfig( 2388 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2389 if (err == OK) { 2390 return OK; 2391 } 2392 2393 // Only in configure state, a component could invoke setParameter. 
2394 if (!inConfigure) { 2395 return INVALID_OPERATION; 2396 } else { 2397 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2398 } 2399 2400 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2401 InitOMXParams(&refreshParams); 2402 refreshParams.nPortIndex = kPortIndexOutput; 2403 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2404 2405 if (intraRefreshPeriod == 0) { 2406 // 0 means disable intra refresh. 2407 refreshParams.nCirMBs = 0; 2408 } else { 2409 // Calculate macroblocks that need to be intra coded base on width and height 2410 uint32_t width, height; 2411 OMX_PARAM_PORTDEFINITIONTYPE def; 2412 InitOMXParams(&def); 2413 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2414 def.nPortIndex = kPortIndexOutput; 2415 err = mOMX->getParameter( 2416 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2417 if (err != OK) { 2418 return err; 2419 } 2420 width = video_def->nFrameWidth; 2421 height = video_def->nFrameHeight; 2422 // Use H.264/AVC MacroBlock size 16x16 2423 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2424 } 2425 2426 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2427 &refreshParams, sizeof(refreshParams)); 2428 if (err != OK) { 2429 return err; 2430 } 2431 2432 return OK; 2433} 2434 2435status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2436 OMX_PARAM_PORTDEFINITIONTYPE def; 2437 InitOMXParams(&def); 2438 def.nPortIndex = portIndex; 2439 2440 status_t err = mOMX->getParameter( 2441 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2442 2443 if (err != OK) { 2444 return err; 2445 } 2446 2447 if (def.nBufferSize >= size) { 2448 return OK; 2449 } 2450 2451 def.nBufferSize = size; 2452 2453 err = mOMX->setParameter( 2454 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2455 2456 if (err != OK) { 2457 return err; 2458 } 2459 2460 err = mOMX->getParameter( 2461 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 
2462 2463 if (err != OK) { 2464 return err; 2465 } 2466 2467 if (def.nBufferSize < size) { 2468 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2469 return FAILED_TRANSACTION; 2470 } 2471 2472 return OK; 2473} 2474 2475status_t ACodec::selectAudioPortFormat( 2476 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2477 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2478 InitOMXParams(&format); 2479 2480 format.nPortIndex = portIndex; 2481 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2482 format.nIndex = index; 2483 status_t err = mOMX->getParameter( 2484 mNode, OMX_IndexParamAudioPortFormat, 2485 &format, sizeof(format)); 2486 2487 if (err != OK) { 2488 return err; 2489 } 2490 2491 if (format.eEncoding == desiredFormat) { 2492 break; 2493 } 2494 2495 if (index == kMaxIndicesToCheck) { 2496 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2497 mComponentName.c_str(), index, 2498 asString(format.eEncoding), format.eEncoding); 2499 return ERROR_UNSUPPORTED; 2500 } 2501 } 2502 2503 return mOMX->setParameter( 2504 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2505} 2506 2507status_t ACodec::setupAACCodec( 2508 bool encoder, int32_t numChannels, int32_t sampleRate, 2509 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2510 int32_t maxOutputChannelCount, const drcParams_t& drc, 2511 int32_t pcmLimiterEnable) { 2512 if (encoder && isADTS) { 2513 return -EINVAL; 2514 } 2515 2516 status_t err = setupRawAudioFormat( 2517 encoder ? 
            kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        // Encoder path: PCM comes in, AAC goes out the output port.
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        // Read-modify-write of the AAC profile on the output port.
        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode comes from the "aac-sbr-mode" format key; -1 means the
        // caller did not specify one.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure the AAC profile on the input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / presentation values of -1 mean "unknown / leave to component"
    // (see the "aac-drc-*" key parsing in configureCodec).
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2659 2660 if (err != OK) { 2661 return err; 2662 } 2663 2664 if (encoder) { 2665 ALOGW("AC3 encoding is not supported."); 2666 return INVALID_OPERATION; 2667 } 2668 2669 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2670 InitOMXParams(&def); 2671 def.nPortIndex = kPortIndexInput; 2672 2673 err = mOMX->getParameter( 2674 mNode, 2675 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2676 &def, 2677 sizeof(def)); 2678 2679 if (err != OK) { 2680 return err; 2681 } 2682 2683 def.nChannels = numChannels; 2684 def.nSampleRate = sampleRate; 2685 2686 return mOMX->setParameter( 2687 mNode, 2688 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2689 &def, 2690 sizeof(def)); 2691} 2692 2693status_t ACodec::setupEAC3Codec( 2694 bool encoder, int32_t numChannels, int32_t sampleRate) { 2695 status_t err = setupRawAudioFormat( 2696 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2697 2698 if (err != OK) { 2699 return err; 2700 } 2701 2702 if (encoder) { 2703 ALOGW("EAC3 encoding is not supported."); 2704 return INVALID_OPERATION; 2705 } 2706 2707 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2708 InitOMXParams(&def); 2709 def.nPortIndex = kPortIndexInput; 2710 2711 err = mOMX->getParameter( 2712 mNode, 2713 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2714 &def, 2715 sizeof(def)); 2716 2717 if (err != OK) { 2718 return err; 2719 } 2720 2721 def.nChannels = numChannels; 2722 def.nSampleRate = sampleRate; 2723 2724 return mOMX->setParameter( 2725 mNode, 2726 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2727 &def, 2728 sizeof(def)); 2729} 2730 2731static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2732 bool isAMRWB, int32_t bps) { 2733 if (isAMRWB) { 2734 if (bps <= 6600) { 2735 return OMX_AUDIO_AMRBandModeWB0; 2736 } else if (bps <= 8850) { 2737 return OMX_AUDIO_AMRBandModeWB1; 2738 } else if (bps <= 12650) { 2739 return OMX_AUDIO_AMRBandModeWB2; 2740 } else if (bps <= 14250) { 2741 return OMX_AUDIO_AMRBandModeWB3; 
2742 } else if (bps <= 15850) { 2743 return OMX_AUDIO_AMRBandModeWB4; 2744 } else if (bps <= 18250) { 2745 return OMX_AUDIO_AMRBandModeWB5; 2746 } else if (bps <= 19850) { 2747 return OMX_AUDIO_AMRBandModeWB6; 2748 } else if (bps <= 23050) { 2749 return OMX_AUDIO_AMRBandModeWB7; 2750 } 2751 2752 // 23850 bps 2753 return OMX_AUDIO_AMRBandModeWB8; 2754 } else { // AMRNB 2755 if (bps <= 4750) { 2756 return OMX_AUDIO_AMRBandModeNB0; 2757 } else if (bps <= 5150) { 2758 return OMX_AUDIO_AMRBandModeNB1; 2759 } else if (bps <= 5900) { 2760 return OMX_AUDIO_AMRBandModeNB2; 2761 } else if (bps <= 6700) { 2762 return OMX_AUDIO_AMRBandModeNB3; 2763 } else if (bps <= 7400) { 2764 return OMX_AUDIO_AMRBandModeNB4; 2765 } else if (bps <= 7950) { 2766 return OMX_AUDIO_AMRBandModeNB5; 2767 } else if (bps <= 10200) { 2768 return OMX_AUDIO_AMRBandModeNB6; 2769 } 2770 2771 // 12200 bps 2772 return OMX_AUDIO_AMRBandModeNB7; 2773 } 2774} 2775 2776status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2777 OMX_AUDIO_PARAM_AMRTYPE def; 2778 InitOMXParams(&def); 2779 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2780 2781 status_t err = 2782 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2783 2784 if (err != OK) { 2785 return err; 2786 } 2787 2788 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2789 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2790 2791 err = mOMX->setParameter( 2792 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2793 2794 if (err != OK) { 2795 return err; 2796 } 2797 2798 return setupRawAudioFormat( 2799 encoder ? kPortIndexInput : kPortIndexOutput, 2800 isWAMR ? 
16000 : 8000 /* sampleRate */, 2801 1 /* numChannels */); 2802} 2803 2804status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2805 if (encoder) { 2806 return INVALID_OPERATION; 2807 } 2808 2809 return setupRawAudioFormat( 2810 kPortIndexInput, sampleRate, numChannels); 2811} 2812 2813status_t ACodec::setupFlacCodec( 2814 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2815 2816 if (encoder) { 2817 OMX_AUDIO_PARAM_FLACTYPE def; 2818 InitOMXParams(&def); 2819 def.nPortIndex = kPortIndexOutput; 2820 2821 // configure compression level 2822 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2823 if (err != OK) { 2824 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2825 return err; 2826 } 2827 def.nCompressionLevel = compressionLevel; 2828 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2829 if (err != OK) { 2830 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2831 return err; 2832 } 2833 } 2834 2835 return setupRawAudioFormat( 2836 encoder ? 
kPortIndexInput : kPortIndexOutput, 2837 sampleRate, 2838 numChannels); 2839} 2840 2841status_t ACodec::setupRawAudioFormat( 2842 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2843 OMX_PARAM_PORTDEFINITIONTYPE def; 2844 InitOMXParams(&def); 2845 def.nPortIndex = portIndex; 2846 2847 status_t err = mOMX->getParameter( 2848 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2849 2850 if (err != OK) { 2851 return err; 2852 } 2853 2854 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2855 2856 err = mOMX->setParameter( 2857 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2858 2859 if (err != OK) { 2860 return err; 2861 } 2862 2863 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2864 InitOMXParams(&pcmParams); 2865 pcmParams.nPortIndex = portIndex; 2866 2867 err = mOMX->getParameter( 2868 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2869 2870 if (err != OK) { 2871 return err; 2872 } 2873 2874 pcmParams.nChannels = numChannels; 2875 switch (encoding) { 2876 case kAudioEncodingPcm8bit: 2877 pcmParams.eNumData = OMX_NumericalDataUnsigned; 2878 pcmParams.nBitPerSample = 8; 2879 break; 2880 case kAudioEncodingPcmFloat: 2881 pcmParams.eNumData = OMX_NumericalDataFloat; 2882 pcmParams.nBitPerSample = 32; 2883 break; 2884 case kAudioEncodingPcm16bit: 2885 pcmParams.eNumData = OMX_NumericalDataSigned; 2886 pcmParams.nBitPerSample = 16; 2887 break; 2888 default: 2889 return BAD_VALUE; 2890 } 2891 pcmParams.bInterleaved = OMX_TRUE; 2892 pcmParams.nSamplingRate = sampleRate; 2893 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2894 2895 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2896 return OMX_ErrorNone; 2897 } 2898 2899 err = mOMX->setParameter( 2900 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2901 // if we could not set up raw format to non-16-bit, try with 16-bit 2902 // NOTE: we will also verify this via readback, in case codec ignores these fields 2903 if (err != 
OK && encoding != kAudioEncodingPcm16bit) { 2904 pcmParams.eNumData = OMX_NumericalDataSigned; 2905 pcmParams.nBitPerSample = 16; 2906 err = mOMX->setParameter( 2907 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2908 } 2909 return err; 2910} 2911 2912status_t ACodec::configureTunneledVideoPlayback( 2913 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2914 native_handle_t* sidebandHandle; 2915 2916 status_t err = mOMX->configureVideoTunnelMode( 2917 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2918 if (err != OK) { 2919 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2920 return err; 2921 } 2922 2923 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2924 if (err != OK) { 2925 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 2926 sidebandHandle, err); 2927 return err; 2928 } 2929 2930 return OK; 2931} 2932 2933status_t ACodec::setVideoPortFormatType( 2934 OMX_U32 portIndex, 2935 OMX_VIDEO_CODINGTYPE compressionFormat, 2936 OMX_COLOR_FORMATTYPE colorFormat, 2937 bool usingNativeBuffers) { 2938 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 2939 InitOMXParams(&format); 2940 format.nPortIndex = portIndex; 2941 format.nIndex = 0; 2942 bool found = false; 2943 2944 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2945 format.nIndex = index; 2946 status_t err = mOMX->getParameter( 2947 mNode, OMX_IndexParamVideoPortFormat, 2948 &format, sizeof(format)); 2949 2950 if (err != OK) { 2951 return err; 2952 } 2953 2954 // substitute back flexible color format to codec supported format 2955 OMX_U32 flexibleEquivalent; 2956 if (compressionFormat == OMX_VIDEO_CodingUnused 2957 && isFlexibleColorFormat( 2958 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 2959 && colorFormat == flexibleEquivalent) { 2960 ALOGI("[%s] using color format %#x in place of %#x", 2961 mComponentName.c_str(), format.eColorFormat, colorFormat); 2962 colorFormat = 
format.eColorFormat; 2963 } 2964 2965 // The following assertion is violated by TI's video decoder. 2966 // CHECK_EQ(format.nIndex, index); 2967 2968 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 2969 if (portIndex == kPortIndexInput 2970 && colorFormat == format.eColorFormat) { 2971 // eCompressionFormat does not seem right. 2972 found = true; 2973 break; 2974 } 2975 if (portIndex == kPortIndexOutput 2976 && compressionFormat == format.eCompressionFormat) { 2977 // eColorFormat does not seem right. 2978 found = true; 2979 break; 2980 } 2981 } 2982 2983 if (format.eCompressionFormat == compressionFormat 2984 && format.eColorFormat == colorFormat) { 2985 found = true; 2986 break; 2987 } 2988 2989 if (index == kMaxIndicesToCheck) { 2990 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 2991 mComponentName.c_str(), index, 2992 asString(format.eCompressionFormat), format.eCompressionFormat, 2993 asString(format.eColorFormat), format.eColorFormat); 2994 } 2995 } 2996 2997 if (!found) { 2998 return UNKNOWN_ERROR; 2999 } 3000 3001 status_t err = mOMX->setParameter( 3002 mNode, OMX_IndexParamVideoPortFormat, 3003 &format, sizeof(format)); 3004 3005 return err; 3006} 3007 3008// Set optimal output format. OMX component lists output formats in the order 3009// of preference, but this got more complicated since the introduction of flexible 3010// YUV formats. We support a legacy behavior for applications that do not use 3011// surface output, do not specify an output format, but expect a "usable" standard 3012// OMX format. SW readable and standard formats must be flex-YUV. 
3013// 3014// Suggested preference order: 3015// - optimal format for texture rendering (mediaplayer behavior) 3016// - optimal SW readable & texture renderable format (flex-YUV support) 3017// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3018// - legacy "usable" standard formats 3019// 3020// For legacy support, we prefer a standard format, but will settle for a SW readable 3021// flex-YUV format. 3022status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3023 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3024 InitOMXParams(&format); 3025 format.nPortIndex = kPortIndexOutput; 3026 3027 InitOMXParams(&legacyFormat); 3028 // this field will change when we find a suitable legacy format 3029 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3030 3031 for (OMX_U32 index = 0; ; ++index) { 3032 format.nIndex = index; 3033 status_t err = mOMX->getParameter( 3034 mNode, OMX_IndexParamVideoPortFormat, 3035 &format, sizeof(format)); 3036 if (err != OK) { 3037 // no more formats, pick legacy format if found 3038 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3039 memcpy(&format, &legacyFormat, sizeof(format)); 3040 break; 3041 } 3042 return err; 3043 } 3044 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3045 return OMX_ErrorBadParameter; 3046 } 3047 if (!getLegacyFlexibleFormat) { 3048 break; 3049 } 3050 // standard formats that were exposed to users before 3051 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3052 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3053 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3054 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3055 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3056 break; 3057 } 3058 // find best legacy non-standard format 3059 OMX_U32 flexibleEquivalent; 3060 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3061 && isFlexibleColorFormat( 3062 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3063 &flexibleEquivalent) 3064 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3065 memcpy(&legacyFormat, &format, sizeof(format)); 3066 } 3067 } 3068 return mOMX->setParameter( 3069 mNode, OMX_IndexParamVideoPortFormat, 3070 &format, sizeof(format)); 3071} 3072 3073static const struct VideoCodingMapEntry { 3074 const char *mMime; 3075 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3076} kVideoCodingMapEntry[] = { 3077 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3078 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3079 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3080 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3081 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3082 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3083 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3084 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3085}; 3086 3087static status_t GetVideoCodingTypeFromMime( 3088 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3089 for (size_t i = 0; 3090 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3091 ++i) { 3092 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3093 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3094 return OK; 3095 } 3096 } 3097 3098 *codingType = OMX_VIDEO_CodingUnused; 3099 3100 return ERROR_UNSUPPORTED; 3101} 3102 3103static status_t GetMimeTypeForVideoCoding( 3104 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3105 for (size_t i = 0; 3106 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3107 ++i) { 3108 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3109 *mime = kVideoCodingMapEntry[i].mMime; 3110 return OK; 3111 } 3112 } 3113 3114 mime->clear(); 3115 3116 return ERROR_UNSUPPORTED; 3117} 3118 3119status_t ACodec::setupVideoDecoder( 3120 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3121 bool usingSwRenderer, 
sp<AMessage> &outputFormat) { 3122 int32_t width, height; 3123 if (!msg->findInt32("width", &width) 3124 || !msg->findInt32("height", &height)) { 3125 return INVALID_OPERATION; 3126 } 3127 3128 OMX_VIDEO_CODINGTYPE compressionFormat; 3129 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3130 3131 if (err != OK) { 3132 return err; 3133 } 3134 3135 err = setVideoPortFormatType( 3136 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3137 3138 if (err != OK) { 3139 return err; 3140 } 3141 3142 int32_t tmp; 3143 if (msg->findInt32("color-format", &tmp)) { 3144 OMX_COLOR_FORMATTYPE colorFormat = 3145 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3146 err = setVideoPortFormatType( 3147 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3148 if (err != OK) { 3149 ALOGW("[%s] does not support color format %d", 3150 mComponentName.c_str(), colorFormat); 3151 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3152 } 3153 } else { 3154 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3155 } 3156 3157 if (err != OK) { 3158 return err; 3159 } 3160 3161 int32_t frameRateInt; 3162 float frameRateFloat; 3163 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3164 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3165 frameRateInt = -1; 3166 } 3167 frameRateFloat = (float)frameRateInt; 3168 } 3169 3170 err = setVideoFormatOnPort( 3171 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3172 3173 if (err != OK) { 3174 return err; 3175 } 3176 3177 err = setVideoFormatOnPort( 3178 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3179 3180 if (err != OK) { 3181 return err; 3182 } 3183 3184 err = setColorAspectsForVideoDecoder( 3185 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3186 if (err == ERROR_UNSUPPORTED) { // support is optional 3187 err = OK; 3188 } 3189 return err; 3190} 3191 3192status_t 
ACodec::initDescribeColorAspectsIndex() { 3193 status_t err = mOMX->getExtensionIndex( 3194 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3195 if (err != OK) { 3196 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3197 } 3198 return err; 3199} 3200 3201status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3202 status_t err = ERROR_UNSUPPORTED; 3203 if (mDescribeColorAspectsIndex) { 3204 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3205 } 3206 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3207 mComponentName.c_str(), 3208 params.sAspects.mRange, asString(params.sAspects.mRange), 3209 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3210 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3211 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3212 err, asString(err)); 3213 3214 if (verify && err == OK) { 3215 err = getCodecColorAspects(params); 3216 } 3217 3218 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3219 "[%s] getting color aspects failed even though codec advertises support", 3220 mComponentName.c_str()); 3221 return err; 3222} 3223 3224status_t ACodec::setColorAspectsForVideoDecoder( 3225 int32_t width, int32_t height, bool usingNativeWindow, 3226 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3227 DescribeColorAspectsParams params; 3228 InitOMXParams(¶ms); 3229 params.nPortIndex = kPortIndexOutput; 3230 3231 getColorAspectsFromFormat(configFormat, params.sAspects); 3232 if (usingNativeWindow) { 3233 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3234 // The default aspects will be set back to the output format during the 3235 // getFormat phase of configure(). Set non-Unspecified values back into the 3236 // format, in case component does not support this enumeration. 
3237 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3238 } 3239 3240 (void)initDescribeColorAspectsIndex(); 3241 3242 // communicate color aspects to codec 3243 return setCodecColorAspects(params); 3244} 3245 3246status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3247 status_t err = ERROR_UNSUPPORTED; 3248 if (mDescribeColorAspectsIndex) { 3249 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3250 } 3251 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3252 mComponentName.c_str(), 3253 params.sAspects.mRange, asString(params.sAspects.mRange), 3254 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3255 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3256 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3257 err, asString(err)); 3258 if (params.bRequestingDataSpace) { 3259 ALOGV("for dataspace %#x", params.nDataSpace); 3260 } 3261 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3262 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3263 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3264 mComponentName.c_str()); 3265 } 3266 return err; 3267} 3268 3269status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3270 DescribeColorAspectsParams params; 3271 InitOMXParams(¶ms); 3272 params.nPortIndex = kPortIndexInput; 3273 status_t err = getCodecColorAspects(params); 3274 if (err == OK) { 3275 // we only set encoder input aspects if codec supports them 3276 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3277 } 3278 return err; 3279} 3280 3281status_t ACodec::getDataSpace( 3282 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3283 bool tryCodec) { 3284 status_t err = OK; 3285 if (tryCodec) { 3286 // request dataspace guidance from codec. 
3287 params.bRequestingDataSpace = OMX_TRUE; 3288 err = getCodecColorAspects(params); 3289 params.bRequestingDataSpace = OMX_FALSE; 3290 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3291 *dataSpace = (android_dataspace)params.nDataSpace; 3292 return err; 3293 } else if (err == ERROR_UNSUPPORTED) { 3294 // ignore not-implemented error for dataspace requests 3295 err = OK; 3296 } 3297 } 3298 3299 // this returns legacy versions if available 3300 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3301 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3302 "and dataspace %#x", 3303 mComponentName.c_str(), 3304 params.sAspects.mRange, asString(params.sAspects.mRange), 3305 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3306 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3307 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3308 *dataSpace); 3309 return err; 3310} 3311 3312 3313status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3314 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3315 android_dataspace *dataSpace) { 3316 DescribeColorAspectsParams params; 3317 InitOMXParams(¶ms); 3318 params.nPortIndex = kPortIndexOutput; 3319 3320 // reset default format and get resulting format 3321 getColorAspectsFromFormat(configFormat, params.sAspects); 3322 if (dataSpace != NULL) { 3323 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3324 } 3325 status_t err = setCodecColorAspects(params, true /* readBack */); 3326 3327 // we always set specified aspects for decoders 3328 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3329 3330 if (dataSpace != NULL) { 3331 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3332 if (err == OK) { 3333 err = res; 3334 } 3335 } 3336 3337 return err; 3338} 3339 3340// initial video encoder setup for bytebuffer mode 
3341status_t ACodec::setColorAspectsForVideoEncoder( 3342 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3343 // copy config to output format as this is not exposed via getFormat 3344 copyColorConfig(configFormat, outputFormat); 3345 3346 DescribeColorAspectsParams params; 3347 InitOMXParams(¶ms); 3348 params.nPortIndex = kPortIndexInput; 3349 getColorAspectsFromFormat(configFormat, params.sAspects); 3350 3351 (void)initDescribeColorAspectsIndex(); 3352 3353 int32_t usingRecorder; 3354 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3355 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3356 int32_t width, height; 3357 if (configFormat->findInt32("width", &width) 3358 && configFormat->findInt32("height", &height)) { 3359 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3360 status_t err = getDataSpace( 3361 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3362 if (err != OK) { 3363 return err; 3364 } 3365 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3366 } 3367 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3368 } 3369 3370 // communicate color aspects to codec, but do not allow change of the platform aspects 3371 ColorAspects origAspects = params.sAspects; 3372 for (int triesLeft = 2; --triesLeft >= 0; ) { 3373 status_t err = setCodecColorAspects(params, true /* readBack */); 3374 if (err != OK 3375 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3376 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3377 return err; 3378 } 3379 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3380 mComponentName.c_str()); 3381 } 3382 return OK; 3383} 3384 3385// subsequent initial video encoder setup for surface mode 3386status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3387 android_dataspace *dataSpace /* nonnull */) { 3388 
DescribeColorAspectsParams params; 3389 InitOMXParams(¶ms); 3390 params.nPortIndex = kPortIndexInput; 3391 ColorAspects &aspects = params.sAspects; 3392 3393 // reset default format and store resulting format into both input and output formats 3394 getColorAspectsFromFormat(mConfigFormat, aspects); 3395 int32_t width, height; 3396 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3397 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3398 } 3399 setColorAspectsIntoFormat(aspects, mInputFormat); 3400 setColorAspectsIntoFormat(aspects, mOutputFormat); 3401 3402 // communicate color aspects to codec, but do not allow any change 3403 ColorAspects origAspects = aspects; 3404 status_t err = OK; 3405 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3406 status_t err = setCodecColorAspects(params, true /* readBack */); 3407 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3408 break; 3409 } 3410 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3411 mComponentName.c_str()); 3412 } 3413 3414 *dataSpace = HAL_DATASPACE_BT709; 3415 aspects = origAspects; // restore desired color aspects 3416 status_t res = getDataSpace( 3417 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3418 if (err == OK) { 3419 err = res; 3420 } 3421 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3422 mInputFormat->setBuffer( 3423 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3424 3425 // update input format with codec supported color aspects (basically set unsupported 3426 // aspects to Unspecified) 3427 if (err == OK) { 3428 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3429 } 3430 3431 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3432 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3433 3434 
return err; 3435} 3436 3437status_t ACodec::setupVideoEncoder( 3438 const char *mime, const sp<AMessage> &msg, 3439 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3440 int32_t tmp; 3441 if (!msg->findInt32("color-format", &tmp)) { 3442 return INVALID_OPERATION; 3443 } 3444 3445 OMX_COLOR_FORMATTYPE colorFormat = 3446 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3447 3448 status_t err = setVideoPortFormatType( 3449 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3450 3451 if (err != OK) { 3452 ALOGE("[%s] does not support color format %d", 3453 mComponentName.c_str(), colorFormat); 3454 3455 return err; 3456 } 3457 3458 /* Input port configuration */ 3459 3460 OMX_PARAM_PORTDEFINITIONTYPE def; 3461 InitOMXParams(&def); 3462 3463 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3464 3465 def.nPortIndex = kPortIndexInput; 3466 3467 err = mOMX->getParameter( 3468 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3469 3470 if (err != OK) { 3471 return err; 3472 } 3473 3474 int32_t width, height, bitrate; 3475 if (!msg->findInt32("width", &width) 3476 || !msg->findInt32("height", &height) 3477 || !msg->findInt32("bitrate", &bitrate)) { 3478 return INVALID_OPERATION; 3479 } 3480 3481 video_def->nFrameWidth = width; 3482 video_def->nFrameHeight = height; 3483 3484 int32_t stride; 3485 if (!msg->findInt32("stride", &stride)) { 3486 stride = width; 3487 } 3488 3489 video_def->nStride = stride; 3490 3491 int32_t sliceHeight; 3492 if (!msg->findInt32("slice-height", &sliceHeight)) { 3493 sliceHeight = height; 3494 } 3495 3496 video_def->nSliceHeight = sliceHeight; 3497 3498 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3499 3500 float frameRate; 3501 if (!msg->findFloat("frame-rate", &frameRate)) { 3502 int32_t tmp; 3503 if (!msg->findInt32("frame-rate", &tmp)) { 3504 return INVALID_OPERATION; 3505 } 3506 frameRate = (float)tmp; 3507 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3508 } 3509 3510 
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3511 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3512 // this is redundant as it was already set up in setVideoPortFormatType 3513 // FIXME for now skip this only for flexible YUV formats 3514 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3515 video_def->eColorFormat = colorFormat; 3516 } 3517 3518 err = mOMX->setParameter( 3519 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3520 3521 if (err != OK) { 3522 ALOGE("[%s] failed to set input port definition parameters.", 3523 mComponentName.c_str()); 3524 3525 return err; 3526 } 3527 3528 /* Output port configuration */ 3529 3530 OMX_VIDEO_CODINGTYPE compressionFormat; 3531 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3532 3533 if (err != OK) { 3534 return err; 3535 } 3536 3537 err = setVideoPortFormatType( 3538 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3539 3540 if (err != OK) { 3541 ALOGE("[%s] does not support compression format %d", 3542 mComponentName.c_str(), compressionFormat); 3543 3544 return err; 3545 } 3546 3547 def.nPortIndex = kPortIndexOutput; 3548 3549 err = mOMX->getParameter( 3550 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3551 3552 if (err != OK) { 3553 return err; 3554 } 3555 3556 video_def->nFrameWidth = width; 3557 video_def->nFrameHeight = height; 3558 video_def->xFramerate = 0; 3559 video_def->nBitrate = bitrate; 3560 video_def->eCompressionFormat = compressionFormat; 3561 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3562 3563 err = mOMX->setParameter( 3564 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3565 3566 if (err != OK) { 3567 ALOGE("[%s] failed to set output port definition parameters.", 3568 mComponentName.c_str()); 3569 3570 return err; 3571 } 3572 3573 int32_t intraRefreshPeriod = 0; 3574 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3575 && intraRefreshPeriod >= 0) { 3576 err = 
setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3577 if (err != OK) { 3578 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3579 mComponentName.c_str()); 3580 err = OK; 3581 } 3582 } 3583 3584 switch (compressionFormat) { 3585 case OMX_VIDEO_CodingMPEG4: 3586 err = setupMPEG4EncoderParameters(msg); 3587 break; 3588 3589 case OMX_VIDEO_CodingH263: 3590 err = setupH263EncoderParameters(msg); 3591 break; 3592 3593 case OMX_VIDEO_CodingAVC: 3594 err = setupAVCEncoderParameters(msg); 3595 break; 3596 3597 case OMX_VIDEO_CodingHEVC: 3598 err = setupHEVCEncoderParameters(msg); 3599 break; 3600 3601 case OMX_VIDEO_CodingVP8: 3602 case OMX_VIDEO_CodingVP9: 3603 err = setupVPXEncoderParameters(msg); 3604 break; 3605 3606 default: 3607 break; 3608 } 3609 3610 // Set up color aspects on input, but propagate them to the output format, as they will 3611 // not be read back from encoder. 3612 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3613 if (err == ERROR_UNSUPPORTED) { 3614 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3615 err = OK; 3616 } 3617 3618 if (err == OK) { 3619 ALOGI("setupVideoEncoder succeeded"); 3620 } 3621 3622 return err; 3623} 3624 3625status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3626 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3627 InitOMXParams(¶ms); 3628 params.nPortIndex = kPortIndexOutput; 3629 3630 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3631 3632 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3633 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3634 int32_t mbs; 3635 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3636 return INVALID_OPERATION; 3637 } 3638 params.nCirMBs = mbs; 3639 } 3640 3641 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3642 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3643 int32_t mbs; 3644 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3645 return INVALID_OPERATION; 3646 } 3647 params.nAirMBs = mbs; 3648 3649 int32_t ref; 3650 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3651 return INVALID_OPERATION; 3652 } 3653 params.nAirRef = ref; 3654 } 3655 3656 status_t err = mOMX->setParameter( 3657 mNode, OMX_IndexParamVideoIntraRefresh, 3658 ¶ms, sizeof(params)); 3659 return err; 3660} 3661 3662static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3663 if (iFramesInterval < 0) { 3664 return 0xFFFFFFFF; 3665 } else if (iFramesInterval == 0) { 3666 return 0; 3667 } 3668 OMX_U32 ret = frameRate * iFramesInterval; 3669 return ret; 3670} 3671 3672static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3673 int32_t tmp; 3674 if (!msg->findInt32("bitrate-mode", &tmp)) { 3675 return OMX_Video_ControlRateVariable; 3676 } 3677 3678 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3679} 3680 3681status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3682 int32_t bitrate, iFrameInterval; 3683 if (!msg->findInt32("bitrate", 
&bitrate) 3684 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3685 return INVALID_OPERATION; 3686 } 3687 3688 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3689 3690 float frameRate; 3691 if (!msg->findFloat("frame-rate", &frameRate)) { 3692 int32_t tmp; 3693 if (!msg->findInt32("frame-rate", &tmp)) { 3694 return INVALID_OPERATION; 3695 } 3696 frameRate = (float)tmp; 3697 } 3698 3699 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3700 InitOMXParams(&mpeg4type); 3701 mpeg4type.nPortIndex = kPortIndexOutput; 3702 3703 status_t err = mOMX->getParameter( 3704 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3705 3706 if (err != OK) { 3707 return err; 3708 } 3709 3710 mpeg4type.nSliceHeaderSpacing = 0; 3711 mpeg4type.bSVH = OMX_FALSE; 3712 mpeg4type.bGov = OMX_FALSE; 3713 3714 mpeg4type.nAllowedPictureTypes = 3715 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3716 3717 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3718 if (mpeg4type.nPFrames == 0) { 3719 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3720 } 3721 mpeg4type.nBFrames = 0; 3722 mpeg4type.nIDCVLCThreshold = 0; 3723 mpeg4type.bACPred = OMX_TRUE; 3724 mpeg4type.nMaxPacketSize = 256; 3725 mpeg4type.nTimeIncRes = 1000; 3726 mpeg4type.nHeaderExtension = 0; 3727 mpeg4type.bReversibleVLC = OMX_FALSE; 3728 3729 int32_t profile; 3730 if (msg->findInt32("profile", &profile)) { 3731 int32_t level; 3732 if (!msg->findInt32("level", &level)) { 3733 return INVALID_OPERATION; 3734 } 3735 3736 err = verifySupportForProfileAndLevel(profile, level); 3737 3738 if (err != OK) { 3739 return err; 3740 } 3741 3742 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3743 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3744 } 3745 3746 err = mOMX->setParameter( 3747 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3748 3749 if (err != OK) { 3750 return err; 3751 } 3752 3753 err = configureBitrate(bitrate, 
bitrateMode); 3754 3755 if (err != OK) { 3756 return err; 3757 } 3758 3759 return setupErrorCorrectionParameters(); 3760} 3761 3762status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3763 int32_t bitrate, iFrameInterval; 3764 if (!msg->findInt32("bitrate", &bitrate) 3765 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3766 return INVALID_OPERATION; 3767 } 3768 3769 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3770 3771 float frameRate; 3772 if (!msg->findFloat("frame-rate", &frameRate)) { 3773 int32_t tmp; 3774 if (!msg->findInt32("frame-rate", &tmp)) { 3775 return INVALID_OPERATION; 3776 } 3777 frameRate = (float)tmp; 3778 } 3779 3780 OMX_VIDEO_PARAM_H263TYPE h263type; 3781 InitOMXParams(&h263type); 3782 h263type.nPortIndex = kPortIndexOutput; 3783 3784 status_t err = mOMX->getParameter( 3785 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3786 3787 if (err != OK) { 3788 return err; 3789 } 3790 3791 h263type.nAllowedPictureTypes = 3792 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3793 3794 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3795 if (h263type.nPFrames == 0) { 3796 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3797 } 3798 h263type.nBFrames = 0; 3799 3800 int32_t profile; 3801 if (msg->findInt32("profile", &profile)) { 3802 int32_t level; 3803 if (!msg->findInt32("level", &level)) { 3804 return INVALID_OPERATION; 3805 } 3806 3807 err = verifySupportForProfileAndLevel(profile, level); 3808 3809 if (err != OK) { 3810 return err; 3811 } 3812 3813 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3814 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3815 } 3816 3817 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3818 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3819 h263type.nPictureHeaderRepetition = 0; 3820 h263type.nGOBHeaderInterval = 0; 3821 3822 err = mOMX->setParameter( 3823 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3824 3825 if (err != OK) { 3826 return err; 3827 } 3828 3829 err = configureBitrate(bitrate, bitrateMode); 3830 3831 if (err != OK) { 3832 return err; 3833 } 3834 3835 return setupErrorCorrectionParameters(); 3836} 3837 3838// static 3839int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3840 int width, int height, int rate, int bitrate, 3841 OMX_VIDEO_AVCPROFILETYPE profile) { 3842 // convert bitrate to main/baseline profile kbps equivalent 3843 switch (profile) { 3844 case OMX_VIDEO_AVCProfileHigh10: 3845 bitrate = divUp(bitrate, 3000); break; 3846 case OMX_VIDEO_AVCProfileHigh: 3847 bitrate = divUp(bitrate, 1250); break; 3848 default: 3849 bitrate = divUp(bitrate, 1000); break; 3850 } 3851 3852 // convert size and rate to MBs 3853 width = divUp(width, 16); 3854 height = divUp(height, 16); 3855 int mbs = width * height; 3856 rate *= mbs; 3857 int maxDimension = max(width, height); 3858 3859 static const int limits[][5] = { 3860 /* MBps MB dim bitrate level */ 3861 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3862 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3863 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3864 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3865 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3866 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3867 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3868 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3869 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 3870 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 3871 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 3872 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 3873 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 3874 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 3875 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 3876 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 3877 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 3878 }; 3879 3880 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
3881 const int (&limit)[5] = limits[i]; 3882 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3883 && bitrate <= limit[3]) { 3884 return limit[4]; 3885 } 3886 } 3887 return 0; 3888} 3889 3890status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3891 int32_t bitrate, iFrameInterval; 3892 if (!msg->findInt32("bitrate", &bitrate) 3893 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3894 return INVALID_OPERATION; 3895 } 3896 3897 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3898 3899 float frameRate; 3900 if (!msg->findFloat("frame-rate", &frameRate)) { 3901 int32_t tmp; 3902 if (!msg->findInt32("frame-rate", &tmp)) { 3903 return INVALID_OPERATION; 3904 } 3905 frameRate = (float)tmp; 3906 } 3907 3908 status_t err = OK; 3909 int32_t intraRefreshMode = 0; 3910 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3911 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3912 if (err != OK) { 3913 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3914 err, intraRefreshMode); 3915 return err; 3916 } 3917 } 3918 3919 OMX_VIDEO_PARAM_AVCTYPE h264type; 3920 InitOMXParams(&h264type); 3921 h264type.nPortIndex = kPortIndexOutput; 3922 3923 err = mOMX->getParameter( 3924 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3925 3926 if (err != OK) { 3927 return err; 3928 } 3929 3930 h264type.nAllowedPictureTypes = 3931 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3932 3933 int32_t profile; 3934 if (msg->findInt32("profile", &profile)) { 3935 int32_t level; 3936 if (!msg->findInt32("level", &level)) { 3937 return INVALID_OPERATION; 3938 } 3939 3940 err = verifySupportForProfileAndLevel(profile, level); 3941 3942 if (err != OK) { 3943 return err; 3944 } 3945 3946 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 3947 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 3948 } else { 3949 // Use baseline profile for AVC recording if profile is not 
specified. 3950 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 3951 } 3952 3953 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 3954 asString(h264type.eProfile), asString(h264type.eLevel)); 3955 3956 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 3957 h264type.nSliceHeaderSpacing = 0; 3958 h264type.bUseHadamard = OMX_TRUE; 3959 h264type.nRefFrames = 1; 3960 h264type.nBFrames = 0; 3961 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3962 if (h264type.nPFrames == 0) { 3963 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3964 } 3965 h264type.nRefIdx10ActiveMinus1 = 0; 3966 h264type.nRefIdx11ActiveMinus1 = 0; 3967 h264type.bEntropyCodingCABAC = OMX_FALSE; 3968 h264type.bWeightedPPrediction = OMX_FALSE; 3969 h264type.bconstIpred = OMX_FALSE; 3970 h264type.bDirect8x8Inference = OMX_FALSE; 3971 h264type.bDirectSpatialTemporal = OMX_FALSE; 3972 h264type.nCabacInitIdc = 0; 3973 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 3974 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 3975 h264type.nSliceHeaderSpacing = 0; 3976 h264type.bUseHadamard = OMX_TRUE; 3977 h264type.nRefFrames = 2; 3978 h264type.nBFrames = 1; 3979 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3980 h264type.nAllowedPictureTypes = 3981 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 3982 h264type.nRefIdx10ActiveMinus1 = 0; 3983 h264type.nRefIdx11ActiveMinus1 = 0; 3984 h264type.bEntropyCodingCABAC = OMX_TRUE; 3985 h264type.bWeightedPPrediction = OMX_TRUE; 3986 h264type.bconstIpred = OMX_TRUE; 3987 h264type.bDirect8x8Inference = OMX_TRUE; 3988 h264type.bDirectSpatialTemporal = OMX_TRUE; 3989 h264type.nCabacInitIdc = 1; 3990 } 3991 3992 if (h264type.nBFrames != 0) { 3993 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 3994 } 3995 3996 h264type.bEnableUEP = OMX_FALSE; 3997 h264type.bEnableFMO = OMX_FALSE; 3998 h264type.bEnableASO = OMX_FALSE; 3999 h264type.bEnableRS = OMX_FALSE; 4000 
h264type.bFrameMBsOnly = OMX_TRUE; 4001 h264type.bMBAFF = OMX_FALSE; 4002 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4003 4004 err = mOMX->setParameter( 4005 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4006 4007 if (err != OK) { 4008 return err; 4009 } 4010 4011 return configureBitrate(bitrate, bitrateMode); 4012} 4013 4014status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4015 int32_t bitrate, iFrameInterval; 4016 if (!msg->findInt32("bitrate", &bitrate) 4017 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4018 return INVALID_OPERATION; 4019 } 4020 4021 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4022 4023 float frameRate; 4024 if (!msg->findFloat("frame-rate", &frameRate)) { 4025 int32_t tmp; 4026 if (!msg->findInt32("frame-rate", &tmp)) { 4027 return INVALID_OPERATION; 4028 } 4029 frameRate = (float)tmp; 4030 } 4031 4032 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4033 InitOMXParams(&hevcType); 4034 hevcType.nPortIndex = kPortIndexOutput; 4035 4036 status_t err = OK; 4037 err = mOMX->getParameter( 4038 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4039 if (err != OK) { 4040 return err; 4041 } 4042 4043 int32_t profile; 4044 if (msg->findInt32("profile", &profile)) { 4045 int32_t level; 4046 if (!msg->findInt32("level", &level)) { 4047 return INVALID_OPERATION; 4048 } 4049 4050 err = verifySupportForProfileAndLevel(profile, level); 4051 if (err != OK) { 4052 return err; 4053 } 4054 4055 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4056 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4057 } 4058 // TODO: finer control? 
4059 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4060 4061 err = mOMX->setParameter( 4062 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4063 if (err != OK) { 4064 return err; 4065 } 4066 4067 return configureBitrate(bitrate, bitrateMode); 4068} 4069 4070status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4071 int32_t bitrate; 4072 int32_t iFrameInterval = 0; 4073 size_t tsLayers = 0; 4074 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4075 OMX_VIDEO_VPXTemporalLayerPatternNone; 4076 static const uint32_t kVp8LayerRateAlloction 4077 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4078 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4079 {100, 100, 100}, // 1 layer 4080 { 60, 100, 100}, // 2 layers {60%, 40%} 4081 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4082 }; 4083 if (!msg->findInt32("bitrate", &bitrate)) { 4084 return INVALID_OPERATION; 4085 } 4086 msg->findInt32("i-frame-interval", &iFrameInterval); 4087 4088 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4089 4090 float frameRate; 4091 if (!msg->findFloat("frame-rate", &frameRate)) { 4092 int32_t tmp; 4093 if (!msg->findInt32("frame-rate", &tmp)) { 4094 return INVALID_OPERATION; 4095 } 4096 frameRate = (float)tmp; 4097 } 4098 4099 AString tsSchema; 4100 if (msg->findString("ts-schema", &tsSchema)) { 4101 if (tsSchema == "webrtc.vp8.1-layer") { 4102 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4103 tsLayers = 1; 4104 } else if (tsSchema == "webrtc.vp8.2-layer") { 4105 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4106 tsLayers = 2; 4107 } else if (tsSchema == "webrtc.vp8.3-layer") { 4108 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4109 tsLayers = 3; 4110 } else { 4111 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4112 } 4113 } 4114 4115 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4116 InitOMXParams(&vp8type); 4117 vp8type.nPortIndex = kPortIndexOutput; 4118 status_t err = 
mOMX->getParameter( 4119 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4120 &vp8type, sizeof(vp8type)); 4121 4122 if (err == OK) { 4123 if (iFrameInterval > 0) { 4124 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4125 } 4126 vp8type.eTemporalPattern = pattern; 4127 vp8type.nTemporalLayerCount = tsLayers; 4128 if (tsLayers > 0) { 4129 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4130 vp8type.nTemporalLayerBitrateRatio[i] = 4131 kVp8LayerRateAlloction[tsLayers - 1][i]; 4132 } 4133 } 4134 if (bitrateMode == OMX_Video_ControlRateConstant) { 4135 vp8type.nMinQuantizer = 2; 4136 vp8type.nMaxQuantizer = 63; 4137 } 4138 4139 err = mOMX->setParameter( 4140 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4141 &vp8type, sizeof(vp8type)); 4142 if (err != OK) { 4143 ALOGW("Extended VP8 parameters set failed: %d", err); 4144 } 4145 } 4146 4147 return configureBitrate(bitrate, bitrateMode); 4148} 4149 4150status_t ACodec::verifySupportForProfileAndLevel( 4151 int32_t profile, int32_t level) { 4152 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4153 InitOMXParams(¶ms); 4154 params.nPortIndex = kPortIndexOutput; 4155 4156 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4157 params.nProfileIndex = index; 4158 status_t err = mOMX->getParameter( 4159 mNode, 4160 OMX_IndexParamVideoProfileLevelQuerySupported, 4161 ¶ms, 4162 sizeof(params)); 4163 4164 if (err != OK) { 4165 return err; 4166 } 4167 4168 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4169 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4170 4171 if (profile == supportedProfile && level <= supportedLevel) { 4172 return OK; 4173 } 4174 4175 if (index == kMaxIndicesToCheck) { 4176 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4177 mComponentName.c_str(), index, 4178 params.eProfile, params.eLevel); 4179 } 4180 } 4181 return ERROR_UNSUPPORTED; 4182} 4183 4184status_t ACodec::configureBitrate( 
4185 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4186 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4187 InitOMXParams(&bitrateType); 4188 bitrateType.nPortIndex = kPortIndexOutput; 4189 4190 status_t err = mOMX->getParameter( 4191 mNode, OMX_IndexParamVideoBitrate, 4192 &bitrateType, sizeof(bitrateType)); 4193 4194 if (err != OK) { 4195 return err; 4196 } 4197 4198 bitrateType.eControlRate = bitrateMode; 4199 bitrateType.nTargetBitrate = bitrate; 4200 4201 return mOMX->setParameter( 4202 mNode, OMX_IndexParamVideoBitrate, 4203 &bitrateType, sizeof(bitrateType)); 4204} 4205 4206status_t ACodec::setupErrorCorrectionParameters() { 4207 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4208 InitOMXParams(&errorCorrectionType); 4209 errorCorrectionType.nPortIndex = kPortIndexOutput; 4210 4211 status_t err = mOMX->getParameter( 4212 mNode, OMX_IndexParamVideoErrorCorrection, 4213 &errorCorrectionType, sizeof(errorCorrectionType)); 4214 4215 if (err != OK) { 4216 return OK; // Optional feature. 
Ignore this failure 4217 } 4218 4219 errorCorrectionType.bEnableHEC = OMX_FALSE; 4220 errorCorrectionType.bEnableResync = OMX_TRUE; 4221 errorCorrectionType.nResynchMarkerSpacing = 256; 4222 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4223 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4224 4225 return mOMX->setParameter( 4226 mNode, OMX_IndexParamVideoErrorCorrection, 4227 &errorCorrectionType, sizeof(errorCorrectionType)); 4228} 4229 4230status_t ACodec::setVideoFormatOnPort( 4231 OMX_U32 portIndex, 4232 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4233 float frameRate) { 4234 OMX_PARAM_PORTDEFINITIONTYPE def; 4235 InitOMXParams(&def); 4236 def.nPortIndex = portIndex; 4237 4238 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4239 4240 status_t err = mOMX->getParameter( 4241 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4242 if (err != OK) { 4243 return err; 4244 } 4245 4246 if (portIndex == kPortIndexInput) { 4247 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4248 const size_t X = 64 * 1024; 4249 if (def.nBufferSize < X) { 4250 def.nBufferSize = X; 4251 } 4252 } 4253 4254 if (def.eDomain != OMX_PortDomainVideo) { 4255 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4256 return FAILED_TRANSACTION; 4257 } 4258 4259 video_def->nFrameWidth = width; 4260 video_def->nFrameHeight = height; 4261 4262 if (portIndex == kPortIndexInput) { 4263 video_def->eCompressionFormat = compressionFormat; 4264 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4265 if (frameRate >= 0) { 4266 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4267 } 4268 } 4269 4270 err = mOMX->setParameter( 4271 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4272 4273 return err; 4274} 4275 4276status_t ACodec::initNativeWindow() { 4277 if (mNativeWindow != NULL) { 4278 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4279 } 4280 4281 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4282 return OK; 4283} 4284 4285size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4286 size_t n = 0; 4287 4288 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4289 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4290 4291 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4292 ++n; 4293 } 4294 } 4295 4296 return n; 4297} 4298 4299size_t ACodec::countBuffersOwnedByNativeWindow() const { 4300 size_t n = 0; 4301 4302 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4303 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4304 4305 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4306 ++n; 4307 } 4308 } 4309 4310 return n; 4311} 4312 4313void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4314 if (mNativeWindow == NULL) { 4315 return; 4316 } 4317 4318 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4319 && dequeueBufferFromNativeWindow() != NULL) { 
4320 // these buffers will be submitted as regular buffers; account for this 4321 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4322 --mMetadataBuffersToSubmit; 4323 } 4324 } 4325} 4326 4327bool ACodec::allYourBuffersAreBelongToUs( 4328 OMX_U32 portIndex) { 4329 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4330 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4331 4332 if (info->mStatus != BufferInfo::OWNED_BY_US 4333 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4334 ALOGV("[%s] Buffer %u on port %u still has status %d", 4335 mComponentName.c_str(), 4336 info->mBufferID, portIndex, info->mStatus); 4337 return false; 4338 } 4339 } 4340 4341 return true; 4342} 4343 4344bool ACodec::allYourBuffersAreBelongToUs() { 4345 return allYourBuffersAreBelongToUs(kPortIndexInput) 4346 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4347} 4348 4349void ACodec::deferMessage(const sp<AMessage> &msg) { 4350 mDeferredQueue.push_back(msg); 4351} 4352 4353void ACodec::processDeferredMessages() { 4354 List<sp<AMessage> > queue = mDeferredQueue; 4355 mDeferredQueue.clear(); 4356 4357 List<sp<AMessage> >::iterator it = queue.begin(); 4358 while (it != queue.end()) { 4359 onMessageReceived(*it++); 4360 } 4361} 4362 4363// static 4364bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4365 MediaImage2 &image = params.sMediaImage; 4366 memset(&image, 0, sizeof(image)); 4367 4368 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4369 image.mNumPlanes = 0; 4370 4371 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4372 image.mWidth = params.nFrameWidth; 4373 image.mHeight = params.nFrameHeight; 4374 4375 // only supporting YUV420 4376 if (fmt != OMX_COLOR_FormatYUV420Planar && 4377 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4378 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4379 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4380 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4381 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4382 return false; 4383 } 4384 4385 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4386 if (params.nStride != 0 && params.nSliceHeight == 0) { 4387 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4388 params.nFrameHeight); 4389 params.nSliceHeight = params.nFrameHeight; 4390 } 4391 4392 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4393 // prevent integer overflows further down the line, and do not indicate support for 4394 // 32kx32k video. 4395 if (params.nStride == 0 || params.nSliceHeight == 0 4396 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4397 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4398 fmt, fmt, params.nStride, params.nSliceHeight); 4399 return false; 4400 } 4401 4402 // set-up YUV format 4403 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4404 image.mNumPlanes = 3; 4405 image.mBitDepth = 8; 4406 image.mBitDepthAllocated = 8; 4407 image.mPlane[image.Y].mOffset = 0; 4408 image.mPlane[image.Y].mColInc = 1; 4409 image.mPlane[image.Y].mRowInc = params.nStride; 4410 image.mPlane[image.Y].mHorizSubsampling = 1; 4411 image.mPlane[image.Y].mVertSubsampling = 1; 4412 4413 switch ((int)fmt) { 4414 case HAL_PIXEL_FORMAT_YV12: 4415 if (params.bUsingNativeBuffers) { 4416 size_t ystride = align(params.nStride, 16); 4417 size_t cstride = align(params.nStride / 2, 16); 4418 image.mPlane[image.Y].mRowInc = ystride; 4419 4420 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4421 image.mPlane[image.V].mColInc = 1; 4422 image.mPlane[image.V].mRowInc = cstride; 4423 image.mPlane[image.V].mHorizSubsampling = 2; 4424 image.mPlane[image.V].mVertSubsampling = 2; 4425 4426 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4427 + (cstride * params.nSliceHeight / 2); 4428 image.mPlane[image.U].mColInc = 1; 4429 image.mPlane[image.U].mRowInc = cstride; 4430 
image.mPlane[image.U].mHorizSubsampling = 2; 4431 image.mPlane[image.U].mVertSubsampling = 2; 4432 break; 4433 } else { 4434 // fall through as YV12 is used for YUV420Planar by some codecs 4435 } 4436 4437 case OMX_COLOR_FormatYUV420Planar: 4438 case OMX_COLOR_FormatYUV420PackedPlanar: 4439 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4440 image.mPlane[image.U].mColInc = 1; 4441 image.mPlane[image.U].mRowInc = params.nStride / 2; 4442 image.mPlane[image.U].mHorizSubsampling = 2; 4443 image.mPlane[image.U].mVertSubsampling = 2; 4444 4445 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4446 + (params.nStride * params.nSliceHeight / 4); 4447 image.mPlane[image.V].mColInc = 1; 4448 image.mPlane[image.V].mRowInc = params.nStride / 2; 4449 image.mPlane[image.V].mHorizSubsampling = 2; 4450 image.mPlane[image.V].mVertSubsampling = 2; 4451 break; 4452 4453 case OMX_COLOR_FormatYUV420SemiPlanar: 4454 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4455 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4456 // NV12 4457 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4458 image.mPlane[image.U].mColInc = 2; 4459 image.mPlane[image.U].mRowInc = params.nStride; 4460 image.mPlane[image.U].mHorizSubsampling = 2; 4461 image.mPlane[image.U].mVertSubsampling = 2; 4462 4463 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4464 image.mPlane[image.V].mColInc = 2; 4465 image.mPlane[image.V].mRowInc = params.nStride; 4466 image.mPlane[image.V].mHorizSubsampling = 2; 4467 image.mPlane[image.V].mVertSubsampling = 2; 4468 break; 4469 4470 default: 4471 TRESPASS(); 4472 } 4473 return true; 4474} 4475 4476// static 4477bool ACodec::describeColorFormat( 4478 const sp<IOMX> &omx, IOMX::node_id node, 4479 DescribeColorFormat2Params &describeParams) 4480{ 4481 OMX_INDEXTYPE describeColorFormatIndex; 4482 if (omx->getExtensionIndex( 4483 node, "OMX.google.android.index.describeColorFormat", 4484 
&describeColorFormatIndex) == OK) { 4485 DescribeColorFormatParams describeParamsV1(describeParams); 4486 if (omx->getParameter( 4487 node, describeColorFormatIndex, 4488 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4489 describeParams.initFromV1(describeParamsV1); 4490 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4491 } 4492 } else if (omx->getExtensionIndex( 4493 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4494 && omx->getParameter( 4495 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4496 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4497 } 4498 4499 return describeDefaultColorFormat(describeParams); 4500} 4501 4502// static 4503bool ACodec::isFlexibleColorFormat( 4504 const sp<IOMX> &omx, IOMX::node_id node, 4505 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4506 DescribeColorFormat2Params describeParams; 4507 InitOMXParams(&describeParams); 4508 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4509 // reasonable dummy values 4510 describeParams.nFrameWidth = 128; 4511 describeParams.nFrameHeight = 128; 4512 describeParams.nStride = 128; 4513 describeParams.nSliceHeight = 128; 4514 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4515 4516 CHECK(flexibleEquivalent != NULL); 4517 4518 if (!describeColorFormat(omx, node, describeParams)) { 4519 return false; 4520 } 4521 4522 const MediaImage2 &img = describeParams.sMediaImage; 4523 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4524 if (img.mNumPlanes != 3 4525 || img.mPlane[img.Y].mHorizSubsampling != 1 4526 || img.mPlane[img.Y].mVertSubsampling != 1) { 4527 return false; 4528 } 4529 4530 // YUV 420 4531 if (img.mPlane[img.U].mHorizSubsampling == 2 4532 && img.mPlane[img.U].mVertSubsampling == 2 4533 && img.mPlane[img.V].mHorizSubsampling == 2 4534 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4535 // possible flexible YUV420 format 4536 if (img.mBitDepth <= 8) { 4537 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4538 return true; 4539 } 4540 } 4541 } 4542 return false; 4543} 4544 4545status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4546 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4547 OMX_PARAM_PORTDEFINITIONTYPE def; 4548 InitOMXParams(&def); 4549 def.nPortIndex = portIndex; 4550 4551 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4552 if (err != OK) { 4553 return err; 4554 } 4555 4556 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4557 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4558 return BAD_VALUE; 4559 } 4560 4561 switch (def.eDomain) { 4562 case OMX_PortDomainVideo: 4563 { 4564 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4565 switch ((int)videoDef->eCompressionFormat) { 4566 case OMX_VIDEO_CodingUnused: 4567 { 4568 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4569 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4570 4571 notify->setInt32("stride", videoDef->nStride); 4572 notify->setInt32("slice-height", videoDef->nSliceHeight); 4573 notify->setInt32("color-format", videoDef->eColorFormat); 4574 4575 if (mNativeWindow == NULL) { 4576 DescribeColorFormat2Params describeParams; 4577 InitOMXParams(&describeParams); 4578 describeParams.eColorFormat = videoDef->eColorFormat; 4579 describeParams.nFrameWidth = videoDef->nFrameWidth; 4580 describeParams.nFrameHeight = videoDef->nFrameHeight; 4581 describeParams.nStride = videoDef->nStride; 4582 describeParams.nSliceHeight = videoDef->nSliceHeight; 4583 describeParams.bUsingNativeBuffers = OMX_FALSE; 4584 4585 if (describeColorFormat(mOMX, mNode, describeParams)) { 4586 notify->setBuffer( 4587 "image-data", 4588 ABuffer::CreateAsCopy( 4589 
&describeParams.sMediaImage, 4590 sizeof(describeParams.sMediaImage))); 4591 4592 MediaImage2 &img = describeParams.sMediaImage; 4593 MediaImage2::PlaneInfo *plane = img.mPlane; 4594 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4595 mComponentName.c_str(), img.mWidth, img.mHeight, 4596 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4597 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4598 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4599 } 4600 } 4601 4602 int32_t width = (int32_t)videoDef->nFrameWidth; 4603 int32_t height = (int32_t)videoDef->nFrameHeight; 4604 4605 if (portIndex == kPortIndexOutput) { 4606 OMX_CONFIG_RECTTYPE rect; 4607 InitOMXParams(&rect); 4608 rect.nPortIndex = portIndex; 4609 4610 if (mOMX->getConfig( 4611 mNode, 4612 (portIndex == kPortIndexOutput ? 4613 OMX_IndexConfigCommonOutputCrop : 4614 OMX_IndexConfigCommonInputCrop), 4615 &rect, sizeof(rect)) != OK) { 4616 rect.nLeft = 0; 4617 rect.nTop = 0; 4618 rect.nWidth = videoDef->nFrameWidth; 4619 rect.nHeight = videoDef->nFrameHeight; 4620 } 4621 4622 if (rect.nLeft < 0 || 4623 rect.nTop < 0 || 4624 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4625 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4626 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4627 rect.nLeft, rect.nTop, 4628 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4629 videoDef->nFrameWidth, videoDef->nFrameHeight); 4630 return BAD_VALUE; 4631 } 4632 4633 notify->setRect( 4634 "crop", 4635 rect.nLeft, 4636 rect.nTop, 4637 rect.nLeft + rect.nWidth - 1, 4638 rect.nTop + rect.nHeight - 1); 4639 4640 width = rect.nWidth; 4641 height = rect.nHeight; 4642 4643 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4644 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4645 width, height, mConfigFormat, notify, 4646 mUsingNativeWindow ? 
&dataSpace : NULL); 4647 if (mUsingNativeWindow) { 4648 notify->setInt32("android._dataspace", dataSpace); 4649 } 4650 } else { 4651 (void)getInputColorAspectsForVideoEncoder(notify); 4652 } 4653 4654 break; 4655 } 4656 4657 case OMX_VIDEO_CodingVP8: 4658 case OMX_VIDEO_CodingVP9: 4659 { 4660 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4661 InitOMXParams(&vp8type); 4662 vp8type.nPortIndex = kPortIndexOutput; 4663 status_t err = mOMX->getParameter( 4664 mNode, 4665 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4666 &vp8type, 4667 sizeof(vp8type)); 4668 4669 if (err == OK) { 4670 AString tsSchema = "none"; 4671 if (vp8type.eTemporalPattern 4672 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4673 switch (vp8type.nTemporalLayerCount) { 4674 case 1: 4675 { 4676 tsSchema = "webrtc.vp8.1-layer"; 4677 break; 4678 } 4679 case 2: 4680 { 4681 tsSchema = "webrtc.vp8.2-layer"; 4682 break; 4683 } 4684 case 3: 4685 { 4686 tsSchema = "webrtc.vp8.3-layer"; 4687 break; 4688 } 4689 default: 4690 { 4691 break; 4692 } 4693 } 4694 } 4695 notify->setString("ts-schema", tsSchema); 4696 } 4697 // Fall through to set up mime. 
4698 } 4699 4700 default: 4701 { 4702 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4703 // should be CodingUnused 4704 ALOGE("Raw port video compression format is %s(%d)", 4705 asString(videoDef->eCompressionFormat), 4706 videoDef->eCompressionFormat); 4707 return BAD_VALUE; 4708 } 4709 AString mime; 4710 if (GetMimeTypeForVideoCoding( 4711 videoDef->eCompressionFormat, &mime) != OK) { 4712 notify->setString("mime", "application/octet-stream"); 4713 } else { 4714 notify->setString("mime", mime.c_str()); 4715 } 4716 uint32_t intraRefreshPeriod = 0; 4717 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4718 && intraRefreshPeriod > 0) { 4719 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4720 } 4721 break; 4722 } 4723 } 4724 notify->setInt32("width", videoDef->nFrameWidth); 4725 notify->setInt32("height", videoDef->nFrameHeight); 4726 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4727 portIndex == kPortIndexInput ? "input" : "output", 4728 notify->debugString().c_str()); 4729 4730 break; 4731 } 4732 4733 case OMX_PortDomainAudio: 4734 { 4735 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4736 4737 switch ((int)audioDef->eEncoding) { 4738 case OMX_AUDIO_CodingPCM: 4739 { 4740 OMX_AUDIO_PARAM_PCMMODETYPE params; 4741 InitOMXParams(¶ms); 4742 params.nPortIndex = portIndex; 4743 4744 err = mOMX->getParameter( 4745 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4746 if (err != OK) { 4747 return err; 4748 } 4749 4750 if (params.nChannels <= 0 4751 || (params.nChannels != 1 && !params.bInterleaved) 4752 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4753 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4754 params.nChannels, 4755 params.bInterleaved ? 
" interleaved" : "", 4756 params.nBitPerSample); 4757 return FAILED_TRANSACTION; 4758 } 4759 4760 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4761 notify->setInt32("channel-count", params.nChannels); 4762 notify->setInt32("sample-rate", params.nSamplingRate); 4763 4764 AudioEncoding encoding = kAudioEncodingPcm16bit; 4765 if (params.eNumData == OMX_NumericalDataUnsigned 4766 && params.nBitPerSample == 8u) { 4767 encoding = kAudioEncodingPcm8bit; 4768 } else if (params.eNumData == OMX_NumericalDataFloat 4769 && params.nBitPerSample == 32u) { 4770 encoding = kAudioEncodingPcmFloat; 4771 } else if (params.nBitPerSample != 16u 4772 || params.eNumData != OMX_NumericalDataSigned) { 4773 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4774 asString(params.eNumData), params.eNumData, 4775 asString(params.ePCMMode), params.ePCMMode); 4776 return FAILED_TRANSACTION; 4777 } 4778 notify->setInt32("pcm-encoding", encoding); 4779 4780 if (mChannelMaskPresent) { 4781 notify->setInt32("channel-mask", mChannelMask); 4782 } 4783 break; 4784 } 4785 4786 case OMX_AUDIO_CodingAAC: 4787 { 4788 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4789 InitOMXParams(¶ms); 4790 params.nPortIndex = portIndex; 4791 4792 err = mOMX->getParameter( 4793 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4794 if (err != OK) { 4795 return err; 4796 } 4797 4798 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4799 notify->setInt32("channel-count", params.nChannels); 4800 notify->setInt32("sample-rate", params.nSampleRate); 4801 break; 4802 } 4803 4804 case OMX_AUDIO_CodingAMR: 4805 { 4806 OMX_AUDIO_PARAM_AMRTYPE params; 4807 InitOMXParams(¶ms); 4808 params.nPortIndex = portIndex; 4809 4810 err = mOMX->getParameter( 4811 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4812 if (err != OK) { 4813 return err; 4814 } 4815 4816 notify->setInt32("channel-count", 1); 4817 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4818 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4819 
notify->setInt32("sample-rate", 16000); 4820 } else { 4821 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4822 notify->setInt32("sample-rate", 8000); 4823 } 4824 break; 4825 } 4826 4827 case OMX_AUDIO_CodingFLAC: 4828 { 4829 OMX_AUDIO_PARAM_FLACTYPE params; 4830 InitOMXParams(¶ms); 4831 params.nPortIndex = portIndex; 4832 4833 err = mOMX->getParameter( 4834 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4835 if (err != OK) { 4836 return err; 4837 } 4838 4839 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4840 notify->setInt32("channel-count", params.nChannels); 4841 notify->setInt32("sample-rate", params.nSampleRate); 4842 break; 4843 } 4844 4845 case OMX_AUDIO_CodingMP3: 4846 { 4847 OMX_AUDIO_PARAM_MP3TYPE params; 4848 InitOMXParams(¶ms); 4849 params.nPortIndex = portIndex; 4850 4851 err = mOMX->getParameter( 4852 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4853 if (err != OK) { 4854 return err; 4855 } 4856 4857 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4858 notify->setInt32("channel-count", params.nChannels); 4859 notify->setInt32("sample-rate", params.nSampleRate); 4860 break; 4861 } 4862 4863 case OMX_AUDIO_CodingVORBIS: 4864 { 4865 OMX_AUDIO_PARAM_VORBISTYPE params; 4866 InitOMXParams(¶ms); 4867 params.nPortIndex = portIndex; 4868 4869 err = mOMX->getParameter( 4870 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4871 if (err != OK) { 4872 return err; 4873 } 4874 4875 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4876 notify->setInt32("channel-count", params.nChannels); 4877 notify->setInt32("sample-rate", params.nSampleRate); 4878 break; 4879 } 4880 4881 case OMX_AUDIO_CodingAndroidAC3: 4882 { 4883 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 4884 InitOMXParams(¶ms); 4885 params.nPortIndex = portIndex; 4886 4887 err = mOMX->getParameter( 4888 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 4889 ¶ms, sizeof(params)); 4890 if (err != OK) { 4891 return err; 4892 } 4893 4894 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 4895 notify->setInt32("channel-count", params.nChannels); 4896 notify->setInt32("sample-rate", params.nSampleRate); 4897 break; 4898 } 4899 4900 case OMX_AUDIO_CodingAndroidEAC3: 4901 { 4902 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 4903 InitOMXParams(¶ms); 4904 params.nPortIndex = portIndex; 4905 4906 err = mOMX->getParameter( 4907 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 4908 ¶ms, sizeof(params)); 4909 if (err != OK) { 4910 return err; 4911 } 4912 4913 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 4914 notify->setInt32("channel-count", params.nChannels); 4915 notify->setInt32("sample-rate", params.nSampleRate); 4916 break; 4917 } 4918 4919 case OMX_AUDIO_CodingAndroidOPUS: 4920 { 4921 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 4922 InitOMXParams(¶ms); 4923 params.nPortIndex = portIndex; 4924 4925 err = mOMX->getParameter( 4926 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 4927 ¶ms, sizeof(params)); 4928 if (err != OK) { 4929 return err; 4930 } 4931 4932 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 4933 notify->setInt32("channel-count", params.nChannels); 4934 notify->setInt32("sample-rate", params.nSampleRate); 4935 break; 4936 } 4937 4938 case OMX_AUDIO_CodingG711: 4939 { 4940 OMX_AUDIO_PARAM_PCMMODETYPE params; 4941 InitOMXParams(¶ms); 4942 params.nPortIndex = portIndex; 4943 4944 err = mOMX->getParameter( 4945 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4946 if (err != OK) { 4947 return err; 4948 } 4949 4950 const char *mime = NULL; 4951 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 4952 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 4953 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 4954 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 4955 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 4956 mime = MEDIA_MIMETYPE_AUDIO_RAW; 4957 } 4958 notify->setString("mime", mime); 4959 notify->setInt32("channel-count", params.nChannels); 4960 notify->setInt32("sample-rate", 
params.nSamplingRate); 4961 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 4962 break; 4963 } 4964 4965 case OMX_AUDIO_CodingGSMFR: 4966 { 4967 OMX_AUDIO_PARAM_PCMMODETYPE params; 4968 InitOMXParams(¶ms); 4969 params.nPortIndex = portIndex; 4970 4971 err = mOMX->getParameter( 4972 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4973 if (err != OK) { 4974 return err; 4975 } 4976 4977 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 4978 notify->setInt32("channel-count", params.nChannels); 4979 notify->setInt32("sample-rate", params.nSamplingRate); 4980 break; 4981 } 4982 4983 default: 4984 ALOGE("Unsupported audio coding: %s(%d)\n", 4985 asString(audioDef->eEncoding), audioDef->eEncoding); 4986 return BAD_TYPE; 4987 } 4988 break; 4989 } 4990 4991 default: 4992 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4993 return BAD_TYPE; 4994 } 4995 4996 return OK; 4997} 4998 4999void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5000 // aspects are normally communicated in ColorAspects 5001 int32_t range, standard, transfer; 5002 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5003 5004 // if some aspects are unspecified, use dataspace fields 5005 if (range != 0) { 5006 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5007 } 5008 if (standard != 0) { 5009 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5010 } 5011 if (transfer != 0) { 5012 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5013 } 5014 5015 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5016 if (range != 0) { 5017 mOutputFormat->setInt32("color-range", range); 5018 } 5019 if (standard != 0) { 5020 mOutputFormat->setInt32("color-standard", standard); 5021 } 5022 if (transfer != 0) { 5023 mOutputFormat->setInt32("color-transfer", transfer); 5024 } 5025 5026 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5027 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5028 dataSpace, 5029 aspects.mRange, asString(aspects.mRange), 5030 aspects.mPrimaries, asString(aspects.mPrimaries), 5031 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5032 aspects.mTransfer, asString(aspects.mTransfer), 5033 range, asString((ColorRange)range), 5034 standard, asString((ColorStandard)standard), 5035 transfer, asString((ColorTransfer)transfer)); 5036} 5037 5038void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5039 // store new output format, at the same time mark that this is no longer the first frame 5040 mOutputFormat = mBaseOutputFormat->dup(); 5041 5042 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5043 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5044 return; 5045 } 5046 5047 if (expectedFormat != NULL) { 5048 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5049 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5050 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5051 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5052 mComponentName.c_str(), 5053 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5054 } 5055 } 5056 5057 if (!mIsVideo && !mIsEncoder) { 5058 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5059 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5060 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5061 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5062 5063 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5064 if (mConverter[kPortIndexOutput] != NULL) { 5065 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5066 } 5067 } 5068 5069 if (mTunneled) { 5070 sendFormatChange(); 5071 } 5072} 5073 5074void 
// Adds crop and dataspace entries to the render-buffer reply so the renderer
// can apply them; only meaningful for raw video going to a native window.
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts kWhatOutputFormatChanged with the current output format; for raw audio
// with encoder delay/padding it also (re)creates the SkipCutBuffer.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // buffered samples from the previous SkipCutBuffer are dropped here
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client, translating the OMX error into a
// status_t when the caller passed the generic UNKNOWN_ERROR.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus =
statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // once fatal, the codec instance is considered unusable
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Asks the encoder to emit an IDR (sync) frame; no-op failure for decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer; the four parallel vectors stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode: keep buffers (states that stream override this).
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Common message dispatch shared by all ACodec states; returns false for
// messages a derived state must handle itself.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }

        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }

        case ACodec::kWhatOMXMessageList:
        {
            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
        }

        case ACodec::kWhatOMXMessageItem:
        {
            // no need to check as we already did it for kWhatOMXMessageList
            return onOMXMessage(msg);
        }

        case ACodec::kWhatOMXMessage:
        {
            return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
        }

        case ACodec::kWhatSetSurface:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        case ACodec::kWhatSetInputSurface:
        case ACodec::kWhatSignalEndOfInputStream:
        {
            // This may result in an app illegal state exception.
            ALOGE("Message 0x%x was not handled", msg->what());
            mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
            return true;
        }

        case ACodec::kWhatOMXDied:
        {
            // This will result in kFlagSawMediaServerDie handling in MediaCodec.
5262 ALOGE("OMX/mediaserver died, signalling error!"); 5263 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5264 break; 5265 } 5266 5267 case ACodec::kWhatReleaseCodecInstance: 5268 { 5269 ALOGI("[%s] forcing the release of codec", 5270 mCodec->mComponentName.c_str()); 5271 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5272 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5273 mCodec->mComponentName.c_str(), err); 5274 sp<AMessage> notify = mCodec->mNotify->dup(); 5275 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5276 notify->post(); 5277 break; 5278 } 5279 5280 default: 5281 return false; 5282 } 5283 5284 return true; 5285} 5286 5287bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5288 // there is a possibility that this is an outstanding message for a 5289 // codec that we have already destroyed 5290 if (mCodec->mNode == 0) { 5291 ALOGI("ignoring message as already freed component: %s", 5292 msg->debugString().c_str()); 5293 return false; 5294 } 5295 5296 IOMX::node_id nodeID; 5297 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5298 if (nodeID != mCodec->mNode) { 5299 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5300 return false; 5301 } 5302 return true; 5303} 5304 5305bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5306 sp<RefBase> obj; 5307 CHECK(msg->findObject("messages", &obj)); 5308 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5309 5310 bool receivedRenderedEvents = false; 5311 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5312 it != msgList->getList().cend(); ++it) { 5313 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5314 mCodec->handleMessage(*it); 5315 int32_t type; 5316 CHECK((*it)->findInt32("type", &type)); 5317 if (type == omx_message::FRAME_RENDERED) { 5318 receivedRenderedEvents = true; 5319 } 5320 } 5321 5322 if (receivedRenderedEvents) { 5323 // NOTE: all buffers are 
rendered in this case 5324 mCodec->notifyOfRenderedFrames(); 5325 } 5326 return true; 5327} 5328 5329bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5330 int32_t type; 5331 CHECK(msg->findInt32("type", &type)); 5332 5333 switch (type) { 5334 case omx_message::EVENT: 5335 { 5336 int32_t event, data1, data2; 5337 CHECK(msg->findInt32("event", &event)); 5338 CHECK(msg->findInt32("data1", &data1)); 5339 CHECK(msg->findInt32("data2", &data2)); 5340 5341 if (event == OMX_EventCmdComplete 5342 && data1 == OMX_CommandFlush 5343 && data2 == (int32_t)OMX_ALL) { 5344 // Use of this notification is not consistent across 5345 // implementations. We'll drop this notification and rely 5346 // on flush-complete notifications on the individual port 5347 // indices instead. 5348 5349 return true; 5350 } 5351 5352 return onOMXEvent( 5353 static_cast<OMX_EVENTTYPE>(event), 5354 static_cast<OMX_U32>(data1), 5355 static_cast<OMX_U32>(data2)); 5356 } 5357 5358 case omx_message::EMPTY_BUFFER_DONE: 5359 { 5360 IOMX::buffer_id bufferID; 5361 int32_t fenceFd; 5362 5363 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5364 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5365 5366 return onOMXEmptyBufferDone(bufferID, fenceFd); 5367 } 5368 5369 case omx_message::FILL_BUFFER_DONE: 5370 { 5371 IOMX::buffer_id bufferID; 5372 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5373 5374 int32_t rangeOffset, rangeLength, flags, fenceFd; 5375 int64_t timeUs; 5376 5377 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5378 CHECK(msg->findInt32("range_length", &rangeLength)); 5379 CHECK(msg->findInt32("flags", &flags)); 5380 CHECK(msg->findInt64("timestamp", &timeUs)); 5381 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5382 5383 return onOMXFillBufferDone( 5384 bufferID, 5385 (size_t)rangeOffset, (size_t)rangeLength, 5386 (OMX_U32)flags, 5387 timeUs, 5388 fenceFd); 5389 } 5390 5391 case omx_message::FRAME_RENDERED: 5392 { 5393 int64_t mediaTimeUs, systemNano; 5394 5395 
            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

// Base implementation ignores render notifications.
bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events: dataspace changes are decoded (ColorAspects are
// packed into data2, one byte per field) and forwarded; errors are verified
// and signalled; everything else is left for derived states (return false).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        ColorAspects aspects;
        // data2 layout: range:24, primaries:16, matrix:8, transfer:0
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component finished consuming an input buffer: reclaim ownership,
// retire the fence, and resubmit per the port mode.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        // close the fence even on the error path so the fd does not leak
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Hands an input buffer to the client (upstream) to be filled; skipped once
// EOS has been seen on the input port.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    // the client replies with kWhatInputBufferFilled when done
    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned an input buffer (filled, flushed, or with an error);
// reclaim it and either hold it or submit it to the OMX component.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // hold on to the buffer; just record EOS state
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                int32_t isCSD;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                // client buffer differs from the codec-side buffer: copy or
                // convert the data into the buffer the component actually sees
                if (buffer != info->mCodecData) {
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ?
                             "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                    mCodec->mNode,
                    bufferID,
                    0,
                    info->mCodecData->size(),
                    flags,
                    timeUs,
                    info->mFenceFd);
                // ownership of the fence fd passes to the component
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer data: signal EOS with an empty buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Offers one of the input buffers we currently own back to the client.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if
    (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        // last buffer owned by us wins
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer: reclaim it, record the fence, and
// forward or resubmit it according to the output port mode.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        // close the fence even on the error path so the fd does not leak
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus =
    BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty non-EOS buffers (or buffers after output EOS) are
            // returned straight to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // encoder output carries a metadata struct, not pixel data;
                // extract the native handle and pass it along in the meta
                native_handle_t *handle = NULL;
                VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
                VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(grallocMeta)
                        && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
                    handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
                } else if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pBuffer->handle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer",
                    info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// The client is done with an output buffer: optionally render it to the
// native window, then recycle it according to the output port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // apply any crop/dataspace piggy-backed on the drain reply before queueing
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        // fence fd ownership passes to the native window on queueBuffer
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::UninitializedState::stateEntered() {
ALOGV("Now uninitialized"); 6070 6071 if (mDeathNotifier != NULL) { 6072 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6073 mDeathNotifier.clear(); 6074 } 6075 6076 mCodec->mUsingNativeWindow = false; 6077 mCodec->mNativeWindow.clear(); 6078 mCodec->mNativeWindowUsageBits = 0; 6079 mCodec->mNode = 0; 6080 mCodec->mOMX.clear(); 6081 mCodec->mQuirks = 0; 6082 mCodec->mFlags = 0; 6083 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6084 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6085 mCodec->mConverter[0].clear(); 6086 mCodec->mConverter[1].clear(); 6087 mCodec->mComponentName.clear(); 6088} 6089 6090bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6091 bool handled = false; 6092 6093 switch (msg->what()) { 6094 case ACodec::kWhatSetup: 6095 { 6096 onSetup(msg); 6097 6098 handled = true; 6099 break; 6100 } 6101 6102 case ACodec::kWhatAllocateComponent: 6103 { 6104 onAllocateComponent(msg); 6105 handled = true; 6106 break; 6107 } 6108 6109 case ACodec::kWhatShutdown: 6110 { 6111 int32_t keepComponentAllocated; 6112 CHECK(msg->findInt32( 6113 "keepComponentAllocated", &keepComponentAllocated)); 6114 ALOGW_IF(keepComponentAllocated, 6115 "cannot keep component allocated on shutdown in Uninitialized state"); 6116 6117 sp<AMessage> notify = mCodec->mNotify->dup(); 6118 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6119 notify->post(); 6120 6121 handled = true; 6122 break; 6123 } 6124 6125 case ACodec::kWhatFlush: 6126 { 6127 sp<AMessage> notify = mCodec->mNotify->dup(); 6128 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6129 notify->post(); 6130 6131 handled = true; 6132 break; 6133 } 6134 6135 case ACodec::kWhatReleaseCodecInstance: 6136 { 6137 // nothing to do, as we have already signaled shutdown 6138 handled = true; 6139 break; 6140 } 6141 6142 default: 6143 return BaseState::onMessageReceived(msg); 6144 } 6145 6146 return handled; 6147} 6148 6149void 
ACodec::UninitializedState::onSetup( 6150 const sp<AMessage> &msg) { 6151 if (onAllocateComponent(msg) 6152 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6153 mCodec->mLoadedState->onStart(); 6154 } 6155} 6156 6157bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6158 ALOGV("onAllocateComponent"); 6159 6160 CHECK(mCodec->mNode == 0); 6161 6162 OMXClient client; 6163 if (client.connect() != OK) { 6164 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6165 return false; 6166 } 6167 6168 sp<IOMX> omx = client.interface(); 6169 6170 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6171 6172 Vector<AString> matchingCodecs; 6173 6174 AString mime; 6175 6176 AString componentName; 6177 uint32_t quirks = 0; 6178 int32_t encoder = false; 6179 if (msg->findString("componentName", &componentName)) { 6180 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6181 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6182 matchingCodecs.add(componentName); 6183 } 6184 } else { 6185 CHECK(msg->findString("mime", &mime)); 6186 6187 if (!msg->findInt32("encoder", &encoder)) { 6188 encoder = false; 6189 } 6190 6191 MediaCodecList::findMatchingCodecs( 6192 mime.c_str(), 6193 encoder, // createEncoder 6194 0, // flags 6195 &matchingCodecs); 6196 } 6197 6198 sp<CodecObserver> observer = new CodecObserver; 6199 IOMX::node_id node = 0; 6200 6201 status_t err = NAME_NOT_FOUND; 6202 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6203 ++matchIndex) { 6204 componentName = matchingCodecs[matchIndex]; 6205 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6206 6207 pid_t tid = gettid(); 6208 int prevPriority = androidGetThreadPriority(tid); 6209 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6210 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6211 androidSetThreadPriority(tid, prevPriority); 6212 6213 if (err == OK) { 6214 break; 6215 } else { 
6216 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6217 } 6218 6219 node = 0; 6220 } 6221 6222 if (node == 0) { 6223 if (!mime.empty()) { 6224 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6225 encoder ? "en" : "de", mime.c_str(), err); 6226 } else { 6227 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6228 } 6229 6230 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6231 return false; 6232 } 6233 6234 mDeathNotifier = new DeathNotifier(notify); 6235 if (mCodec->mNodeBinder == NULL || 6236 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6237 // This was a local binder, if it dies so do we, we won't care 6238 // about any notifications in the afterlife. 6239 mDeathNotifier.clear(); 6240 } 6241 6242 notify = new AMessage(kWhatOMXMessageList, mCodec); 6243 observer->setNotificationMessage(notify); 6244 6245 mCodec->mComponentName = componentName; 6246 mCodec->mRenderTracker.setComponentName(componentName); 6247 mCodec->mFlags = 0; 6248 6249 if (componentName.endsWith(".secure")) { 6250 mCodec->mFlags |= kFlagIsSecure; 6251 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6252 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6253 } 6254 6255 mCodec->mQuirks = quirks; 6256 mCodec->mOMX = omx; 6257 mCodec->mNode = node; 6258 6259 { 6260 sp<AMessage> notify = mCodec->mNotify->dup(); 6261 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6262 notify->setString("componentName", mCodec->mComponentName.c_str()); 6263 notify->post(); 6264 } 6265 6266 mCodec->changeState(mCodec->mLoadedState); 6267 6268 return true; 6269} 6270 6271//////////////////////////////////////////////////////////////////////////////// 6272 6273ACodec::LoadedState::LoadedState(ACodec *codec) 6274 : BaseState(codec) { 6275} 6276 6277void ACodec::LoadedState::stateEntered() { 6278 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6279 6280 
    // Reset per-session state now that the component is back in Loaded.
    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    // A shutdown may have been requested while we were still transitioning
    // here; finish it now.
    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the client asked to keep the component
// allocated) and, for an explicit shutdown, posts the completion notification.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers are in flight in Loaded; report completion directly.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component for the given mime type; on success
// notifies the client with the resulting input/output formats.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes the encoder-surface options (repeat delay, max pts gap, max fps,
// time lapse, suspend, color aspects) down to the component and mirrors the
// consumer usage bits into the input format.  Returns the first error.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // timeLapse[0] is the frame duration, timeLapse[1] the capture interval.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best effort: expose whether the consumer reads the buffers in software.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface; the result (or an error) is reported
// via kWhatInputSurfaceCreated.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a pre-existing persistent input surface to the encoder; the result
// (or an error) is reported via kWhatInputSurfaceAccepted.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle transition; buffers are allocated once the
// command is acknowledged (see LoadedToIdleState::stateEntered).
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Allocates buffers on both ports; on failure rolls the component back to
// Loaded and frees whatever was allocated.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input-port buffers first, then output-port buffers.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until we have settled into a stable state.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// On the Idle acknowledgement, immediately request the Executing state.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until we have settled into a stable state.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// On the Executing acknowledgement, resume buffer flow and enter Executing.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or the surface) hold back to the component
// via fillBuffer.  Any ownership inconsistency or fillBuffer failure raises
// a fatal FAILED_TRANSACTION error.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer flow: returns output buffers to the component and asks
// the client to fill every input buffer we own.  Idempotent while active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters to the component.  Recognized keys:
// "video-bitrate", "skip-frames-before", "drop-input-frames", "request-sync",
// "operating-rate" and "intra-refresh-period".  Fails on the first error,
// except intra-refresh-period which is optional and only logged.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Asks the component to mark end-of-stream on the input surface and notifies
// the client (with an error code if the call failed).
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles output-port settings changes: a port-definition change triggers a
// full output-port disable/re-enable cycle; crop/intra-refresh changes do not.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring, returned output buffers are freed; input keeps flowing.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            // Defer until the port reconfiguration has completed.
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Completes the disable half of the reconfiguration: once the port-disable is
// acknowledged, re-enables the port and allocates the new buffer set.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
7256 mCodec->mShutdownInProgress = true; 7257 mCodec->mKeepComponentAllocated = false; 7258 mCodec->changeState(mCodec->mLoadedState); 7259 } 7260 7261 return true; 7262 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7263 if (data2 != (OMX_U32)kPortIndexOutput) { 7264 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7265 return false; 7266 } 7267 7268 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7269 7270 if (mCodec->mExecutingState->active()) { 7271 mCodec->mExecutingState->submitOutputBuffers(); 7272 } 7273 7274 mCodec->changeState(mCodec->mExecutingState); 7275 7276 return true; 7277 } 7278 7279 return false; 7280 } 7281 7282 default: 7283 return false; 7284 } 7285} 7286 7287//////////////////////////////////////////////////////////////////////////////// 7288 7289ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7290 : BaseState(codec), 7291 mComponentNowIdle(false) { 7292} 7293 7294bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7295 bool handled = false; 7296 7297 switch (msg->what()) { 7298 case kWhatFlush: 7299 { 7300 // Don't send me a flush request if you previously wanted me 7301 // to shutdown. 7302 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7303 break; 7304 } 7305 7306 case kWhatShutdown: 7307 { 7308 // We're already doing that... 
7309 7310 handled = true; 7311 break; 7312 } 7313 7314 default: 7315 handled = BaseState::onMessageReceived(msg); 7316 break; 7317 } 7318 7319 return handled; 7320} 7321 7322void ACodec::ExecutingToIdleState::stateEntered() { 7323 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7324 7325 mComponentNowIdle = false; 7326 mCodec->mLastOutputFormat.clear(); 7327} 7328 7329bool ACodec::ExecutingToIdleState::onOMXEvent( 7330 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7331 switch (event) { 7332 case OMX_EventCmdComplete: 7333 { 7334 if (data1 != (OMX_U32)OMX_CommandStateSet 7335 || data2 != (OMX_U32)OMX_StateIdle) { 7336 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7337 asString((OMX_COMMANDTYPE)data1), data1, 7338 asString((OMX_STATETYPE)data2), data2); 7339 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7340 return true; 7341 } 7342 7343 mComponentNowIdle = true; 7344 7345 changeStateIfWeOwnAllBuffers(); 7346 7347 return true; 7348 } 7349 7350 case OMX_EventPortSettingsChanged: 7351 case OMX_EventBufferFlag: 7352 { 7353 // We're shutting down and don't care about this anymore. 7354 return true; 7355 } 7356 7357 default: 7358 return BaseState::onOMXEvent(event, data1, data2); 7359 } 7360} 7361 7362void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7363 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7364 status_t err = mCodec->mOMX->sendCommand( 7365 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7366 if (err == OK) { 7367 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7368 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7369 if (err == OK) { 7370 err = err2; 7371 } 7372 } 7373 7374 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7375 && mCodec->mNativeWindow != NULL) { 7376 // We push enough 1x1 blank buffers to ensure that one of 7377 // them has made it to the display. 
This allows the OMX 7378 // component teardown to zero out any protected buffers 7379 // without the risk of scanning out one of those buffers. 7380 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7381 } 7382 7383 if (err != OK) { 7384 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7385 return; 7386 } 7387 7388 mCodec->changeState(mCodec->mIdleToLoadedState); 7389 } 7390} 7391 7392void ACodec::ExecutingToIdleState::onInputBufferFilled( 7393 const sp<AMessage> &msg) { 7394 BaseState::onInputBufferFilled(msg); 7395 7396 changeStateIfWeOwnAllBuffers(); 7397} 7398 7399void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7400 const sp<AMessage> &msg) { 7401 BaseState::onOutputBufferDrained(msg); 7402 7403 changeStateIfWeOwnAllBuffers(); 7404} 7405 7406//////////////////////////////////////////////////////////////////////////////// 7407 7408ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7409 : BaseState(codec) { 7410} 7411 7412bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7413 bool handled = false; 7414 7415 switch (msg->what()) { 7416 case kWhatShutdown: 7417 { 7418 // We're already doing that... 7419 7420 handled = true; 7421 break; 7422 } 7423 7424 case kWhatFlush: 7425 { 7426 // Don't send me a flush request if you previously wanted me 7427 // to shutdown. 
7428 ALOGE("Got flush request in IdleToLoadedState"); 7429 break; 7430 } 7431 7432 default: 7433 handled = BaseState::onMessageReceived(msg); 7434 break; 7435 } 7436 7437 return handled; 7438} 7439 7440void ACodec::IdleToLoadedState::stateEntered() { 7441 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7442} 7443 7444bool ACodec::IdleToLoadedState::onOMXEvent( 7445 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7446 switch (event) { 7447 case OMX_EventCmdComplete: 7448 { 7449 if (data1 != (OMX_U32)OMX_CommandStateSet 7450 || data2 != (OMX_U32)OMX_StateLoaded) { 7451 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7452 asString((OMX_COMMANDTYPE)data1), data1, 7453 asString((OMX_STATETYPE)data2), data2); 7454 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7455 return true; 7456 } 7457 7458 mCodec->changeState(mCodec->mLoadedState); 7459 7460 return true; 7461 } 7462 7463 default: 7464 return BaseState::onOMXEvent(event, data1, data2); 7465 } 7466} 7467 7468//////////////////////////////////////////////////////////////////////////////// 7469 7470ACodec::FlushingState::FlushingState(ACodec *codec) 7471 : BaseState(codec) { 7472} 7473 7474void ACodec::FlushingState::stateEntered() { 7475 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7476 7477 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7478} 7479 7480bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7481 bool handled = false; 7482 7483 switch (msg->what()) { 7484 case kWhatShutdown: 7485 { 7486 mCodec->deferMessage(msg); 7487 break; 7488 } 7489 7490 case kWhatFlush: 7491 { 7492 // We're already doing this right now. 
7493 handled = true; 7494 break; 7495 } 7496 7497 default: 7498 handled = BaseState::onMessageReceived(msg); 7499 break; 7500 } 7501 7502 return handled; 7503} 7504 7505bool ACodec::FlushingState::onOMXEvent( 7506 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7507 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7508 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7509 7510 switch (event) { 7511 case OMX_EventCmdComplete: 7512 { 7513 if (data1 != (OMX_U32)OMX_CommandFlush) { 7514 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7515 asString((OMX_COMMANDTYPE)data1), data1, data2); 7516 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7517 return true; 7518 } 7519 7520 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7521 if (mFlushComplete[data2]) { 7522 ALOGW("Flush already completed for %s port", 7523 data2 == kPortIndexInput ? "input" : "output"); 7524 return true; 7525 } 7526 mFlushComplete[data2] = true; 7527 7528 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7529 changeStateIfWeOwnAllBuffers(); 7530 } 7531 } else if (data2 == OMX_ALL) { 7532 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7533 ALOGW("received flush complete event for OMX_ALL before ports have been" 7534 "flushed (%d/%d)", 7535 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7536 return false; 7537 } 7538 7539 changeStateIfWeOwnAllBuffers(); 7540 } else { 7541 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7542 } 7543 7544 return true; 7545 } 7546 7547 case OMX_EventPortSettingsChanged: 7548 { 7549 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7550 msg->setInt32("type", omx_message::EVENT); 7551 msg->setInt32("node", mCodec->mNode); 7552 msg->setInt32("event", event); 7553 msg->setInt32("data1", data1); 7554 msg->setInt32("data2", data2); 7555 7556 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7557 
mCodec->mComponentName.c_str()); 7558 7559 mCodec->deferMessage(msg); 7560 7561 return true; 7562 } 7563 7564 default: 7565 return BaseState::onOMXEvent(event, data1, data2); 7566 } 7567 7568 return true; 7569} 7570 7571void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7572 BaseState::onOutputBufferDrained(msg); 7573 7574 changeStateIfWeOwnAllBuffers(); 7575} 7576 7577void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7578 BaseState::onInputBufferFilled(msg); 7579 7580 changeStateIfWeOwnAllBuffers(); 7581} 7582 7583void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7584 if (mFlushComplete[kPortIndexInput] 7585 && mFlushComplete[kPortIndexOutput] 7586 && mCodec->allYourBuffersAreBelongToUs()) { 7587 // We now own all buffers except possibly those still queued with 7588 // the native window for rendering. Let's get those back as well. 7589 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7590 7591 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7592 7593 sp<AMessage> notify = mCodec->mNotify->dup(); 7594 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7595 notify->post(); 7596 7597 mCodec->mPortEOS[kPortIndexInput] = 7598 mCodec->mPortEOS[kPortIndexOutput] = false; 7599 7600 mCodec->mInputEOSResult = OK; 7601 7602 if (mCodec->mSkipCutBuffer != NULL) { 7603 mCodec->mSkipCutBuffer->clear(); 7604 } 7605 7606 mCodec->changeState(mCodec->mExecutingState); 7607 } 7608} 7609 7610status_t ACodec::queryCapabilities( 7611 const AString &name, const AString &mime, bool isEncoder, 7612 sp<MediaCodecInfo::Capabilities> *caps) { 7613 (*caps).clear(); 7614 const char *role = getComponentRole(isEncoder, mime.c_str()); 7615 if (role == NULL) { 7616 return BAD_VALUE; 7617 } 7618 7619 OMXClient client; 7620 status_t err = client.connect(); 7621 if (err != OK) { 7622 return err; 7623 } 7624 7625 sp<IOMX> omx = client.interface(); 7626 sp<CodecObserver> observer = new CodecObserver; 7627 
IOMX::node_id node = 0; 7628 7629 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7630 if (err != OK) { 7631 client.disconnect(); 7632 return err; 7633 } 7634 7635 err = setComponentRole(omx, node, role); 7636 if (err != OK) { 7637 omx->freeNode(node); 7638 client.disconnect(); 7639 return err; 7640 } 7641 7642 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7643 bool isVideo = mime.startsWithIgnoreCase("video/"); 7644 7645 if (isVideo) { 7646 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7647 InitOMXParams(¶m); 7648 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7649 7650 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7651 param.nProfileIndex = index; 7652 status_t err = omx->getParameter( 7653 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7654 ¶m, sizeof(param)); 7655 if (err != OK) { 7656 break; 7657 } 7658 builder->addProfileLevel(param.eProfile, param.eLevel); 7659 7660 if (index == kMaxIndicesToCheck) { 7661 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7662 name.c_str(), index, 7663 param.eProfile, param.eLevel); 7664 } 7665 } 7666 7667 // Color format query 7668 // return colors in the order reported by the OMX component 7669 // prefix "flexible" standard ones with the flexible equivalent 7670 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7671 InitOMXParams(&portFormat); 7672 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7673 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7674 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7675 portFormat.nIndex = index; 7676 status_t err = omx->getParameter( 7677 node, OMX_IndexParamVideoPortFormat, 7678 &portFormat, sizeof(portFormat)); 7679 if (err != OK) { 7680 break; 7681 } 7682 7683 OMX_U32 flexibleEquivalent; 7684 if (isFlexibleColorFormat( 7685 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7686 &flexibleEquivalent)) { 7687 bool marked = false; 7688 for (size_t i = 0; i < supportedColors.size(); ++i) { 7689 if (supportedColors[i] == flexibleEquivalent) { 7690 marked = true; 7691 break; 7692 } 7693 } 7694 if (!marked) { 7695 supportedColors.push(flexibleEquivalent); 7696 builder->addColorFormat(flexibleEquivalent); 7697 } 7698 } 7699 supportedColors.push(portFormat.eColorFormat); 7700 builder->addColorFormat(portFormat.eColorFormat); 7701 7702 if (index == kMaxIndicesToCheck) { 7703 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7704 name.c_str(), index, 7705 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7706 } 7707 } 7708 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7709 // More audio codecs if they have profiles. 7710 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7711 InitOMXParams(¶m); 7712 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7713 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7714 param.nProfileIndex = index; 7715 status_t err = omx->getParameter( 7716 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7717 ¶m, sizeof(param)); 7718 if (err != OK) { 7719 break; 7720 } 7721 // For audio, level is ignored. 
7722 builder->addProfileLevel(param.eProfile, 0 /* level */); 7723 7724 if (index == kMaxIndicesToCheck) { 7725 ALOGW("[%s] stopping checking profiles after %u: %x", 7726 name.c_str(), index, 7727 param.eProfile); 7728 } 7729 } 7730 7731 // NOTE: Without Android extensions, OMX does not provide a way to query 7732 // AAC profile support 7733 if (param.nProfileIndex == 0) { 7734 ALOGW("component %s doesn't support profile query.", name.c_str()); 7735 } 7736 } 7737 7738 if (isVideo && !isEncoder) { 7739 native_handle_t *sidebandHandle = NULL; 7740 if (omx->configureVideoTunnelMode( 7741 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7742 // tunneled playback includes adaptive playback 7743 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7744 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7745 } else if (omx->storeMetaDataInBuffers( 7746 node, kPortIndexOutput, OMX_TRUE) == OK || 7747 omx->prepareForAdaptivePlayback( 7748 node, kPortIndexOutput, OMX_TRUE, 7749 1280 /* width */, 720 /* height */) == OK) { 7750 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7751 } 7752 } 7753 7754 if (isVideo && isEncoder) { 7755 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7756 InitOMXParams(¶ms); 7757 params.nPortIndex = kPortIndexOutput; 7758 // TODO: should we verify if fallback is supported? 7759 if (omx->getConfig( 7760 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7761 ¶ms, sizeof(params)) == OK) { 7762 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7763 } 7764 } 7765 7766 *caps = builder; 7767 omx->freeNode(node); 7768 client.disconnect(); 7769 return OK; 7770} 7771 7772// These are supposed be equivalent to the logic in 7773// "audio_channel_out_mask_from_count". 
7774//static 7775status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7776 switch (numChannels) { 7777 case 1: 7778 map[0] = OMX_AUDIO_ChannelCF; 7779 break; 7780 case 2: 7781 map[0] = OMX_AUDIO_ChannelLF; 7782 map[1] = OMX_AUDIO_ChannelRF; 7783 break; 7784 case 3: 7785 map[0] = OMX_AUDIO_ChannelLF; 7786 map[1] = OMX_AUDIO_ChannelRF; 7787 map[2] = OMX_AUDIO_ChannelCF; 7788 break; 7789 case 4: 7790 map[0] = OMX_AUDIO_ChannelLF; 7791 map[1] = OMX_AUDIO_ChannelRF; 7792 map[2] = OMX_AUDIO_ChannelLR; 7793 map[3] = OMX_AUDIO_ChannelRR; 7794 break; 7795 case 5: 7796 map[0] = OMX_AUDIO_ChannelLF; 7797 map[1] = OMX_AUDIO_ChannelRF; 7798 map[2] = OMX_AUDIO_ChannelCF; 7799 map[3] = OMX_AUDIO_ChannelLR; 7800 map[4] = OMX_AUDIO_ChannelRR; 7801 break; 7802 case 6: 7803 map[0] = OMX_AUDIO_ChannelLF; 7804 map[1] = OMX_AUDIO_ChannelRF; 7805 map[2] = OMX_AUDIO_ChannelCF; 7806 map[3] = OMX_AUDIO_ChannelLFE; 7807 map[4] = OMX_AUDIO_ChannelLR; 7808 map[5] = OMX_AUDIO_ChannelRR; 7809 break; 7810 case 7: 7811 map[0] = OMX_AUDIO_ChannelLF; 7812 map[1] = OMX_AUDIO_ChannelRF; 7813 map[2] = OMX_AUDIO_ChannelCF; 7814 map[3] = OMX_AUDIO_ChannelLFE; 7815 map[4] = OMX_AUDIO_ChannelLR; 7816 map[5] = OMX_AUDIO_ChannelRR; 7817 map[6] = OMX_AUDIO_ChannelCS; 7818 break; 7819 case 8: 7820 map[0] = OMX_AUDIO_ChannelLF; 7821 map[1] = OMX_AUDIO_ChannelRF; 7822 map[2] = OMX_AUDIO_ChannelCF; 7823 map[3] = OMX_AUDIO_ChannelLFE; 7824 map[4] = OMX_AUDIO_ChannelLR; 7825 map[5] = OMX_AUDIO_ChannelRR; 7826 map[6] = OMX_AUDIO_ChannelLS; 7827 map[7] = OMX_AUDIO_ChannelRS; 7828 break; 7829 default: 7830 return -EINVAL; 7831 } 7832 7833 return OK; 7834} 7835 7836} // namespace android 7837