ACodec.cpp revision 38667cc8c1c46b66665e4a17d60e44e708c184b4
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
// (comment continued) status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors    from 0x90000000 - 0x9000FFFF
// Extension OMX errors from 0x8F000000 - 0x90000000
// Standard OMX errors  from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Ref-counted holder for a batch of AMessages so a whole list of OMX
// callbacks can be attached to one notification as a single object.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide identity DataConverter; constructed exactly once,
// thread-safely, via pthread_once.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter; // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Receives IOMX callbacks and forwards each batch as a MessageList attached
// to a dup() of the configured notification message.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // all messages in one batch come from the same OMX node
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all ACodec states; provides OMX message dispatch and the
// default buffer handling shared by the concrete states below.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts mNotify when the watched binder (the remote OMX service) dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// State before the OMX component has been allocated.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated but not yet configured/started.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Loaded -> Idle while buffers are being allocated.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: buffers flowing between client, component and (optionally)
// the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Waiting for the output port to be reconfigured after a settings change.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Executing -> Idle during shutdown.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Loaded during shutdown.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Waiting for both ports to finish flushing.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // per-port (input/output) flush-complete flags
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as a write fence; warns if a still-pending fence is dropped.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as a read fence; warns if a still-pending fence is dropped.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ?
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 503 mIsVideo(false), 504 mIsEncoder(false), 505 mFatalError(false), 506 mShutdownInProgress(false), 507 mExplicitShutdown(false), 508 mIsLegacyVP9Decoder(false), 509 mEncoderDelay(0), 510 mEncoderPadding(0), 511 mRotationDegrees(0), 512 mChannelMaskPresent(false), 513 mChannelMask(0), 514 mDequeueCounter(0), 515 mInputMetadataType(kMetadataBufferTypeInvalid), 516 mOutputMetadataType(kMetadataBufferTypeInvalid), 517 mLegacyAdaptiveExperiment(false), 518 mMetadataBuffersToSubmit(0), 519 mNumUndequeuedBuffers(0), 520 mRepeatFrameDelayUs(-1ll), 521 mMaxPtsGapUs(-1ll), 522 mMaxFps(-1), 523 mTimePerFrameUs(-1ll), 524 mTimePerCaptureUs(-1ll), 525 mCreateInputBuffersSuspended(false), 526 mTunneled(false), 527 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 528 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 529 mUninitializedState = new UninitializedState(this); 530 mLoadedState = new LoadedState(this); 531 mLoadedToIdleState = new LoadedToIdleState(this); 532 mIdleToExecutingState = new IdleToExecutingState(this); 533 mExecutingState = new ExecutingState(this); 534 535 mOutputPortSettingsChangedState = 536 new OutputPortSettingsChangedState(this); 537 538 mExecutingToIdleState = new ExecutingToIdleState(this); 539 
mIdleToLoadedState = new IdleToLoadedState(this); 540 mFlushingState = new FlushingState(this); 541 542 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 543 mInputEOSResult = OK; 544 545 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 546 547 changeState(mUninitializedState); 548} 549 550ACodec::~ACodec() { 551} 552 553void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 554 mNotify = msg; 555} 556 557void ACodec::initiateSetup(const sp<AMessage> &msg) { 558 msg->setWhat(kWhatSetup); 559 msg->setTarget(this); 560 msg->post(); 561} 562 563void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 564 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 565 msg->setMessage("params", params); 566 msg->post(); 567} 568 569void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatAllocateComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 576 msg->setWhat(kWhatConfigureComponent); 577 msg->setTarget(this); 578 msg->post(); 579} 580 581status_t ACodec::setSurface(const sp<Surface> &surface) { 582 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 583 msg->setObject("surface", surface); 584 585 sp<AMessage> response; 586 status_t err = msg->postAndAwaitResponse(&response); 587 588 if (err == OK) { 589 (void)response->findInt32("err", &err); 590 } 591 return err; 592} 593 594void ACodec::initiateCreateInputSurface() { 595 (new AMessage(kWhatCreateInputSurface, this))->post(); 596} 597 598void ACodec::initiateSetInputSurface( 599 const sp<PersistentSurface> &surface) { 600 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 601 msg->setObject("input-surface", surface); 602 msg->post(); 603} 604 605void ACodec::signalEndOfInputStream() { 606 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 607} 608 609void ACodec::initiateStart() { 610 (new AMessage(kWhatStart, this))->post(); 611} 612 613void 
ACodec::signalFlush() { 614 ALOGV("[%s] signalFlush", mComponentName.c_str()); 615 (new AMessage(kWhatFlush, this))->post(); 616} 617 618void ACodec::signalResume() { 619 (new AMessage(kWhatResume, this))->post(); 620} 621 622void ACodec::initiateShutdown(bool keepComponentAllocated) { 623 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 624 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 625 msg->post(); 626 if (!keepComponentAllocated) { 627 // ensure shutdown completes in 3 seconds 628 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 629 } 630} 631 632void ACodec::signalRequestIDRFrame() { 633 (new AMessage(kWhatRequestIDRFrame, this))->post(); 634} 635 636// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 637// Some codecs may return input buffers before having them processed. 638// This causes a halt if we already signaled an EOS on the input 639// port. For now keep submitting an output buffer if there was an 640// EOS on the input port, but not yet on the output port. 
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the codec's output to a new surface (or validates a null one).
// After start this attaches/cancels buffers so the new surface ends up in the
// same state the old one was in; only legal for non-tunneled playback with
// compatible usage bits and buffer counts.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                // cancelBuffer consumes the fence fd, so forget ours
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port: output buffers come from the native
// window when a surface is set; otherwise buffers are carved out of a
// MemoryDealer arena. Finishes by notifying the client of the new buffers.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] ==
            NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            // in metadata mode the buffer only carries a small metadata struct,
            // not the full frame
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            // guard the multiplication below against size_t overflow (b/22885421)
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: the dealer-backed memory is not used
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        // mark the fence slot as "no fence"
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // tell the client about all allocated buffers
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Applies the codec's output port geometry, color format, rotation and usage
// bits to the native window. *finalUsage receives the usage bits applied.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // a new window has no crop/dataspace history
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the component and the native
// window and applies it to both sides. Outputs the agreed buffer count/size
// and the window's min-undequeued requirement (including extra buffers).
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2.
try to allocate two (2) additional buffers to reduce starvation from 1054 // the consumer 1055 // plus an extra buffer to account for incorrect minUndequeuedBufs 1056 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { 1057 OMX_U32 newBufferCount = 1058 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; 1059 def.nBufferCountActual = newBufferCount; 1060 err = mOMX->setParameter( 1061 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1062 1063 if (err == OK) { 1064 *minUndequeuedBuffers += extraBuffers; 1065 break; 1066 } 1067 1068 ALOGW("[%s] setting nBufferCountActual to %u failed: %d", 1069 mComponentName.c_str(), newBufferCount, err); 1070 /* exit condition */ 1071 if (extraBuffers == 0) { 1072 return err; 1073 } 1074 } 1075 1076 err = native_window_set_buffer_count( 1077 mNativeWindow.get(), def.nBufferCountActual); 1078 1079 if (err != 0) { 1080 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 1081 -err); 1082 return err; 1083 } 1084 1085 *bufferCount = def.nBufferCountActual; 1086 *bufferSize = def.nBufferSize; 1087 return err; 1088} 1089 1090status_t ACodec::allocateOutputBuffersFromNativeWindow() { 1091 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1092 status_t err = configureOutputBuffersFromNativeWindow( 1093 &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */); 1094 if (err != 0) 1095 return err; 1096 mNumUndequeuedBuffers = minUndequeuedBuffers; 1097 1098 if (!storingMetadataInDecodedBuffers()) { 1099 static_cast<Surface*>(mNativeWindow.get()) 1100 ->getIGraphicBufferProducer()->allowAllocation(true); 1101 } 1102 1103 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1104 "output port", 1105 mComponentName.c_str(), bufferCount, bufferSize); 1106 1107 // Dequeue buffers and send them to OMX 1108 for (OMX_U32 i = 0; i < bufferCount; i++) { 1109 ANativeWindowBuffer *buf; 1110 int fenceFd; 1111 err = 
              mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        // info was pushed before the ID was known; patch it in now.
        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // keep the first error encountered, but still cancel the rest
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates output buffers in metadata mode: each "buffer" handed to the
// component is a small shared-memory metadata struct (VideoNativeMetadata or
// VideoGrallocMetadata depending on mOutputMetadataType) carved out of a
// MemoryDealer; the graphic buffers themselves are dequeued later.  Under
// the legacy-adaptive experiment, graphic buffers are additionally
// preallocated and preregistered up front.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
            mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // -1 marks "no fence" in the ANW metadata struct.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
                mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        // Return all dequeued buffers to the window; the component will
        // re-dequeue them as it fills metadata buffers.
        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one graphic buffer from the native window and submits its
// metadata buffer to the component via fillBuffer(), decrementing
// mMetadataBuffersToSubmit.  No-op when nothing is left to submit.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence fd; clear ours so it is not reused.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks on the sync fence |fd| (if valid) for up to IOMX::kFenceTimeoutMs.
// |dbg| identifies the caller in the timeout warning.  The Fence object
// takes ownership of (and closes) |fd|.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Maps a BufferInfo ownership state to a short human-readable tag for logs.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs (at INFO level) the ID, graphic buffer pointers, ownership state and
// dequeue generation of every buffer on the given port.  Debugging aid only.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window via cancelBuffer and
// transfers ownership to OWNED_BY_NATIVE_WINDOW (even on failure, since the
// fence fd has already been handed over).
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers a freshly dequeued buffer with the render tracker (keyed by the
// buffer's slot index) and immediately reaps any frames whose fences have
// already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Feeds a frame-rendered event into the render tracker; dumps the tracker's
// queue on error for diagnosis.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to |until|, and
// optionally dropping incomplete ones), detaches their render info from our
// BufferInfo table, and posts a kWhatOutputFramesRendered notification if
// any frames were actually rendered.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index <
                mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues one buffer from the native window and returns the matching
// BufferInfo (now OWNED_BY_US).  Stale/unrecognized buffers are discarded
// and the dequeue retried; in metadata mode an unknown buffer replaces the
// least-recently-dequeued entry instead.  Returns NULL on error, in
// tunneled mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
                " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port (iterating backwards since
// freeBuffer removes entries) and drops the port's MemoryDealer.  Returns
// the first error encountered but continues freeing regardless.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees only those output buffers that are not currently held by the
// component or a downstream consumer.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the i-th buffer on |portIndex|: cancels it back to the native
// window if we own it, releases it from the OMX component, closes any
// pending fence, untracks its render info, and removes it from mBuffers
// (even if the OMX free fails).
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search of mBuffers[portIndex] by OMX buffer ID.  Optionally
// returns the slot index through |index|; returns NULL (with an error log)
// when the ID is unknown.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Looks up the standard OMX component role for |mime| and applies it to our
// node via the static overload.  Returns BAD_VALUE for unknown mime types.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a mime type to the standard OMX component role string (decoder or
// encoder variant); returns NULL when the mime type is not in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        {
          MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                     : kMimeToRole[i].decoderRole;
}

//static
// Applies the given role string to an arbitrary OMX node via
// OMX_IndexParamStandardComponentRole; the role is truncated and
// NUL-terminated to fit OMX_MAX_STRINGNAME_SIZE.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Configures the component from the format message |msg|: sets the role,
// encoder bitrate, input/output metadata modes, SPS/PPS prepending, and
// (further below) native-window / tunneling setup for video decoders.
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1748 outputFormat->setInt32("bitrate", bitRate); 1749 outputFormat->setInt32("max-bitrate", bitRate); 1750 } 1751 1752 int32_t storeMeta; 1753 if (encoder 1754 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1755 && storeMeta != kMetadataBufferTypeInvalid) { 1756 mInputMetadataType = (MetadataBufferType)storeMeta; 1757 err = mOMX->storeMetaDataInBuffers( 1758 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1759 if (err != OK) { 1760 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1761 mComponentName.c_str(), err); 1762 1763 return err; 1764 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1765 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1766 // IOMX translates ANWBuffers to gralloc source already. 1767 mInputMetadataType = (MetadataBufferType)storeMeta; 1768 } 1769 1770 uint32_t usageBits; 1771 if (mOMX->getParameter( 1772 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1773 &usageBits, sizeof(usageBits)) == OK) { 1774 inputFormat->setInt32( 1775 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1776 } 1777 } 1778 1779 int32_t prependSPSPPS = 0; 1780 if (encoder 1781 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1782 && prependSPSPPS != 0) { 1783 OMX_INDEXTYPE index; 1784 err = mOMX->getExtensionIndex( 1785 mNode, 1786 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1787 &index); 1788 1789 if (err == OK) { 1790 PrependSPSPPSToIDRFramesParams params; 1791 InitOMXParams(¶ms); 1792 params.bEnable = OMX_TRUE; 1793 1794 err = mOMX->setParameter( 1795 mNode, index, ¶ms, sizeof(params)); 1796 } 1797 1798 if (err != OK) { 1799 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1800 "IDR frames. 
(err %d)", err); 1801 1802 return err; 1803 } 1804 } 1805 1806 // Only enable metadata mode on encoder output if encoder can prepend 1807 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1808 // opaque handle, to which we don't have access. 1809 int32_t video = !strncasecmp(mime, "video/", 6); 1810 mIsVideo = video; 1811 if (encoder && video) { 1812 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1813 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1814 && storeMeta != 0); 1815 1816 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1817 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1818 if (err != OK) { 1819 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1820 mComponentName.c_str(), err); 1821 } 1822 1823 if (!msg->findInt64( 1824 "repeat-previous-frame-after", 1825 &mRepeatFrameDelayUs)) { 1826 mRepeatFrameDelayUs = -1ll; 1827 } 1828 1829 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1830 mMaxPtsGapUs = -1ll; 1831 } 1832 1833 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1834 mMaxFps = -1; 1835 } 1836 1837 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1838 mTimePerCaptureUs = -1ll; 1839 } 1840 1841 if (!msg->findInt32( 1842 "create-input-buffers-suspended", 1843 (int32_t*)&mCreateInputBuffersSuspended)) { 1844 mCreateInputBuffersSuspended = false; 1845 } 1846 } 1847 1848 // NOTE: we only use native window for video decoders 1849 sp<RefBase> obj; 1850 bool haveNativeWindow = msg->findObject("native-window", &obj) 1851 && obj != NULL && video && !encoder; 1852 mUsingNativeWindow = haveNativeWindow; 1853 mLegacyAdaptiveExperiment = false; 1854 if (video && !encoder) { 1855 inputFormat->setInt32("adaptive-playback", false); 1856 1857 int32_t usageProtected; 1858 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1859 if (!haveNativeWindow) { 1860 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1861 return PERMISSION_DENIED; 1862 } 1863 mFlags |= kFlagIsGrallocUsageProtected; 1864 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1865 } 1866 1867 if (mFlags & kFlagIsSecure) { 1868 // use native_handles for secure input buffers 1869 err = mOMX->enableNativeBuffers( 1870 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1871 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1872 err = OK; // ignore error for now 1873 } 1874 } 1875 if (haveNativeWindow) { 1876 sp<ANativeWindow> nativeWindow = 1877 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1878 1879 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1880 int32_t autoFrc; 1881 if (msg->findInt32("auto-frc", &autoFrc)) { 1882 bool enabled = autoFrc; 1883 OMX_CONFIG_BOOLEANTYPE config; 1884 InitOMXParams(&config); 1885 config.bEnabled = (OMX_BOOL)enabled; 1886 status_t temp = mOMX->setConfig( 1887 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1888 &config, sizeof(config)); 1889 if (temp == OK) { 1890 outputFormat->setInt32("auto-frc", enabled); 1891 } else if (enabled) { 1892 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1893 } 1894 } 1895 // END of temporary support for automatic FRC 1896 1897 int32_t tunneled; 1898 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1899 tunneled != 0) { 1900 ALOGI("Configuring TUNNELED video playback."); 1901 mTunneled = true; 1902 1903 int32_t audioHwSync = 0; 1904 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1905 ALOGW("No Audio HW Sync provided for video tunnel"); 1906 } 1907 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1908 if (err != OK) { 1909 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1910 audioHwSync, nativeWindow.get()); 1911 return err; 1912 } 1913 1914 int32_t maxWidth = 0, maxHeight = 0; 1915 if (msg->findInt32("max-width", &maxWidth) && 1916 msg->findInt32("max-height", &maxHeight)) { 1917 
1918 err = mOMX->prepareForAdaptivePlayback( 1919 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1920 if (err != OK) { 1921 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1922 mComponentName.c_str(), err); 1923 // allow failure 1924 err = OK; 1925 } else { 1926 inputFormat->setInt32("max-width", maxWidth); 1927 inputFormat->setInt32("max-height", maxHeight); 1928 inputFormat->setInt32("adaptive-playback", true); 1929 } 1930 } 1931 } else { 1932 ALOGV("Configuring CPU controlled video playback."); 1933 mTunneled = false; 1934 1935 // Explicity reset the sideband handle of the window for 1936 // non-tunneled video in case the window was previously used 1937 // for a tunneled video playback. 1938 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1939 if (err != OK) { 1940 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1941 return err; 1942 } 1943 1944 // Always try to enable dynamic output buffers on native surface 1945 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1946 err = mOMX->storeMetaDataInBuffers( 1947 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1948 if (err != OK) { 1949 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1950 mComponentName.c_str(), err); 1951 1952 // if adaptive playback has been requested, try JB fallback 1953 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1954 // LARGE MEMORY REQUIREMENT 1955 1956 // we will not do adaptive playback on software accessed 1957 // surfaces as they never had to respond to changes in the 1958 // crop window, and we don't trust that they will be able to. 
1959 int usageBits = 0; 1960 bool canDoAdaptivePlayback; 1961 1962 if (nativeWindow->query( 1963 nativeWindow.get(), 1964 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1965 &usageBits) != OK) { 1966 canDoAdaptivePlayback = false; 1967 } else { 1968 canDoAdaptivePlayback = 1969 (usageBits & 1970 (GRALLOC_USAGE_SW_READ_MASK | 1971 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1972 } 1973 1974 int32_t maxWidth = 0, maxHeight = 0; 1975 if (canDoAdaptivePlayback && 1976 msg->findInt32("max-width", &maxWidth) && 1977 msg->findInt32("max-height", &maxHeight)) { 1978 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1979 mComponentName.c_str(), maxWidth, maxHeight); 1980 1981 err = mOMX->prepareForAdaptivePlayback( 1982 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1983 maxHeight); 1984 ALOGW_IF(err != OK, 1985 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1986 mComponentName.c_str(), err); 1987 1988 if (err == OK) { 1989 inputFormat->setInt32("max-width", maxWidth); 1990 inputFormat->setInt32("max-height", maxHeight); 1991 inputFormat->setInt32("adaptive-playback", true); 1992 } 1993 } 1994 // allow failure 1995 err = OK; 1996 } else { 1997 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1998 mComponentName.c_str()); 1999 CHECK(storingMetadataInDecodedBuffers()); 2000 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2001 "legacy-adaptive", !msg->contains("no-experiments")); 2002 2003 inputFormat->setInt32("adaptive-playback", true); 2004 } 2005 2006 int32_t push; 2007 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2008 && push != 0) { 2009 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2010 } 2011 } 2012 2013 int32_t rotationDegrees; 2014 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2015 mRotationDegrees = rotationDegrees; 2016 } else { 2017 mRotationDegrees = 0; 2018 } 2019 } 2020 2021 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2022 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2023 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2024 2025 if (video) { 2026 // determine need for software renderer 2027 bool usingSwRenderer = false; 2028 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2029 usingSwRenderer = true; 2030 haveNativeWindow = false; 2031 } 2032 2033 if (encoder) { 2034 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2035 } else { 2036 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2037 } 2038 2039 if (err != OK) { 2040 return err; 2041 } 2042 2043 if (haveNativeWindow) { 2044 mNativeWindow = static_cast<Surface *>(obj.get()); 2045 } 2046 2047 // initialize native window now to get actual output format 2048 // TODO: this is needed for some encoders even though they don't use native window 2049 err = initNativeWindow(); 2050 if (err != OK) { 2051 return err; 2052 } 2053 2054 // fallback for devices that do not handle flex-YUV for native buffers 2055 if (haveNativeWindow) { 2056 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2057 if (msg->findInt32("color-format", &requestedColorFormat) && 2058 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2059 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2060 if (err != OK) { 2061 return err; 2062 } 2063 int32_t colorFormat = OMX_COLOR_FormatUnused; 2064 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2065 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2066 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2067 return BAD_VALUE; 2068 } 2069 ALOGD("[%s] Requested output format %#x and got %#x.", 2070 mComponentName.c_str(), requestedColorFormat, colorFormat); 2071 if (!isFlexibleColorFormat( 2072 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2073 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2074 // device did not handle flex-YUV request for native window, fall back 2075 // to SW renderer 2076 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2077 mNativeWindow.clear(); 2078 mNativeWindowUsageBits = 0; 2079 haveNativeWindow = false; 2080 usingSwRenderer = true; 2081 if (storingMetadataInDecodedBuffers()) { 2082 err = mOMX->storeMetaDataInBuffers( 2083 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2084 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2085 // TODO: implement adaptive-playback support for bytebuffer mode. 2086 // This is done by SW codecs, but most HW codecs don't support it. 2087 inputFormat->setInt32("adaptive-playback", false); 2088 } 2089 if (err == OK) { 2090 err = mOMX->enableNativeBuffers( 2091 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2092 } 2093 if (mFlags & kFlagIsGrallocUsageProtected) { 2094 // fallback is not supported for protected playback 2095 err = PERMISSION_DENIED; 2096 } else if (err == OK) { 2097 err = setupVideoDecoder( 2098 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2099 } 2100 } 2101 } 2102 } 2103 2104 if (usingSwRenderer) { 2105 outputFormat->setInt32("using-sw-renderer", 1); 2106 } 2107 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2108 int32_t numChannels, sampleRate; 2109 if (!msg->findInt32("channel-count", &numChannels) 2110 || !msg->findInt32("sample-rate", &sampleRate)) { 2111 // Since we did not always check for these, leave them optional 2112 // and have the decoder figure it all out. 2113 err = OK; 2114 } else { 2115 err = setupRawAudioFormat( 2116 encoder ? 
kPortIndexInput : kPortIndexOutput, 2117 sampleRate, 2118 numChannels); 2119 } 2120 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2121 int32_t numChannels, sampleRate; 2122 if (!msg->findInt32("channel-count", &numChannels) 2123 || !msg->findInt32("sample-rate", &sampleRate)) { 2124 err = INVALID_OPERATION; 2125 } else { 2126 int32_t isADTS, aacProfile; 2127 int32_t sbrMode; 2128 int32_t maxOutputChannelCount; 2129 int32_t pcmLimiterEnable; 2130 drcParams_t drc; 2131 if (!msg->findInt32("is-adts", &isADTS)) { 2132 isADTS = 0; 2133 } 2134 if (!msg->findInt32("aac-profile", &aacProfile)) { 2135 aacProfile = OMX_AUDIO_AACObjectNull; 2136 } 2137 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2138 sbrMode = -1; 2139 } 2140 2141 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2142 maxOutputChannelCount = -1; 2143 } 2144 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2145 // value is unknown 2146 pcmLimiterEnable = -1; 2147 } 2148 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2149 // value is unknown 2150 drc.encodedTargetLevel = -1; 2151 } 2152 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2153 // value is unknown 2154 drc.drcCut = -1; 2155 } 2156 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2157 // value is unknown 2158 drc.drcBoost = -1; 2159 } 2160 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2161 // value is unknown 2162 drc.heavyCompression = -1; 2163 } 2164 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2165 // value is unknown 2166 drc.targetRefLevel = -1; 2167 } 2168 2169 err = setupAACCodec( 2170 encoder, numChannels, sampleRate, bitRate, aacProfile, 2171 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2172 pcmLimiterEnable); 2173 } 2174 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2175 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2176 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2177 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2178 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2179 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2180 // These are PCM-like formats with a fixed sample rate but 2181 // a variable number of channels. 2182 2183 int32_t numChannels; 2184 if (!msg->findInt32("channel-count", &numChannels)) { 2185 err = INVALID_OPERATION; 2186 } else { 2187 int32_t sampleRate; 2188 if (!msg->findInt32("sample-rate", &sampleRate)) { 2189 sampleRate = 8000; 2190 } 2191 err = setupG711Codec(encoder, sampleRate, numChannels); 2192 } 2193 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2194 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2195 if (encoder && 2196 (!msg->findInt32("channel-count", &numChannels) 2197 || !msg->findInt32("sample-rate", &sampleRate))) { 2198 ALOGE("missing channel count or sample rate for FLAC encoder"); 2199 err = INVALID_OPERATION; 2200 } else { 2201 if (encoder) { 2202 if (!msg->findInt32( 2203 "complexity", &compressionLevel) && 2204 !msg->findInt32( 2205 "flac-compression-level", &compressionLevel)) { 2206 compressionLevel = 5; // default FLAC compression level 2207 } else if (compressionLevel < 0) { 2208 ALOGW("compression level %d outside [0..8] range, " 2209 "using 0", 2210 compressionLevel); 2211 compressionLevel = 0; 2212 } else if (compressionLevel > 8) { 2213 ALOGW("compression level %d outside [0..8] range, " 2214 "using 8", 2215 compressionLevel); 2216 compressionLevel = 8; 2217 } 2218 } 2219 err = setupFlacCodec( 2220 encoder, numChannels, sampleRate, compressionLevel); 2221 } 2222 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2223 int32_t numChannels, sampleRate; 2224 if (encoder 2225 || !msg->findInt32("channel-count", &numChannels) 2226 || !msg->findInt32("sample-rate", &sampleRate)) { 2227 err = INVALID_OPERATION; 2228 } else { 2229 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2230 } 2231 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2232 int32_t numChannels; 2233 int32_t sampleRate; 2234 if (!msg->findInt32("channel-count", &numChannels) 2235 || !msg->findInt32("sample-rate", &sampleRate)) { 2236 err = INVALID_OPERATION; 2237 } else { 2238 err = setupAC3Codec(encoder, numChannels, sampleRate); 2239 } 2240 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2241 int32_t numChannels; 2242 int32_t sampleRate; 2243 if (!msg->findInt32("channel-count", &numChannels) 2244 || !msg->findInt32("sample-rate", &sampleRate)) { 2245 err = INVALID_OPERATION; 2246 } else { 2247 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2248 } 2249 } 2250 2251 if (err != OK) { 2252 return err; 2253 } 2254 2255 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2256 mEncoderDelay = 0; 2257 } 2258 2259 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2260 mEncoderPadding = 0; 2261 } 2262 2263 if (msg->findInt32("channel-mask", &mChannelMask)) { 2264 mChannelMaskPresent = true; 2265 } else { 2266 mChannelMaskPresent = false; 2267 } 2268 2269 int32_t maxInputSize; 2270 if (msg->findInt32("max-input-size", &maxInputSize)) { 2271 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2272 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2273 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2274 } 2275 2276 int32_t priority; 2277 if (msg->findInt32("priority", &priority)) { 2278 err = setPriority(priority); 2279 } 2280 2281 int32_t rateInt = -1; 2282 float rateFloat = -1; 2283 if (!msg->findFloat("operating-rate", &rateFloat)) { 2284 msg->findInt32("operating-rate", &rateInt); 2285 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2286 } 2287 if (rateFloat > 0) { 2288 err = setOperatingRate(rateFloat, video); 2289 } 2290 2291 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2292 mBaseOutputFormat = outputFormat; 2293 // trigger a kWhatOutputFormatChanged msg on first buffer 2294 mLastOutputFormat.clear(); 2295 2296 err = getPortFormat(kPortIndexInput, inputFormat); 2297 if (err == OK) { 2298 err = getPortFormat(kPortIndexOutput, outputFormat); 2299 if (err == OK) { 2300 mInputFormat = inputFormat; 2301 mOutputFormat = outputFormat; 2302 } 2303 } 2304 2305 // create data converters if needed 2306 if (!video && err == OK) { 2307 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2308 if (encoder) { 2309 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2310 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2311 if (mConverter[kPortIndexInput] != NULL) { 2312 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2313 } 2314 } else { 2315 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2316 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2317 if (mConverter[kPortIndexOutput] != NULL) { 2318 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2319 } 2320 } 2321 } 2322 2323 return err; 2324} 2325 2326status_t ACodec::setPriority(int32_t priority) { 2327 if (priority < 0) { 2328 return BAD_VALUE; 2329 } 2330 OMX_PARAM_U32TYPE config; 2331 InitOMXParams(&config); 2332 config.nU32 = (OMX_U32)priority; 2333 status_t temp = mOMX->setConfig( 2334 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2335 &config, sizeof(config)); 2336 if (temp != OK) { 2337 ALOGI("codec does not support config priority (err %d)", temp); 2338 } 2339 return OK; 2340} 2341 2342status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2343 if (rateFloat < 0) { 2344 return BAD_VALUE; 2345 } 2346 OMX_U32 rate; 2347 if (isVideo) { 2348 if (rateFloat > 
65535) { 2349 return BAD_VALUE; 2350 } 2351 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2352 } else { 2353 if (rateFloat > UINT_MAX) { 2354 return BAD_VALUE; 2355 } 2356 rate = (OMX_U32)(rateFloat); 2357 } 2358 OMX_PARAM_U32TYPE config; 2359 InitOMXParams(&config); 2360 config.nU32 = rate; 2361 status_t err = mOMX->setConfig( 2362 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2363 &config, sizeof(config)); 2364 if (err != OK) { 2365 ALOGI("codec does not support config operating rate (err %d)", err); 2366 } 2367 return OK; 2368} 2369 2370status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2371 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2372 InitOMXParams(¶ms); 2373 params.nPortIndex = kPortIndexOutput; 2374 status_t err = mOMX->getConfig( 2375 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2376 if (err == OK) { 2377 *intraRefreshPeriod = params.nRefreshPeriod; 2378 return OK; 2379 } 2380 2381 // Fallback to query through standard OMX index. 
2382 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2383 InitOMXParams(&refreshParams); 2384 refreshParams.nPortIndex = kPortIndexOutput; 2385 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2386 err = mOMX->getParameter( 2387 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2388 if (err != OK || refreshParams.nCirMBs == 0) { 2389 *intraRefreshPeriod = 0; 2390 return OK; 2391 } 2392 2393 // Calculate period based on width and height 2394 uint32_t width, height; 2395 OMX_PARAM_PORTDEFINITIONTYPE def; 2396 InitOMXParams(&def); 2397 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2398 def.nPortIndex = kPortIndexOutput; 2399 err = mOMX->getParameter( 2400 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2401 if (err != OK) { 2402 *intraRefreshPeriod = 0; 2403 return err; 2404 } 2405 width = video_def->nFrameWidth; 2406 height = video_def->nFrameHeight; 2407 // Use H.264/AVC MacroBlock size 16x16 2408 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2409 2410 return OK; 2411} 2412 2413status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2414 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2415 InitOMXParams(¶ms); 2416 params.nPortIndex = kPortIndexOutput; 2417 params.nRefreshPeriod = intraRefreshPeriod; 2418 status_t err = mOMX->setConfig( 2419 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2420 if (err == OK) { 2421 return OK; 2422 } 2423 2424 // Only in configure state, a component could invoke setParameter. 
2425 if (!inConfigure) { 2426 return INVALID_OPERATION; 2427 } else { 2428 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2429 } 2430 2431 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2432 InitOMXParams(&refreshParams); 2433 refreshParams.nPortIndex = kPortIndexOutput; 2434 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2435 2436 if (intraRefreshPeriod == 0) { 2437 // 0 means disable intra refresh. 2438 refreshParams.nCirMBs = 0; 2439 } else { 2440 // Calculate macroblocks that need to be intra coded base on width and height 2441 uint32_t width, height; 2442 OMX_PARAM_PORTDEFINITIONTYPE def; 2443 InitOMXParams(&def); 2444 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2445 def.nPortIndex = kPortIndexOutput; 2446 err = mOMX->getParameter( 2447 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2448 if (err != OK) { 2449 return err; 2450 } 2451 width = video_def->nFrameWidth; 2452 height = video_def->nFrameHeight; 2453 // Use H.264/AVC MacroBlock size 16x16 2454 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2455 } 2456 2457 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2458 &refreshParams, sizeof(refreshParams)); 2459 if (err != OK) { 2460 return err; 2461 } 2462 2463 return OK; 2464} 2465 2466status_t ACodec::configureTemporalLayers( 2467 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2468 if (!mIsVideo || !mIsEncoder) { 2469 return INVALID_OPERATION; 2470 } 2471 2472 AString tsSchema; 2473 if (!msg->findString("ts-schema", &tsSchema)) { 2474 return OK; 2475 } 2476 2477 unsigned int numLayers = 0; 2478 unsigned int numBLayers = 0; 2479 int tags; 2480 char dummy; 2481 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2482 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2483 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2484 && numLayers > 0) { 2485 pattern = 
OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2486 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2487 &numLayers, &dummy, &numBLayers, &dummy)) 2488 && (tags == 1 || (tags == 3 && dummy == '+')) 2489 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2490 numLayers += numBLayers; 2491 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2492 } else { 2493 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 2494 return BAD_VALUE; 2495 } 2496 2497 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2498 InitOMXParams(&layerParams); 2499 layerParams.nPortIndex = kPortIndexOutput; 2500 2501 status_t err = mOMX->getParameter( 2502 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2503 &layerParams, sizeof(layerParams)); 2504 2505 if (err != OK) { 2506 return err; 2507 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2508 return BAD_VALUE; 2509 } 2510 2511 numLayers = min(numLayers, layerParams.nLayerCountMax); 2512 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2513 2514 if (!inConfigure) { 2515 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2516 InitOMXParams(&layerConfig); 2517 layerConfig.nPortIndex = kPortIndexOutput; 2518 layerConfig.ePattern = pattern; 2519 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2520 layerConfig.nBLayerCountActual = numBLayers; 2521 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2522 2523 err = mOMX->setConfig( 2524 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2525 &layerConfig, sizeof(layerConfig)); 2526 } else { 2527 layerParams.ePattern = pattern; 2528 layerParams.nPLayerCountActual = numLayers - numBLayers; 2529 layerParams.nBLayerCountActual = numBLayers; 2530 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2531 2532 err = mOMX->setParameter( 2533 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2534 &layerParams, sizeof(layerParams)); 2535 } 2536 2537 AString configSchema; 
2538 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2539 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2540 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2541 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2542 } 2543 2544 if (err != OK) { 2545 ALOGW("Failed to set temporal layers to %s (requested %s)", 2546 configSchema.c_str(), tsSchema.c_str()); 2547 return err; 2548 } 2549 2550 err = mOMX->getParameter( 2551 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2552 &layerParams, sizeof(layerParams)); 2553 2554 if (err == OK) { 2555 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2556 tsSchema.c_str(), configSchema.c_str(), 2557 asString(layerParams.ePattern), layerParams.ePattern, 2558 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2559 2560 if (outputFormat.get() == mOutputFormat.get()) { 2561 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2562 } 2563 // assume we got what we configured 2564 outputFormat->setString("ts-schema", configSchema); 2565 } 2566 return err; 2567} 2568 2569status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2570 OMX_PARAM_PORTDEFINITIONTYPE def; 2571 InitOMXParams(&def); 2572 def.nPortIndex = portIndex; 2573 2574 status_t err = mOMX->getParameter( 2575 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2576 2577 if (err != OK) { 2578 return err; 2579 } 2580 2581 if (def.nBufferSize >= size) { 2582 return OK; 2583 } 2584 2585 def.nBufferSize = size; 2586 2587 err = mOMX->setParameter( 2588 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2589 2590 if (err != OK) { 2591 return err; 2592 } 2593 2594 err = mOMX->getParameter( 2595 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2596 2597 if (err != OK) { 2598 return err; 2599 } 2600 2601 if (def.nBufferSize < size) { 2602 ALOGE("failed to set min buffer size to 
%zu (is still %u)", size, def.nBufferSize); 2603 return FAILED_TRANSACTION; 2604 } 2605 2606 return OK; 2607} 2608 2609status_t ACodec::selectAudioPortFormat( 2610 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2611 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2612 InitOMXParams(&format); 2613 2614 format.nPortIndex = portIndex; 2615 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2616 format.nIndex = index; 2617 status_t err = mOMX->getParameter( 2618 mNode, OMX_IndexParamAudioPortFormat, 2619 &format, sizeof(format)); 2620 2621 if (err != OK) { 2622 return err; 2623 } 2624 2625 if (format.eEncoding == desiredFormat) { 2626 break; 2627 } 2628 2629 if (index == kMaxIndicesToCheck) { 2630 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2631 mComponentName.c_str(), index, 2632 asString(format.eEncoding), format.eEncoding); 2633 return ERROR_UNSUPPORTED; 2634 } 2635 } 2636 2637 return mOMX->setParameter( 2638 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2639} 2640 2641status_t ACodec::setupAACCodec( 2642 bool encoder, int32_t numChannels, int32_t sampleRate, 2643 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2644 int32_t maxOutputChannelCount, const drcParams_t& drc, 2645 int32_t pcmLimiterEnable) { 2646 if (encoder && isADTS) { 2647 return -EINVAL; 2648 } 2649 2650 status_t err = setupRawAudioFormat( 2651 encoder ? 
kPortIndexInput : kPortIndexOutput, 2652 sampleRate, 2653 numChannels); 2654 2655 if (err != OK) { 2656 return err; 2657 } 2658 2659 if (encoder) { 2660 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2661 2662 if (err != OK) { 2663 return err; 2664 } 2665 2666 OMX_PARAM_PORTDEFINITIONTYPE def; 2667 InitOMXParams(&def); 2668 def.nPortIndex = kPortIndexOutput; 2669 2670 err = mOMX->getParameter( 2671 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2672 2673 if (err != OK) { 2674 return err; 2675 } 2676 2677 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2678 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2679 2680 err = mOMX->setParameter( 2681 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2682 2683 if (err != OK) { 2684 return err; 2685 } 2686 2687 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2688 InitOMXParams(&profile); 2689 profile.nPortIndex = kPortIndexOutput; 2690 2691 err = mOMX->getParameter( 2692 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2693 2694 if (err != OK) { 2695 return err; 2696 } 2697 2698 profile.nChannels = numChannels; 2699 2700 profile.eChannelMode = 2701 (numChannels == 1) 2702 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2703 2704 profile.nSampleRate = sampleRate; 2705 profile.nBitRate = bitRate; 2706 profile.nAudioBandWidth = 0; 2707 profile.nFrameLength = 0; 2708 profile.nAACtools = OMX_AUDIO_AACToolAll; 2709 profile.nAACERtools = OMX_AUDIO_AACERNone; 2710 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2711 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2712 switch (sbrMode) { 2713 case 0: 2714 // disable sbr 2715 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2716 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2717 break; 2718 case 1: 2719 // enable single-rate sbr 2720 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2721 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2722 break; 2723 case 2: 2724 // enable dual-rate sbr 2725 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2726 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2727 break; 2728 case -1: 2729 // enable both modes -> the codec will decide which mode should be used 2730 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2731 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2732 break; 2733 default: 2734 // unsupported sbr mode 2735 return BAD_VALUE; 2736 } 2737 2738 2739 err = mOMX->setParameter( 2740 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2741 2742 if (err != OK) { 2743 return err; 2744 } 2745 2746 return err; 2747 } 2748 2749 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2750 InitOMXParams(&profile); 2751 profile.nPortIndex = kPortIndexInput; 2752 2753 err = mOMX->getParameter( 2754 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2755 2756 if (err != OK) { 2757 return err; 2758 } 2759 2760 profile.nChannels = numChannels; 2761 profile.nSampleRate = sampleRate; 2762 2763 profile.eAACStreamFormat = 2764 isADTS 2765 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2766 : OMX_AUDIO_AACStreamFormatMP4FF; 2767 2768 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2769 InitOMXParams(&presentation); 2770 presentation.nMaxOutputChannels = maxOutputChannelCount; 2771 presentation.nDrcCut = drc.drcCut; 2772 presentation.nDrcBoost = drc.drcBoost; 2773 presentation.nHeavyCompression = drc.heavyCompression; 2774 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2775 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2776 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2777 2778 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2779 if (res == OK) { 2780 // optional parameters, will not cause configuration failure 2781 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2782 &presentation, sizeof(presentation)); 2783 } else { 2784 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2785 } 2786 mSampleRate = sampleRate; 2787 return res; 2788} 2789 2790status_t ACodec::setupAC3Codec( 2791 bool encoder, int32_t numChannels, int32_t sampleRate) { 2792 status_t err = setupRawAudioFormat( 2793 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2794 2795 if (err != OK) { 2796 return err; 2797 } 2798 2799 if (encoder) { 2800 ALOGW("AC3 encoding is not supported."); 2801 return INVALID_OPERATION; 2802 } 2803 2804 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2805 InitOMXParams(&def); 2806 def.nPortIndex = kPortIndexInput; 2807 2808 err = mOMX->getParameter( 2809 mNode, 2810 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2811 &def, 2812 sizeof(def)); 2813 2814 if (err != OK) { 2815 return err; 2816 } 2817 2818 def.nChannels = numChannels; 2819 def.nSampleRate = sampleRate; 2820 2821 return mOMX->setParameter( 2822 mNode, 2823 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2824 &def, 2825 sizeof(def)); 2826} 2827 2828status_t ACodec::setupEAC3Codec( 2829 bool encoder, int32_t numChannels, int32_t sampleRate) { 2830 status_t err = setupRawAudioFormat( 2831 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2832 2833 if (err != OK) { 2834 return err; 2835 } 2836 2837 if (encoder) { 2838 ALOGW("EAC3 encoding is not supported."); 2839 return INVALID_OPERATION; 2840 } 2841 2842 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2843 InitOMXParams(&def); 2844 def.nPortIndex = kPortIndexInput; 2845 2846 err = mOMX->getParameter( 2847 mNode, 2848 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2849 &def, 2850 sizeof(def)); 2851 2852 if (err != OK) { 2853 return err; 2854 } 2855 2856 def.nChannels = numChannels; 2857 def.nSampleRate = sampleRate; 2858 2859 return mOMX->setParameter( 2860 mNode, 2861 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2862 &def, 2863 sizeof(def)); 2864} 2865 2866static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2867 bool isAMRWB, int32_t bps) { 2868 if (isAMRWB) { 2869 if (bps <= 6600) { 2870 return OMX_AUDIO_AMRBandModeWB0; 2871 } else if (bps <= 8850) { 2872 return OMX_AUDIO_AMRBandModeWB1; 2873 } else if (bps <= 12650) { 2874 return OMX_AUDIO_AMRBandModeWB2; 2875 } else if (bps <= 14250) { 2876 return OMX_AUDIO_AMRBandModeWB3; 
2877 } else if (bps <= 15850) { 2878 return OMX_AUDIO_AMRBandModeWB4; 2879 } else if (bps <= 18250) { 2880 return OMX_AUDIO_AMRBandModeWB5; 2881 } else if (bps <= 19850) { 2882 return OMX_AUDIO_AMRBandModeWB6; 2883 } else if (bps <= 23050) { 2884 return OMX_AUDIO_AMRBandModeWB7; 2885 } 2886 2887 // 23850 bps 2888 return OMX_AUDIO_AMRBandModeWB8; 2889 } else { // AMRNB 2890 if (bps <= 4750) { 2891 return OMX_AUDIO_AMRBandModeNB0; 2892 } else if (bps <= 5150) { 2893 return OMX_AUDIO_AMRBandModeNB1; 2894 } else if (bps <= 5900) { 2895 return OMX_AUDIO_AMRBandModeNB2; 2896 } else if (bps <= 6700) { 2897 return OMX_AUDIO_AMRBandModeNB3; 2898 } else if (bps <= 7400) { 2899 return OMX_AUDIO_AMRBandModeNB4; 2900 } else if (bps <= 7950) { 2901 return OMX_AUDIO_AMRBandModeNB5; 2902 } else if (bps <= 10200) { 2903 return OMX_AUDIO_AMRBandModeNB6; 2904 } 2905 2906 // 12200 bps 2907 return OMX_AUDIO_AMRBandModeNB7; 2908 } 2909} 2910 2911status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2912 OMX_AUDIO_PARAM_AMRTYPE def; 2913 InitOMXParams(&def); 2914 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2915 2916 status_t err = 2917 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2918 2919 if (err != OK) { 2920 return err; 2921 } 2922 2923 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2924 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2925 2926 err = mOMX->setParameter( 2927 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2928 2929 if (err != OK) { 2930 return err; 2931 } 2932 2933 return setupRawAudioFormat( 2934 encoder ? kPortIndexInput : kPortIndexOutput, 2935 isWAMR ? 
16000 : 8000 /* sampleRate */, 2936 1 /* numChannels */); 2937} 2938 2939status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2940 if (encoder) { 2941 return INVALID_OPERATION; 2942 } 2943 2944 return setupRawAudioFormat( 2945 kPortIndexInput, sampleRate, numChannels); 2946} 2947 2948status_t ACodec::setupFlacCodec( 2949 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2950 2951 if (encoder) { 2952 OMX_AUDIO_PARAM_FLACTYPE def; 2953 InitOMXParams(&def); 2954 def.nPortIndex = kPortIndexOutput; 2955 2956 // configure compression level 2957 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2958 if (err != OK) { 2959 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2960 return err; 2961 } 2962 def.nCompressionLevel = compressionLevel; 2963 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2964 if (err != OK) { 2965 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2966 return err; 2967 } 2968 } 2969 2970 return setupRawAudioFormat( 2971 encoder ? 
kPortIndexInput : kPortIndexOutput, 2972 sampleRate, 2973 numChannels); 2974} 2975 2976status_t ACodec::setupRawAudioFormat( 2977 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2978 OMX_PARAM_PORTDEFINITIONTYPE def; 2979 InitOMXParams(&def); 2980 def.nPortIndex = portIndex; 2981 2982 status_t err = mOMX->getParameter( 2983 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2984 2985 if (err != OK) { 2986 return err; 2987 } 2988 2989 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2990 2991 err = mOMX->setParameter( 2992 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2993 2994 if (err != OK) { 2995 return err; 2996 } 2997 2998 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2999 InitOMXParams(&pcmParams); 3000 pcmParams.nPortIndex = portIndex; 3001 3002 err = mOMX->getParameter( 3003 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3004 3005 if (err != OK) { 3006 return err; 3007 } 3008 3009 pcmParams.nChannels = numChannels; 3010 switch (encoding) { 3011 case kAudioEncodingPcm8bit: 3012 pcmParams.eNumData = OMX_NumericalDataUnsigned; 3013 pcmParams.nBitPerSample = 8; 3014 break; 3015 case kAudioEncodingPcmFloat: 3016 pcmParams.eNumData = OMX_NumericalDataFloat; 3017 pcmParams.nBitPerSample = 32; 3018 break; 3019 case kAudioEncodingPcm16bit: 3020 pcmParams.eNumData = OMX_NumericalDataSigned; 3021 pcmParams.nBitPerSample = 16; 3022 break; 3023 default: 3024 return BAD_VALUE; 3025 } 3026 pcmParams.bInterleaved = OMX_TRUE; 3027 pcmParams.nSamplingRate = sampleRate; 3028 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 3029 3030 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 3031 return OMX_ErrorNone; 3032 } 3033 3034 err = mOMX->setParameter( 3035 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3036 // if we could not set up raw format to non-16-bit, try with 16-bit 3037 // NOTE: we will also verify this via readback, in case codec ignores these fields 3038 if (err != 
OK && encoding != kAudioEncodingPcm16bit) { 3039 pcmParams.eNumData = OMX_NumericalDataSigned; 3040 pcmParams.nBitPerSample = 16; 3041 err = mOMX->setParameter( 3042 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3043 } 3044 return err; 3045} 3046 3047status_t ACodec::configureTunneledVideoPlayback( 3048 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 3049 native_handle_t* sidebandHandle; 3050 3051 status_t err = mOMX->configureVideoTunnelMode( 3052 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 3053 if (err != OK) { 3054 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 3055 return err; 3056 } 3057 3058 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 3059 if (err != OK) { 3060 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 3061 sidebandHandle, err); 3062 return err; 3063 } 3064 3065 return OK; 3066} 3067 3068status_t ACodec::setVideoPortFormatType( 3069 OMX_U32 portIndex, 3070 OMX_VIDEO_CODINGTYPE compressionFormat, 3071 OMX_COLOR_FORMATTYPE colorFormat, 3072 bool usingNativeBuffers) { 3073 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 3074 InitOMXParams(&format); 3075 format.nPortIndex = portIndex; 3076 format.nIndex = 0; 3077 bool found = false; 3078 3079 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 3080 format.nIndex = index; 3081 status_t err = mOMX->getParameter( 3082 mNode, OMX_IndexParamVideoPortFormat, 3083 &format, sizeof(format)); 3084 3085 if (err != OK) { 3086 return err; 3087 } 3088 3089 // substitute back flexible color format to codec supported format 3090 OMX_U32 flexibleEquivalent; 3091 if (compressionFormat == OMX_VIDEO_CodingUnused 3092 && isFlexibleColorFormat( 3093 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 3094 && colorFormat == flexibleEquivalent) { 3095 ALOGI("[%s] using color format %#x in place of %#x", 3096 mComponentName.c_str(), format.eColorFormat, colorFormat); 3097 colorFormat = 
format.eColorFormat; 3098 } 3099 3100 // The following assertion is violated by TI's video decoder. 3101 // CHECK_EQ(format.nIndex, index); 3102 3103 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 3104 if (portIndex == kPortIndexInput 3105 && colorFormat == format.eColorFormat) { 3106 // eCompressionFormat does not seem right. 3107 found = true; 3108 break; 3109 } 3110 if (portIndex == kPortIndexOutput 3111 && compressionFormat == format.eCompressionFormat) { 3112 // eColorFormat does not seem right. 3113 found = true; 3114 break; 3115 } 3116 } 3117 3118 if (format.eCompressionFormat == compressionFormat 3119 && format.eColorFormat == colorFormat) { 3120 found = true; 3121 break; 3122 } 3123 3124 if (index == kMaxIndicesToCheck) { 3125 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3126 mComponentName.c_str(), index, 3127 asString(format.eCompressionFormat), format.eCompressionFormat, 3128 asString(format.eColorFormat), format.eColorFormat); 3129 } 3130 } 3131 3132 if (!found) { 3133 return UNKNOWN_ERROR; 3134 } 3135 3136 status_t err = mOMX->setParameter( 3137 mNode, OMX_IndexParamVideoPortFormat, 3138 &format, sizeof(format)); 3139 3140 return err; 3141} 3142 3143// Set optimal output format. OMX component lists output formats in the order 3144// of preference, but this got more complicated since the introduction of flexible 3145// YUV formats. We support a legacy behavior for applications that do not use 3146// surface output, do not specify an output format, but expect a "usable" standard 3147// OMX format. SW readable and standard formats must be flex-YUV. 
3148// 3149// Suggested preference order: 3150// - optimal format for texture rendering (mediaplayer behavior) 3151// - optimal SW readable & texture renderable format (flex-YUV support) 3152// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3153// - legacy "usable" standard formats 3154// 3155// For legacy support, we prefer a standard format, but will settle for a SW readable 3156// flex-YUV format. 3157status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3158 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3159 InitOMXParams(&format); 3160 format.nPortIndex = kPortIndexOutput; 3161 3162 InitOMXParams(&legacyFormat); 3163 // this field will change when we find a suitable legacy format 3164 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3165 3166 for (OMX_U32 index = 0; ; ++index) { 3167 format.nIndex = index; 3168 status_t err = mOMX->getParameter( 3169 mNode, OMX_IndexParamVideoPortFormat, 3170 &format, sizeof(format)); 3171 if (err != OK) { 3172 // no more formats, pick legacy format if found 3173 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3174 memcpy(&format, &legacyFormat, sizeof(format)); 3175 break; 3176 } 3177 return err; 3178 } 3179 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3180 return OMX_ErrorBadParameter; 3181 } 3182 if (!getLegacyFlexibleFormat) { 3183 break; 3184 } 3185 // standard formats that were exposed to users before 3186 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3187 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3188 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3189 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3190 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3191 break; 3192 } 3193 // find best legacy non-standard format 3194 OMX_U32 flexibleEquivalent; 3195 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3196 && isFlexibleColorFormat( 3197 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3198 &flexibleEquivalent) 3199 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3200 memcpy(&legacyFormat, &format, sizeof(format)); 3201 } 3202 } 3203 return mOMX->setParameter( 3204 mNode, OMX_IndexParamVideoPortFormat, 3205 &format, sizeof(format)); 3206} 3207 3208static const struct VideoCodingMapEntry { 3209 const char *mMime; 3210 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3211} kVideoCodingMapEntry[] = { 3212 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3213 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3214 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3215 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3216 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3217 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3218 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3219 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3220}; 3221 3222static status_t GetVideoCodingTypeFromMime( 3223 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3224 for (size_t i = 0; 3225 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3226 ++i) { 3227 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3228 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3229 return OK; 3230 } 3231 } 3232 3233 *codingType = OMX_VIDEO_CodingUnused; 3234 3235 return ERROR_UNSUPPORTED; 3236} 3237 3238static status_t GetMimeTypeForVideoCoding( 3239 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3240 for (size_t i = 0; 3241 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3242 ++i) { 3243 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3244 *mime = kVideoCodingMapEntry[i].mMime; 3245 return OK; 3246 } 3247 } 3248 3249 mime->clear(); 3250 3251 return ERROR_UNSUPPORTED; 3252} 3253 3254status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3255 OMX_PARAM_PORTDEFINITIONTYPE def; 3256 InitOMXParams(&def); 3257 
def.nPortIndex = portIndex; 3258 status_t err; 3259 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3260 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3261 err = mOMX->getParameter( 3262 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3263 if (err != OK) { 3264 return err; 3265 } 3266 def.nBufferCountActual = bufferNum; 3267 err = mOMX->setParameter( 3268 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3269 if (err != OK) { 3270 // Component could reject this request. 3271 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3272 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3273 } 3274 return OK; 3275} 3276 3277status_t ACodec::setupVideoDecoder( 3278 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3279 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3280 int32_t width, height; 3281 if (!msg->findInt32("width", &width) 3282 || !msg->findInt32("height", &height)) { 3283 return INVALID_OPERATION; 3284 } 3285 3286 OMX_VIDEO_CODINGTYPE compressionFormat; 3287 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3288 3289 if (err != OK) { 3290 return err; 3291 } 3292 3293 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3294 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3295 InitOMXParams(¶ms); 3296 params.nPortIndex = kPortIndexInput; 3297 // Check if VP9 decoder advertises supported profiles. 
3298 params.nProfileIndex = 0; 3299 status_t err = mOMX->getParameter( 3300 mNode, 3301 OMX_IndexParamVideoProfileLevelQuerySupported, 3302 ¶ms, 3303 sizeof(params)); 3304 mIsLegacyVP9Decoder = err != OK; 3305 } 3306 3307 err = setVideoPortFormatType( 3308 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3309 3310 if (err != OK) { 3311 return err; 3312 } 3313 3314 int32_t tmp; 3315 if (msg->findInt32("color-format", &tmp)) { 3316 OMX_COLOR_FORMATTYPE colorFormat = 3317 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3318 err = setVideoPortFormatType( 3319 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3320 if (err != OK) { 3321 ALOGW("[%s] does not support color format %d", 3322 mComponentName.c_str(), colorFormat); 3323 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3324 } 3325 } else { 3326 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3327 } 3328 3329 if (err != OK) { 3330 return err; 3331 } 3332 3333 // Set the component input buffer number to be |tmp|. If succeed, 3334 // component will set input port buffer number to be |tmp|. If fail, 3335 // component will keep the same buffer number as before. 3336 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3337 err = setPortBufferNum(kPortIndexInput, tmp); 3338 if (err != OK) 3339 return err; 3340 } 3341 3342 // Set the component output buffer number to be |tmp|. If succeed, 3343 // component will set output port buffer number to be |tmp|. If fail, 3344 // component will keep the same buffer number as before. 
3345 if (msg->findInt32("android._num-output-buffers", &tmp)) { 3346 err = setPortBufferNum(kPortIndexOutput, tmp); 3347 if (err != OK) 3348 return err; 3349 } 3350 3351 int32_t frameRateInt; 3352 float frameRateFloat; 3353 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3354 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3355 frameRateInt = -1; 3356 } 3357 frameRateFloat = (float)frameRateInt; 3358 } 3359 3360 err = setVideoFormatOnPort( 3361 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3362 3363 if (err != OK) { 3364 return err; 3365 } 3366 3367 err = setVideoFormatOnPort( 3368 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3369 3370 if (err != OK) { 3371 return err; 3372 } 3373 3374 err = setColorAspectsForVideoDecoder( 3375 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3376 if (err == ERROR_UNSUPPORTED) { // support is optional 3377 err = OK; 3378 } 3379 3380 if (err != OK) { 3381 return err; 3382 } 3383 3384 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3385 if (err == ERROR_UNSUPPORTED) { // support is optional 3386 err = OK; 3387 } 3388 return err; 3389} 3390 3391status_t ACodec::initDescribeColorAspectsIndex() { 3392 status_t err = mOMX->getExtensionIndex( 3393 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3394 if (err != OK) { 3395 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3396 } 3397 return err; 3398} 3399 3400status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3401 status_t err = ERROR_UNSUPPORTED; 3402 if (mDescribeColorAspectsIndex) { 3403 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3404 } 3405 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3406 mComponentName.c_str(), 3407 params.sAspects.mRange, asString(params.sAspects.mRange), 3408 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3409 
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3410 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3411 err, asString(err)); 3412 3413 if (verify && err == OK) { 3414 err = getCodecColorAspects(params); 3415 } 3416 3417 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3418 "[%s] setting color aspects failed even though codec advertises support", 3419 mComponentName.c_str()); 3420 return err; 3421} 3422 3423status_t ACodec::setColorAspectsForVideoDecoder( 3424 int32_t width, int32_t height, bool usingNativeWindow, 3425 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3426 DescribeColorAspectsParams params; 3427 InitOMXParams(¶ms); 3428 params.nPortIndex = kPortIndexOutput; 3429 3430 getColorAspectsFromFormat(configFormat, params.sAspects); 3431 if (usingNativeWindow) { 3432 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3433 // The default aspects will be set back to the output format during the 3434 // getFormat phase of configure(). Set non-Unspecified values back into the 3435 // format, in case component does not support this enumeration. 
3436 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3437 } 3438 3439 (void)initDescribeColorAspectsIndex(); 3440 3441 // communicate color aspects to codec 3442 return setCodecColorAspects(params); 3443} 3444 3445status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3446 status_t err = ERROR_UNSUPPORTED; 3447 if (mDescribeColorAspectsIndex) { 3448 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3449 } 3450 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3451 mComponentName.c_str(), 3452 params.sAspects.mRange, asString(params.sAspects.mRange), 3453 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3454 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3455 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3456 err, asString(err)); 3457 if (params.bRequestingDataSpace) { 3458 ALOGV("for dataspace %#x", params.nDataSpace); 3459 } 3460 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3461 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3462 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3463 mComponentName.c_str()); 3464 } 3465 return err; 3466} 3467 3468status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3469 DescribeColorAspectsParams params; 3470 InitOMXParams(¶ms); 3471 params.nPortIndex = kPortIndexInput; 3472 status_t err = getCodecColorAspects(params); 3473 if (err == OK) { 3474 // we only set encoder input aspects if codec supports them 3475 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3476 } 3477 return err; 3478} 3479 3480status_t ACodec::getDataSpace( 3481 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3482 bool tryCodec) { 3483 status_t err = OK; 3484 if (tryCodec) { 3485 // request dataspace guidance from codec. 
3486 params.bRequestingDataSpace = OMX_TRUE; 3487 err = getCodecColorAspects(params); 3488 params.bRequestingDataSpace = OMX_FALSE; 3489 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3490 *dataSpace = (android_dataspace)params.nDataSpace; 3491 return err; 3492 } else if (err == ERROR_UNSUPPORTED) { 3493 // ignore not-implemented error for dataspace requests 3494 err = OK; 3495 } 3496 } 3497 3498 // this returns legacy versions if available 3499 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3500 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3501 "and dataspace %#x", 3502 mComponentName.c_str(), 3503 params.sAspects.mRange, asString(params.sAspects.mRange), 3504 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3505 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3506 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3507 *dataSpace); 3508 return err; 3509} 3510 3511 3512status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3513 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3514 android_dataspace *dataSpace) { 3515 DescribeColorAspectsParams params; 3516 InitOMXParams(¶ms); 3517 params.nPortIndex = kPortIndexOutput; 3518 3519 // reset default format and get resulting format 3520 getColorAspectsFromFormat(configFormat, params.sAspects); 3521 if (dataSpace != NULL) { 3522 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3523 } 3524 status_t err = setCodecColorAspects(params, true /* readBack */); 3525 3526 // we always set specified aspects for decoders 3527 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3528 3529 if (dataSpace != NULL) { 3530 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3531 if (err == OK) { 3532 err = res; 3533 } 3534 } 3535 3536 return err; 3537} 3538 3539// initial video encoder setup for bytebuffer mode 
3540status_t ACodec::setColorAspectsForVideoEncoder( 3541 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3542 // copy config to output format as this is not exposed via getFormat 3543 copyColorConfig(configFormat, outputFormat); 3544 3545 DescribeColorAspectsParams params; 3546 InitOMXParams(¶ms); 3547 params.nPortIndex = kPortIndexInput; 3548 getColorAspectsFromFormat(configFormat, params.sAspects); 3549 3550 (void)initDescribeColorAspectsIndex(); 3551 3552 int32_t usingRecorder; 3553 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3554 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3555 int32_t width, height; 3556 if (configFormat->findInt32("width", &width) 3557 && configFormat->findInt32("height", &height)) { 3558 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3559 status_t err = getDataSpace( 3560 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3561 if (err != OK) { 3562 return err; 3563 } 3564 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3565 } 3566 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3567 } 3568 3569 // communicate color aspects to codec, but do not allow change of the platform aspects 3570 ColorAspects origAspects = params.sAspects; 3571 for (int triesLeft = 2; --triesLeft >= 0; ) { 3572 status_t err = setCodecColorAspects(params, true /* readBack */); 3573 if (err != OK 3574 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3575 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3576 return err; 3577 } 3578 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3579 mComponentName.c_str()); 3580 } 3581 return OK; 3582} 3583 3584status_t ACodec::setHDRStaticInfoForVideoCodec( 3585 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3586 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3587 3588 
DescribeHDRStaticInfoParams params; 3589 InitOMXParams(¶ms); 3590 params.nPortIndex = portIndex; 3591 3592 HDRStaticInfo *info = ¶ms.sInfo; 3593 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3594 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3595 } 3596 3597 (void)initDescribeHDRStaticInfoIndex(); 3598 3599 // communicate HDR static Info to codec 3600 return setHDRStaticInfo(params); 3601} 3602 3603// subsequent initial video encoder setup for surface mode 3604status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3605 android_dataspace *dataSpace /* nonnull */) { 3606 DescribeColorAspectsParams params; 3607 InitOMXParams(¶ms); 3608 params.nPortIndex = kPortIndexInput; 3609 ColorAspects &aspects = params.sAspects; 3610 3611 // reset default format and store resulting format into both input and output formats 3612 getColorAspectsFromFormat(mConfigFormat, aspects); 3613 int32_t width, height; 3614 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3615 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3616 } 3617 setColorAspectsIntoFormat(aspects, mInputFormat); 3618 setColorAspectsIntoFormat(aspects, mOutputFormat); 3619 3620 // communicate color aspects to codec, but do not allow any change 3621 ColorAspects origAspects = aspects; 3622 status_t err = OK; 3623 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3624 status_t err = setCodecColorAspects(params, true /* readBack */); 3625 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3626 break; 3627 } 3628 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3629 mComponentName.c_str()); 3630 } 3631 3632 *dataSpace = HAL_DATASPACE_BT709; 3633 aspects = origAspects; // restore desired color aspects 3634 status_t res = getDataSpace( 3635 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3636 if (err == 
OK) { 3637 err = res; 3638 } 3639 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3640 mInputFormat->setBuffer( 3641 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3642 3643 // update input format with codec supported color aspects (basically set unsupported 3644 // aspects to Unspecified) 3645 if (err == OK) { 3646 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3647 } 3648 3649 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3650 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3651 3652 return err; 3653} 3654 3655status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3656 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3657 DescribeHDRStaticInfoParams params; 3658 InitOMXParams(¶ms); 3659 params.nPortIndex = portIndex; 3660 3661 status_t err = getHDRStaticInfo(params); 3662 if (err == OK) { 3663 // we only set decodec output HDRStaticInfo if codec supports them 3664 setHDRStaticInfoIntoFormat(params.sInfo, format); 3665 } 3666 return err; 3667} 3668 3669status_t ACodec::initDescribeHDRStaticInfoIndex() { 3670 status_t err = mOMX->getExtensionIndex( 3671 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3672 if (err != OK) { 3673 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3674 } 3675 return err; 3676} 3677 3678status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3679 status_t err = ERROR_UNSUPPORTED; 3680 if (mDescribeHDRStaticInfoIndex) { 3681 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3682 } 3683 3684 const HDRStaticInfo *info = ¶ms.sInfo; 3685 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3686 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3687 mComponentName.c_str(), 3688 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, 
info->sType1.mG.y, 3689 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3690 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3691 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3692 3693 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3694 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3695 mComponentName.c_str()); 3696 return err; 3697} 3698 3699status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3700 status_t err = ERROR_UNSUPPORTED; 3701 if (mDescribeHDRStaticInfoIndex) { 3702 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3703 } 3704 3705 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3706 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3707 mComponentName.c_str()); 3708 return err; 3709} 3710 3711status_t ACodec::setupVideoEncoder( 3712 const char *mime, const sp<AMessage> &msg, 3713 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3714 int32_t tmp; 3715 if (!msg->findInt32("color-format", &tmp)) { 3716 return INVALID_OPERATION; 3717 } 3718 3719 OMX_COLOR_FORMATTYPE colorFormat = 3720 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3721 3722 status_t err = setVideoPortFormatType( 3723 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3724 3725 if (err != OK) { 3726 ALOGE("[%s] does not support color format %d", 3727 mComponentName.c_str(), colorFormat); 3728 3729 return err; 3730 } 3731 3732 /* Input port configuration */ 3733 3734 OMX_PARAM_PORTDEFINITIONTYPE def; 3735 InitOMXParams(&def); 3736 3737 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3738 3739 def.nPortIndex = kPortIndexInput; 3740 3741 err = mOMX->getParameter( 3742 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3743 3744 if (err != OK) { 3745 return err; 3746 } 3747 3748 int32_t width, height, bitrate; 3749 if (!msg->findInt32("width", &width) 3750 
|| !msg->findInt32("height", &height) 3751 || !msg->findInt32("bitrate", &bitrate)) { 3752 return INVALID_OPERATION; 3753 } 3754 3755 video_def->nFrameWidth = width; 3756 video_def->nFrameHeight = height; 3757 3758 int32_t stride; 3759 if (!msg->findInt32("stride", &stride)) { 3760 stride = width; 3761 } 3762 3763 video_def->nStride = stride; 3764 3765 int32_t sliceHeight; 3766 if (!msg->findInt32("slice-height", &sliceHeight)) { 3767 sliceHeight = height; 3768 } 3769 3770 video_def->nSliceHeight = sliceHeight; 3771 3772 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3773 3774 float frameRate; 3775 if (!msg->findFloat("frame-rate", &frameRate)) { 3776 int32_t tmp; 3777 if (!msg->findInt32("frame-rate", &tmp)) { 3778 return INVALID_OPERATION; 3779 } 3780 frameRate = (float)tmp; 3781 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3782 } 3783 3784 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3785 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3786 // this is redundant as it was already set up in setVideoPortFormatType 3787 // FIXME for now skip this only for flexible YUV formats 3788 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3789 video_def->eColorFormat = colorFormat; 3790 } 3791 3792 err = mOMX->setParameter( 3793 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3794 3795 if (err != OK) { 3796 ALOGE("[%s] failed to set input port definition parameters.", 3797 mComponentName.c_str()); 3798 3799 return err; 3800 } 3801 3802 /* Output port configuration */ 3803 3804 OMX_VIDEO_CODINGTYPE compressionFormat; 3805 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3806 3807 if (err != OK) { 3808 return err; 3809 } 3810 3811 err = setVideoPortFormatType( 3812 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3813 3814 if (err != OK) { 3815 ALOGE("[%s] does not support compression format %d", 3816 mComponentName.c_str(), compressionFormat); 3817 3818 return err; 3819 } 3820 3821 
def.nPortIndex = kPortIndexOutput; 3822 3823 err = mOMX->getParameter( 3824 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3825 3826 if (err != OK) { 3827 return err; 3828 } 3829 3830 video_def->nFrameWidth = width; 3831 video_def->nFrameHeight = height; 3832 video_def->xFramerate = 0; 3833 video_def->nBitrate = bitrate; 3834 video_def->eCompressionFormat = compressionFormat; 3835 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3836 3837 err = mOMX->setParameter( 3838 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3839 3840 if (err != OK) { 3841 ALOGE("[%s] failed to set output port definition parameters.", 3842 mComponentName.c_str()); 3843 3844 return err; 3845 } 3846 3847 int32_t intraRefreshPeriod = 0; 3848 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3849 && intraRefreshPeriod >= 0) { 3850 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3851 if (err != OK) { 3852 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3853 mComponentName.c_str()); 3854 err = OK; 3855 } 3856 } 3857 3858 switch (compressionFormat) { 3859 case OMX_VIDEO_CodingMPEG4: 3860 err = setupMPEG4EncoderParameters(msg); 3861 break; 3862 3863 case OMX_VIDEO_CodingH263: 3864 err = setupH263EncoderParameters(msg); 3865 break; 3866 3867 case OMX_VIDEO_CodingAVC: 3868 err = setupAVCEncoderParameters(msg); 3869 break; 3870 3871 case OMX_VIDEO_CodingHEVC: 3872 err = setupHEVCEncoderParameters(msg); 3873 break; 3874 3875 case OMX_VIDEO_CodingVP8: 3876 case OMX_VIDEO_CodingVP9: 3877 err = setupVPXEncoderParameters(msg); 3878 break; 3879 3880 default: 3881 break; 3882 } 3883 3884 if (err != OK) { 3885 return err; 3886 } 3887 3888 // Set up color aspects on input, but propagate them to the output format, as they will 3889 // not be read back from encoder. 3890 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3891 if (err == ERROR_UNSUPPORTED) { 3892 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3893 err = OK; 3894 } 3895 3896 if (err != OK) { 3897 return err; 3898 } 3899 3900 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3901 if (err == ERROR_UNSUPPORTED) { // support is optional 3902 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str()); 3903 err = OK; 3904 } 3905 3906 if (err != OK) { 3907 return err; 3908 } 3909 3910 switch (compressionFormat) { 3911 case OMX_VIDEO_CodingAVC: 3912 case OMX_VIDEO_CodingHEVC: 3913 err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat); 3914 if (err != OK) { 3915 err = OK; // ignore failure 3916 } 3917 break; 3918 3919 case OMX_VIDEO_CodingVP8: 3920 case OMX_VIDEO_CodingVP9: 3921 // TODO: do we need to support android.generic layering? webrtc layering is 3922 // already set up in setupVPXEncoderParameters. 3923 break; 3924 3925 default: 3926 break; 3927 } 3928 3929 if (err == OK) { 3930 ALOGI("setupVideoEncoder succeeded"); 3931 } 3932 3933 return err; 3934} 3935 3936status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3937 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3938 InitOMXParams(¶ms); 3939 params.nPortIndex = kPortIndexOutput; 3940 3941 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3942 3943 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3944 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3945 int32_t mbs; 3946 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3947 return INVALID_OPERATION; 3948 } 3949 params.nCirMBs = mbs; 3950 } 3951 3952 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3953 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3954 int32_t mbs; 3955 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3956 return INVALID_OPERATION; 3957 } 3958 params.nAirMBs = mbs; 3959 3960 int32_t ref; 3961 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3962 return INVALID_OPERATION; 3963 } 3964 
params.nAirRef = ref; 3965 } 3966 3967 status_t err = mOMX->setParameter( 3968 mNode, OMX_IndexParamVideoIntraRefresh, 3969 ¶ms, sizeof(params)); 3970 return err; 3971} 3972 3973static OMX_U32 setPFramesSpacing( 3974 int32_t iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) { 3975 // BFramesSpacing is the number of B frames between I/P frames 3976 // PFramesSpacing (the value to be returned) is the number of P frames between I frames 3977 // 3978 // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1 3979 // ^^^ ^^^ ^^^ 3980 // number of B frames number of P I frame 3981 // 3982 // = (PFramesSpacing + 1) * (BFramesSpacing + 1) 3983 // 3984 // E.g. 3985 // I P I : I-interval: 8, nPFrames 1, nBFrames 3 3986 // BBB BBB 3987 3988 if (iFramesInterval < 0) { // just 1 key frame 3989 return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1 3990 } else if (iFramesInterval == 0) { // just key frames 3991 return 0; 3992 } 3993 3994 // round down as key-frame-interval is an upper limit 3995 uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval); 3996 OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1); 3997 return ret > 0 ? 
ret - 1 : 0; 3998} 3999 4000static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 4001 int32_t tmp; 4002 if (!msg->findInt32("bitrate-mode", &tmp)) { 4003 return OMX_Video_ControlRateVariable; 4004 } 4005 4006 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 4007} 4008 4009status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 4010 int32_t bitrate, iFrameInterval; 4011 if (!msg->findInt32("bitrate", &bitrate) 4012 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4013 return INVALID_OPERATION; 4014 } 4015 4016 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4017 4018 float frameRate; 4019 if (!msg->findFloat("frame-rate", &frameRate)) { 4020 int32_t tmp; 4021 if (!msg->findInt32("frame-rate", &tmp)) { 4022 return INVALID_OPERATION; 4023 } 4024 frameRate = (float)tmp; 4025 } 4026 4027 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 4028 InitOMXParams(&mpeg4type); 4029 mpeg4type.nPortIndex = kPortIndexOutput; 4030 4031 status_t err = mOMX->getParameter( 4032 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 4033 4034 if (err != OK) { 4035 return err; 4036 } 4037 4038 mpeg4type.nSliceHeaderSpacing = 0; 4039 mpeg4type.bSVH = OMX_FALSE; 4040 mpeg4type.bGov = OMX_FALSE; 4041 4042 mpeg4type.nAllowedPictureTypes = 4043 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4044 4045 mpeg4type.nBFrames = 0; 4046 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames); 4047 if (mpeg4type.nPFrames == 0) { 4048 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4049 } 4050 mpeg4type.nIDCVLCThreshold = 0; 4051 mpeg4type.bACPred = OMX_TRUE; 4052 mpeg4type.nMaxPacketSize = 256; 4053 mpeg4type.nTimeIncRes = 1000; 4054 mpeg4type.nHeaderExtension = 0; 4055 mpeg4type.bReversibleVLC = OMX_FALSE; 4056 4057 int32_t profile; 4058 if (msg->findInt32("profile", &profile)) { 4059 int32_t level; 4060 if (!msg->findInt32("level", &level)) { 4061 return INVALID_OPERATION; 4062 } 4063 4064 err = 
verifySupportForProfileAndLevel(profile, level); 4065 4066 if (err != OK) { 4067 return err; 4068 } 4069 4070 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 4071 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 4072 } 4073 4074 err = mOMX->setParameter( 4075 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 4076 4077 if (err != OK) { 4078 return err; 4079 } 4080 4081 err = configureBitrate(bitrate, bitrateMode); 4082 4083 if (err != OK) { 4084 return err; 4085 } 4086 4087 return setupErrorCorrectionParameters(); 4088} 4089 4090status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 4091 int32_t bitrate, iFrameInterval; 4092 if (!msg->findInt32("bitrate", &bitrate) 4093 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4094 return INVALID_OPERATION; 4095 } 4096 4097 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4098 4099 float frameRate; 4100 if (!msg->findFloat("frame-rate", &frameRate)) { 4101 int32_t tmp; 4102 if (!msg->findInt32("frame-rate", &tmp)) { 4103 return INVALID_OPERATION; 4104 } 4105 frameRate = (float)tmp; 4106 } 4107 4108 OMX_VIDEO_PARAM_H263TYPE h263type; 4109 InitOMXParams(&h263type); 4110 h263type.nPortIndex = kPortIndexOutput; 4111 4112 status_t err = mOMX->getParameter( 4113 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4114 4115 if (err != OK) { 4116 return err; 4117 } 4118 4119 h263type.nAllowedPictureTypes = 4120 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4121 4122 h263type.nBFrames = 0; 4123 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames); 4124 if (h263type.nPFrames == 0) { 4125 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4126 } 4127 4128 int32_t profile; 4129 if (msg->findInt32("profile", &profile)) { 4130 int32_t level; 4131 if (!msg->findInt32("level", &level)) { 4132 return INVALID_OPERATION; 4133 } 4134 4135 err = verifySupportForProfileAndLevel(profile, level); 4136 4137 if 
(err != OK) { 4138 return err; 4139 } 4140 4141 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 4142 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 4143 } 4144 4145 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 4146 h263type.bForceRoundingTypeToZero = OMX_FALSE; 4147 h263type.nPictureHeaderRepetition = 0; 4148 h263type.nGOBHeaderInterval = 0; 4149 4150 err = mOMX->setParameter( 4151 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4152 4153 if (err != OK) { 4154 return err; 4155 } 4156 4157 err = configureBitrate(bitrate, bitrateMode); 4158 4159 if (err != OK) { 4160 return err; 4161 } 4162 4163 return setupErrorCorrectionParameters(); 4164} 4165 4166// static 4167int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 4168 int width, int height, int rate, int bitrate, 4169 OMX_VIDEO_AVCPROFILETYPE profile) { 4170 // convert bitrate to main/baseline profile kbps equivalent 4171 switch (profile) { 4172 case OMX_VIDEO_AVCProfileHigh10: 4173 bitrate = divUp(bitrate, 3000); break; 4174 case OMX_VIDEO_AVCProfileHigh: 4175 bitrate = divUp(bitrate, 1250); break; 4176 default: 4177 bitrate = divUp(bitrate, 1000); break; 4178 } 4179 4180 // convert size and rate to MBs 4181 width = divUp(width, 16); 4182 height = divUp(height, 16); 4183 int mbs = width * height; 4184 rate *= mbs; 4185 int maxDimension = max(width, height); 4186 4187 static const int limits[][5] = { 4188 /* MBps MB dim bitrate level */ 4189 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 4190 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 4191 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 4192 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 4193 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 4194 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 4195 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 4196 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 4197 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4198 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4199 { 216000, 
5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4200 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4201 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4202 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4203 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4204 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4205 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4206 }; 4207 4208 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4209 const int (&limit)[5] = limits[i]; 4210 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4211 && bitrate <= limit[3]) { 4212 return limit[4]; 4213 } 4214 } 4215 return 0; 4216} 4217 4218status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4219 int32_t bitrate, iFrameInterval; 4220 if (!msg->findInt32("bitrate", &bitrate) 4221 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4222 return INVALID_OPERATION; 4223 } 4224 4225 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4226 4227 float frameRate; 4228 if (!msg->findFloat("frame-rate", &frameRate)) { 4229 int32_t tmp; 4230 if (!msg->findInt32("frame-rate", &tmp)) { 4231 return INVALID_OPERATION; 4232 } 4233 frameRate = (float)tmp; 4234 } 4235 4236 status_t err = OK; 4237 int32_t intraRefreshMode = 0; 4238 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4239 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4240 if (err != OK) { 4241 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4242 err, intraRefreshMode); 4243 return err; 4244 } 4245 } 4246 4247 OMX_VIDEO_PARAM_AVCTYPE h264type; 4248 InitOMXParams(&h264type); 4249 h264type.nPortIndex = kPortIndexOutput; 4250 4251 err = mOMX->getParameter( 4252 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4253 4254 if (err != OK) { 4255 return err; 4256 } 4257 4258 h264type.nAllowedPictureTypes = 4259 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4260 4261 int32_t profile; 4262 if (msg->findInt32("profile", 
&profile)) { 4263 int32_t level; 4264 if (!msg->findInt32("level", &level)) { 4265 return INVALID_OPERATION; 4266 } 4267 4268 err = verifySupportForProfileAndLevel(profile, level); 4269 4270 if (err != OK) { 4271 return err; 4272 } 4273 4274 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4275 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4276 } else { 4277 // Use largest supported profile for AVC recording if profile is not specified. 4278 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4279 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4280 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4281 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4282 h264type.eProfile = profile; 4283 break; 4284 } 4285 } 4286 } 4287 4288 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4289 asString(h264type.eProfile), asString(h264type.eLevel)); 4290 4291 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4292 h264type.nSliceHeaderSpacing = 0; 4293 h264type.bUseHadamard = OMX_TRUE; 4294 h264type.nRefFrames = 1; 4295 h264type.nBFrames = 0; 4296 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4297 if (h264type.nPFrames == 0) { 4298 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4299 } 4300 h264type.nRefIdx10ActiveMinus1 = 0; 4301 h264type.nRefIdx11ActiveMinus1 = 0; 4302 h264type.bEntropyCodingCABAC = OMX_FALSE; 4303 h264type.bWeightedPPrediction = OMX_FALSE; 4304 h264type.bconstIpred = OMX_FALSE; 4305 h264type.bDirect8x8Inference = OMX_FALSE; 4306 h264type.bDirectSpatialTemporal = OMX_FALSE; 4307 h264type.nCabacInitIdc = 0; 4308 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4309 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4310 h264type.nSliceHeaderSpacing = 0; 4311 h264type.bUseHadamard = OMX_TRUE; 4312 h264type.nRefFrames = 2; 4313 h264type.nBFrames = 1; 4314 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4315 
h264type.nAllowedPictureTypes = 4316 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4317 h264type.nRefIdx10ActiveMinus1 = 0; 4318 h264type.nRefIdx11ActiveMinus1 = 0; 4319 h264type.bEntropyCodingCABAC = OMX_TRUE; 4320 h264type.bWeightedPPrediction = OMX_TRUE; 4321 h264type.bconstIpred = OMX_TRUE; 4322 h264type.bDirect8x8Inference = OMX_TRUE; 4323 h264type.bDirectSpatialTemporal = OMX_TRUE; 4324 h264type.nCabacInitIdc = 1; 4325 } 4326 4327 if (h264type.nBFrames != 0) { 4328 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4329 } 4330 4331 h264type.bEnableUEP = OMX_FALSE; 4332 h264type.bEnableFMO = OMX_FALSE; 4333 h264type.bEnableASO = OMX_FALSE; 4334 h264type.bEnableRS = OMX_FALSE; 4335 h264type.bFrameMBsOnly = OMX_TRUE; 4336 h264type.bMBAFF = OMX_FALSE; 4337 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4338 4339 err = mOMX->setParameter( 4340 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4341 4342 if (err != OK) { 4343 return err; 4344 } 4345 4346 return configureBitrate(bitrate, bitrateMode); 4347} 4348 4349status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4350 int32_t bitrate, iFrameInterval; 4351 if (!msg->findInt32("bitrate", &bitrate) 4352 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4353 return INVALID_OPERATION; 4354 } 4355 4356 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4357 4358 float frameRate; 4359 if (!msg->findFloat("frame-rate", &frameRate)) { 4360 int32_t tmp; 4361 if (!msg->findInt32("frame-rate", &tmp)) { 4362 return INVALID_OPERATION; 4363 } 4364 frameRate = (float)tmp; 4365 } 4366 4367 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4368 InitOMXParams(&hevcType); 4369 hevcType.nPortIndex = kPortIndexOutput; 4370 4371 status_t err = OK; 4372 err = mOMX->getParameter( 4373 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4374 if (err != OK) { 4375 return err; 4376 } 4377 4378 int32_t profile; 4379 if 
(msg->findInt32("profile", &profile)) { 4380 int32_t level; 4381 if (!msg->findInt32("level", &level)) { 4382 return INVALID_OPERATION; 4383 } 4384 4385 err = verifySupportForProfileAndLevel(profile, level); 4386 if (err != OK) { 4387 return err; 4388 } 4389 4390 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4391 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4392 } 4393 // TODO: finer control? 4394 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4395 4396 err = mOMX->setParameter( 4397 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4398 if (err != OK) { 4399 return err; 4400 } 4401 4402 return configureBitrate(bitrate, bitrateMode); 4403} 4404 4405status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4406 int32_t bitrate; 4407 int32_t iFrameInterval = 0; 4408 size_t tsLayers = 0; 4409 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4410 OMX_VIDEO_VPXTemporalLayerPatternNone; 4411 static const uint32_t kVp8LayerRateAlloction 4412 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4413 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4414 {100, 100, 100}, // 1 layer 4415 { 60, 100, 100}, // 2 layers {60%, 40%} 4416 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4417 }; 4418 if (!msg->findInt32("bitrate", &bitrate)) { 4419 return INVALID_OPERATION; 4420 } 4421 msg->findInt32("i-frame-interval", &iFrameInterval); 4422 4423 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4424 4425 float frameRate; 4426 if (!msg->findFloat("frame-rate", &frameRate)) { 4427 int32_t tmp; 4428 if (!msg->findInt32("frame-rate", &tmp)) { 4429 return INVALID_OPERATION; 4430 } 4431 frameRate = (float)tmp; 4432 } 4433 4434 AString tsSchema; 4435 if (msg->findString("ts-schema", &tsSchema)) { 4436 unsigned int numLayers = 0; 4437 unsigned int numBLayers = 0; 4438 int tags; 4439 char dummy; 4440 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4441 
&& numLayers > 0) { 4442 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4443 tsLayers = numLayers; 4444 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4445 &numLayers, &dummy, &numBLayers, &dummy)) 4446 && (tags == 1 || (tags == 3 && dummy == '+')) 4447 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4448 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4449 // VPX does not have a concept of B-frames, so just count all layers 4450 tsLayers = numLayers + numBLayers; 4451 } else { 4452 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4453 } 4454 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4455 } 4456 4457 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4458 InitOMXParams(&vp8type); 4459 vp8type.nPortIndex = kPortIndexOutput; 4460 status_t err = mOMX->getParameter( 4461 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4462 &vp8type, sizeof(vp8type)); 4463 4464 if (err == OK) { 4465 if (iFrameInterval > 0) { 4466 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4467 } 4468 vp8type.eTemporalPattern = pattern; 4469 vp8type.nTemporalLayerCount = tsLayers; 4470 if (tsLayers > 0) { 4471 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4472 vp8type.nTemporalLayerBitrateRatio[i] = 4473 kVp8LayerRateAlloction[tsLayers - 1][i]; 4474 } 4475 } 4476 if (bitrateMode == OMX_Video_ControlRateConstant) { 4477 vp8type.nMinQuantizer = 2; 4478 vp8type.nMaxQuantizer = 63; 4479 } 4480 4481 err = mOMX->setParameter( 4482 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4483 &vp8type, sizeof(vp8type)); 4484 if (err != OK) { 4485 ALOGW("Extended VP8 parameters set failed: %d", err); 4486 } 4487 } 4488 4489 return configureBitrate(bitrate, bitrateMode); 4490} 4491 4492status_t ACodec::verifySupportForProfileAndLevel( 4493 int32_t profile, int32_t level) { 4494 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4495 InitOMXParams(¶ms); 4496 
params.nPortIndex = kPortIndexOutput; 4497 4498 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4499 params.nProfileIndex = index; 4500 status_t err = mOMX->getParameter( 4501 mNode, 4502 OMX_IndexParamVideoProfileLevelQuerySupported, 4503 ¶ms, 4504 sizeof(params)); 4505 4506 if (err != OK) { 4507 return err; 4508 } 4509 4510 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4511 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4512 4513 if (profile == supportedProfile && level <= supportedLevel) { 4514 return OK; 4515 } 4516 4517 if (index == kMaxIndicesToCheck) { 4518 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4519 mComponentName.c_str(), index, 4520 params.eProfile, params.eLevel); 4521 } 4522 } 4523 return ERROR_UNSUPPORTED; 4524} 4525 4526status_t ACodec::configureBitrate( 4527 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4528 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4529 InitOMXParams(&bitrateType); 4530 bitrateType.nPortIndex = kPortIndexOutput; 4531 4532 status_t err = mOMX->getParameter( 4533 mNode, OMX_IndexParamVideoBitrate, 4534 &bitrateType, sizeof(bitrateType)); 4535 4536 if (err != OK) { 4537 return err; 4538 } 4539 4540 bitrateType.eControlRate = bitrateMode; 4541 bitrateType.nTargetBitrate = bitrate; 4542 4543 return mOMX->setParameter( 4544 mNode, OMX_IndexParamVideoBitrate, 4545 &bitrateType, sizeof(bitrateType)); 4546} 4547 4548status_t ACodec::setupErrorCorrectionParameters() { 4549 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4550 InitOMXParams(&errorCorrectionType); 4551 errorCorrectionType.nPortIndex = kPortIndexOutput; 4552 4553 status_t err = mOMX->getParameter( 4554 mNode, OMX_IndexParamVideoErrorCorrection, 4555 &errorCorrectionType, sizeof(errorCorrectionType)); 4556 4557 if (err != OK) { 4558 return OK; // Optional feature. 
Ignore this failure 4559 } 4560 4561 errorCorrectionType.bEnableHEC = OMX_FALSE; 4562 errorCorrectionType.bEnableResync = OMX_TRUE; 4563 errorCorrectionType.nResynchMarkerSpacing = 256; 4564 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4565 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4566 4567 return mOMX->setParameter( 4568 mNode, OMX_IndexParamVideoErrorCorrection, 4569 &errorCorrectionType, sizeof(errorCorrectionType)); 4570} 4571 4572status_t ACodec::setVideoFormatOnPort( 4573 OMX_U32 portIndex, 4574 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4575 float frameRate) { 4576 OMX_PARAM_PORTDEFINITIONTYPE def; 4577 InitOMXParams(&def); 4578 def.nPortIndex = portIndex; 4579 4580 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4581 4582 status_t err = mOMX->getParameter( 4583 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4584 if (err != OK) { 4585 return err; 4586 } 4587 4588 if (portIndex == kPortIndexInput) { 4589 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4590 const size_t X = 64 * 1024; 4591 if (def.nBufferSize < X) { 4592 def.nBufferSize = X; 4593 } 4594 } 4595 4596 if (def.eDomain != OMX_PortDomainVideo) { 4597 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4598 return FAILED_TRANSACTION; 4599 } 4600 4601 video_def->nFrameWidth = width; 4602 video_def->nFrameHeight = height; 4603 4604 if (portIndex == kPortIndexInput) { 4605 video_def->eCompressionFormat = compressionFormat; 4606 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4607 if (frameRate >= 0) { 4608 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4609 } 4610 } 4611 4612 err = mOMX->setParameter( 4613 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4614 4615 return err; 4616} 4617 4618status_t ACodec::initNativeWindow() { 4619 if (mNativeWindow != NULL) { 4620 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4621 } 4622 4623 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4624 return OK; 4625} 4626 4627size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4628 size_t n = 0; 4629 4630 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4631 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4632 4633 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4634 ++n; 4635 } 4636 } 4637 4638 return n; 4639} 4640 4641size_t ACodec::countBuffersOwnedByNativeWindow() const { 4642 size_t n = 0; 4643 4644 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4645 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4646 4647 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4648 ++n; 4649 } 4650 } 4651 4652 return n; 4653} 4654 4655void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4656 if (mNativeWindow == NULL) { 4657 return; 4658 } 4659 4660 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4661 && dequeueBufferFromNativeWindow() != NULL) { 
4662 // these buffers will be submitted as regular buffers; account for this 4663 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4664 --mMetadataBuffersToSubmit; 4665 } 4666 } 4667} 4668 4669bool ACodec::allYourBuffersAreBelongToUs( 4670 OMX_U32 portIndex) { 4671 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4672 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4673 4674 if (info->mStatus != BufferInfo::OWNED_BY_US 4675 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4676 ALOGV("[%s] Buffer %u on port %u still has status %d", 4677 mComponentName.c_str(), 4678 info->mBufferID, portIndex, info->mStatus); 4679 return false; 4680 } 4681 } 4682 4683 return true; 4684} 4685 4686bool ACodec::allYourBuffersAreBelongToUs() { 4687 return allYourBuffersAreBelongToUs(kPortIndexInput) 4688 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4689} 4690 4691void ACodec::deferMessage(const sp<AMessage> &msg) { 4692 mDeferredQueue.push_back(msg); 4693} 4694 4695void ACodec::processDeferredMessages() { 4696 List<sp<AMessage> > queue = mDeferredQueue; 4697 mDeferredQueue.clear(); 4698 4699 List<sp<AMessage> >::iterator it = queue.begin(); 4700 while (it != queue.end()) { 4701 onMessageReceived(*it++); 4702 } 4703} 4704 4705// static 4706bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4707 MediaImage2 &image = params.sMediaImage; 4708 memset(&image, 0, sizeof(image)); 4709 4710 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4711 image.mNumPlanes = 0; 4712 4713 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4714 image.mWidth = params.nFrameWidth; 4715 image.mHeight = params.nFrameHeight; 4716 4717 // only supporting YUV420 4718 if (fmt != OMX_COLOR_FormatYUV420Planar && 4719 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4720 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4721 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4722 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4723 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4724 return false; 4725 } 4726 4727 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4728 if (params.nStride != 0 && params.nSliceHeight == 0) { 4729 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4730 params.nFrameHeight); 4731 params.nSliceHeight = params.nFrameHeight; 4732 } 4733 4734 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4735 // prevent integer overflows further down the line, and do not indicate support for 4736 // 32kx32k video. 4737 if (params.nStride == 0 || params.nSliceHeight == 0 4738 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4739 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4740 fmt, fmt, params.nStride, params.nSliceHeight); 4741 return false; 4742 } 4743 4744 // set-up YUV format 4745 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4746 image.mNumPlanes = 3; 4747 image.mBitDepth = 8; 4748 image.mBitDepthAllocated = 8; 4749 image.mPlane[image.Y].mOffset = 0; 4750 image.mPlane[image.Y].mColInc = 1; 4751 image.mPlane[image.Y].mRowInc = params.nStride; 4752 image.mPlane[image.Y].mHorizSubsampling = 1; 4753 image.mPlane[image.Y].mVertSubsampling = 1; 4754 4755 switch ((int)fmt) { 4756 case HAL_PIXEL_FORMAT_YV12: 4757 if (params.bUsingNativeBuffers) { 4758 size_t ystride = align(params.nStride, 16); 4759 size_t cstride = align(params.nStride / 2, 16); 4760 image.mPlane[image.Y].mRowInc = ystride; 4761 4762 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4763 image.mPlane[image.V].mColInc = 1; 4764 image.mPlane[image.V].mRowInc = cstride; 4765 image.mPlane[image.V].mHorizSubsampling = 2; 4766 image.mPlane[image.V].mVertSubsampling = 2; 4767 4768 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4769 + (cstride * params.nSliceHeight / 2); 4770 image.mPlane[image.U].mColInc = 1; 4771 image.mPlane[image.U].mRowInc = cstride; 4772 
image.mPlane[image.U].mHorizSubsampling = 2; 4773 image.mPlane[image.U].mVertSubsampling = 2; 4774 break; 4775 } else { 4776 // fall through as YV12 is used for YUV420Planar by some codecs 4777 } 4778 4779 case OMX_COLOR_FormatYUV420Planar: 4780 case OMX_COLOR_FormatYUV420PackedPlanar: 4781 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4782 image.mPlane[image.U].mColInc = 1; 4783 image.mPlane[image.U].mRowInc = params.nStride / 2; 4784 image.mPlane[image.U].mHorizSubsampling = 2; 4785 image.mPlane[image.U].mVertSubsampling = 2; 4786 4787 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4788 + (params.nStride * params.nSliceHeight / 4); 4789 image.mPlane[image.V].mColInc = 1; 4790 image.mPlane[image.V].mRowInc = params.nStride / 2; 4791 image.mPlane[image.V].mHorizSubsampling = 2; 4792 image.mPlane[image.V].mVertSubsampling = 2; 4793 break; 4794 4795 case OMX_COLOR_FormatYUV420SemiPlanar: 4796 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4797 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4798 // NV12 4799 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4800 image.mPlane[image.U].mColInc = 2; 4801 image.mPlane[image.U].mRowInc = params.nStride; 4802 image.mPlane[image.U].mHorizSubsampling = 2; 4803 image.mPlane[image.U].mVertSubsampling = 2; 4804 4805 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4806 image.mPlane[image.V].mColInc = 2; 4807 image.mPlane[image.V].mRowInc = params.nStride; 4808 image.mPlane[image.V].mHorizSubsampling = 2; 4809 image.mPlane[image.V].mVertSubsampling = 2; 4810 break; 4811 4812 default: 4813 TRESPASS(); 4814 } 4815 return true; 4816} 4817 4818// static 4819bool ACodec::describeColorFormat( 4820 const sp<IOMX> &omx, IOMX::node_id node, 4821 DescribeColorFormat2Params &describeParams) 4822{ 4823 OMX_INDEXTYPE describeColorFormatIndex; 4824 if (omx->getExtensionIndex( 4825 node, "OMX.google.android.index.describeColorFormat", 4826 
&describeColorFormatIndex) == OK) { 4827 DescribeColorFormatParams describeParamsV1(describeParams); 4828 if (omx->getParameter( 4829 node, describeColorFormatIndex, 4830 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4831 describeParams.initFromV1(describeParamsV1); 4832 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4833 } 4834 } else if (omx->getExtensionIndex( 4835 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4836 && omx->getParameter( 4837 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4838 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4839 } 4840 4841 return describeDefaultColorFormat(describeParams); 4842} 4843 4844// static 4845bool ACodec::isFlexibleColorFormat( 4846 const sp<IOMX> &omx, IOMX::node_id node, 4847 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4848 DescribeColorFormat2Params describeParams; 4849 InitOMXParams(&describeParams); 4850 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4851 // reasonable dummy values 4852 describeParams.nFrameWidth = 128; 4853 describeParams.nFrameHeight = 128; 4854 describeParams.nStride = 128; 4855 describeParams.nSliceHeight = 128; 4856 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4857 4858 CHECK(flexibleEquivalent != NULL); 4859 4860 if (!describeColorFormat(omx, node, describeParams)) { 4861 return false; 4862 } 4863 4864 const MediaImage2 &img = describeParams.sMediaImage; 4865 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4866 if (img.mNumPlanes != 3 4867 || img.mPlane[img.Y].mHorizSubsampling != 1 4868 || img.mPlane[img.Y].mVertSubsampling != 1) { 4869 return false; 4870 } 4871 4872 // YUV 420 4873 if (img.mPlane[img.U].mHorizSubsampling == 2 4874 && img.mPlane[img.U].mVertSubsampling == 2 4875 && img.mPlane[img.V].mHorizSubsampling == 2 4876 && 
// Reads the current OMX port definition for |portIndex| and translates it
// into message keys on |notify|: mime plus stride/slice-height/color/crop
// info for video ports, or channel-count/sample-rate/pcm-encoding for audio
// ports. Returns OK on success; BAD_VALUE/BAD_TYPE/FAILED_TRANSACTION when
// the port definition is inconsistent or unsupported.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    // sanity-check that the component reports the expected port direction
    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // raw video: decoder output or encoder input only
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // client accesses buffers directly; publish the plane
                        // layout so it can interpret the raw frames
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // fall back to the full frame if the component does
                        // not report a crop rectangle
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // reject crop rects that extend outside the frame
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // message-format crop uses inclusive right/bottom
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // publish the temporal-layering schema if the encoder
                    // exposes the Android VP8 encoder extension
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    // compressed video: decoder input or encoder output only
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // only interleaved linear PCM is supported (mono may be
                    // reported as non-interleaved)
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // map numerical format + bit depth to an AudioEncoding
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; band mode selects NB (8 kHz) vs
                    // WB (16 kHz)
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    // G.711 is reported through the PCM params; ePCMMode
                    // distinguishes mu-law / A-law / linear
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    break;
                }

                default:
                    ALOGE("Unsupported audio coding: %s(%d)\n",
                            asString(audioDef->eEncoding), audioDef->eEncoding);
                    return BAD_TYPE;
            }
            break;
        }

        default:
            ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
            return BAD_TYPE;
    }

    return OK;
}
params.nSamplingRate); 5307 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5308 break; 5309 } 5310 5311 case OMX_AUDIO_CodingGSMFR: 5312 { 5313 OMX_AUDIO_PARAM_PCMMODETYPE params; 5314 InitOMXParams(¶ms); 5315 params.nPortIndex = portIndex; 5316 5317 err = mOMX->getParameter( 5318 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5319 if (err != OK) { 5320 return err; 5321 } 5322 5323 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5324 notify->setInt32("channel-count", params.nChannels); 5325 notify->setInt32("sample-rate", params.nSamplingRate); 5326 break; 5327 } 5328 5329 default: 5330 ALOGE("Unsupported audio coding: %s(%d)\n", 5331 asString(audioDef->eEncoding), audioDef->eEncoding); 5332 return BAD_TYPE; 5333 } 5334 break; 5335 } 5336 5337 default: 5338 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5339 return BAD_TYPE; 5340 } 5341 5342 return OK; 5343} 5344 5345void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5346 // aspects are normally communicated in ColorAspects 5347 int32_t range, standard, transfer; 5348 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5349 5350 // if some aspects are unspecified, use dataspace fields 5351 if (range != 0) { 5352 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5353 } 5354 if (standard != 0) { 5355 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5356 } 5357 if (transfer != 0) { 5358 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5359 } 5360 5361 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5362 if (range != 0) { 5363 mOutputFormat->setInt32("color-range", range); 5364 } 5365 if (standard != 0) { 5366 mOutputFormat->setInt32("color-standard", standard); 5367 } 5368 if (transfer != 0) { 5369 mOutputFormat->setInt32("color-transfer", transfer); 5370 } 5371 5372 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5373 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5374 dataSpace, 5375 aspects.mRange, asString(aspects.mRange), 5376 aspects.mPrimaries, asString(aspects.mPrimaries), 5377 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5378 aspects.mTransfer, asString(aspects.mTransfer), 5379 range, asString((ColorRange)range), 5380 standard, asString((ColorStandard)standard), 5381 transfer, asString((ColorTransfer)transfer)); 5382} 5383 5384void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5385 // store new output format, at the same time mark that this is no longer the first frame 5386 mOutputFormat = mBaseOutputFormat->dup(); 5387 5388 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5389 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5390 return; 5391 } 5392 5393 if (expectedFormat != NULL) { 5394 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5395 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5396 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5397 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5398 mComponentName.c_str(), 5399 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5400 } 5401 } 5402 5403 if (!mIsVideo && !mIsEncoder) { 5404 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5405 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5406 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5407 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5408 5409 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5410 if (mConverter[kPortIndexOutput] != NULL) { 5411 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5412 } 5413 } 5414 5415 if (mTunneled) { 5416 sendFormatChange(); 5417 } 5418} 5419 5420void 
// For raw video rendered to a native window, copies the crop rect and
// dataspace from mOutputFormat into |notify| so the renderer can apply them
// when the buffer is queued.
void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts a kWhatOutputFormatChanged notification carrying mOutputFormat.
// For raw audio with encoder delay/padding, also rescales the delay/padding
// to the new sample rate and (re)creates the SkipCutBuffer that trims them.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // delay/padding are in frames; rescale if the rate changed
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // a non-empty buffer being replaced means we drop queued data
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}
// Reports a fatal codec error to the client: maps |error| to a framework
// status when the caller passed the generic UNKNOWN_ERROR, marks the codec
// fatally broken, and posts a kWhatError notification.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // once set, the codec refuses further work until released
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Asks the encoder to emit an IDR (sync) frame on the next output.
// Returns ERROR_UNSUPPORTED on decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Appends one buffer record; the four parallel vectors below are always kept
// index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}
ACodec::PortDescription::memRefAt(size_t index) const { 5540 return mMemRefs.itemAt(index); 5541} 5542 5543//////////////////////////////////////////////////////////////////////////////// 5544 5545ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5546 : AState(parentState), 5547 mCodec(codec) { 5548} 5549 5550ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5551 OMX_U32 /* portIndex */) { 5552 return KEEP_BUFFERS; 5553} 5554 5555bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5556 switch (msg->what()) { 5557 case kWhatInputBufferFilled: 5558 { 5559 onInputBufferFilled(msg); 5560 break; 5561 } 5562 5563 case kWhatOutputBufferDrained: 5564 { 5565 onOutputBufferDrained(msg); 5566 break; 5567 } 5568 5569 case ACodec::kWhatOMXMessageList: 5570 { 5571 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5572 } 5573 5574 case ACodec::kWhatOMXMessageItem: 5575 { 5576 // no need to check as we already did it for kWhatOMXMessageList 5577 return onOMXMessage(msg); 5578 } 5579 5580 case ACodec::kWhatOMXMessage: 5581 { 5582 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5583 } 5584 5585 case ACodec::kWhatSetSurface: 5586 { 5587 sp<AReplyToken> replyID; 5588 CHECK(msg->senderAwaitsResponse(&replyID)); 5589 5590 sp<RefBase> obj; 5591 CHECK(msg->findObject("surface", &obj)); 5592 5593 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5594 5595 sp<AMessage> response = new AMessage; 5596 response->setInt32("err", err); 5597 response->postReply(replyID); 5598 break; 5599 } 5600 5601 case ACodec::kWhatCreateInputSurface: 5602 case ACodec::kWhatSetInputSurface: 5603 case ACodec::kWhatSignalEndOfInputStream: 5604 { 5605 // This may result in an app illegal state exception. 
5606 ALOGE("Message 0x%x was not handled", msg->what()); 5607 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5608 return true; 5609 } 5610 5611 case ACodec::kWhatOMXDied: 5612 { 5613 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5614 ALOGE("OMX/mediaserver died, signalling error!"); 5615 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5616 break; 5617 } 5618 5619 case ACodec::kWhatReleaseCodecInstance: 5620 { 5621 ALOGI("[%s] forcing the release of codec", 5622 mCodec->mComponentName.c_str()); 5623 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5624 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5625 mCodec->mComponentName.c_str(), err); 5626 sp<AMessage> notify = mCodec->mNotify->dup(); 5627 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5628 notify->post(); 5629 break; 5630 } 5631 5632 default: 5633 return false; 5634 } 5635 5636 return true; 5637} 5638 5639bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5640 // there is a possibility that this is an outstanding message for a 5641 // codec that we have already destroyed 5642 if (mCodec->mNode == 0) { 5643 ALOGI("ignoring message as already freed component: %s", 5644 msg->debugString().c_str()); 5645 return false; 5646 } 5647 5648 IOMX::node_id nodeID; 5649 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5650 if (nodeID != mCodec->mNode) { 5651 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5652 return false; 5653 } 5654 return true; 5655} 5656 5657bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5658 sp<RefBase> obj; 5659 CHECK(msg->findObject("messages", &obj)); 5660 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5661 5662 bool receivedRenderedEvents = false; 5663 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5664 it != msgList->getList().cend(); ++it) { 5665 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
// Unpacks a single OMX callback message ("type" selects the payload) and
// forwards it to the matching onOMX* handler. Returns false for unknown
// message types.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Common OMX event handling: processes dataspace changes (aspects packed
// byte-wise into data2) and OMX errors; any other event is left for the
// current state (returns false).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 layout: [range:8][primaries:8][matrix:8][transfer:8]
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}
// Handles EMPTY_BUFFER_DONE: the component returned input buffer |bufferID|.
// Reclaims ownership, waits out any fence, releases the implicit MediaBuffer
// reference, and (in RESUBMIT mode) asks the client for more input.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // protocol violation by the component; close the fence so it doesn't leak
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Hands input buffer |info| to the client (kWhatFillThisBuffer) with a reply
// message that routes the filled buffer back as kWhatInputBufferFilled.
// No-op once EOS has been seen on the input port.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    // clear stale metadata before handing the buffer out
    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}
        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    // Reclaim the buffer from the client; it must currently be OWNED_BY_UPSTREAM,
    // otherwise the ownership bookkeeping is corrupt and we fail the transaction.
    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // Hold on to the buffer; just latch EOS state if signalled.
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec-config buffers are passed as plain data, never as metadata
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // Input arrived in a different buffer than the codec's own;
                    // convert (or plain-copy) it into the codec buffer.
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // For metadata-mode input, refresh the handle/buffer reference stored
                // in the metadata before handing the buffer to the component.
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // emptyBuffer consumed the fence (or failed); either way we no longer own it.
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // No data came back with the buffer: signal EOS to the component
                // with an empty buffer carrying the EOS flag.
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Posts a fill request for an input buffer we currently own (the last such
// buffer found), unless input EOS has already been reached or no buffer is free.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// Handles FILL_BUFFER_DONE from the OMX component: takes ownership of the
// output buffer back from the component and, depending on port mode, keeps it,
// resubmits it, forwards it downstream (to MediaCodec), or frees it.
// Returns true if the callback was handled.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        // we own the incoming fence even on the error path; don't leak it
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // An empty buffer that does not terminate the stream goes straight
            // back to the component.
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // Encoder output in metadata mode: attach the native handle (valid
                // only in 32-bit mode) plus range info as message metadata.
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            // Hand the buffer downstream; the "reply" message comes back to us
            // as kWhatOutputBufferDrained once the client is done with it.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Called when the client returns an output buffer (kWhatOutputBufferDrained):
// optionally renders it to the native window (applying pending crop/dataspace/
// timestamp), then either keeps, resubmits, or frees the buffer per port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Apply any crop change carried on the reply before queueing the buffer.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Resets all per-component state when (re)entering the uninitialized state,
// including releasing the OMX node/interface and the native window.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            // already shut down; just acknowledge
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // nothing allocated, so a flush trivially completes
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup: allocate + configure + start in a single message.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Finds a matching OMX component (by explicit name or by mime/encoder lookup),
// allocates its node, hooks up death/observer notifications, and transitions
// to the Loaded state. Returns false (after signalling an error) on failure.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    // Try each candidate in order until one allocates successfully.
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // temporarily raise thread priority while allocating the node
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Resets per-session state on entering Loaded; completes a pending shutdown
// if one was requested while we were transitioning here.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the component is being kept allocated) and
// acknowledges an explicit client-initiated shutdown.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // not started yet, so a flush trivially completes
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component for the given mime type and settings;
// on success notifies the client with the resulting input/output formats.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes the previously-configured input-surface options (repeat-frame delay,
// max PTS gap, max fps, time lapse, suspended start, color aspects) down to
// the component's input port. Returns the first error encountered, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new input surface (buffer producer) for an encoder and replies
// with kWhatInputSurfaceCreated, carrying either the surface or an error.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-supplied persistent input surface to the encoder and
// replies with kWhatInputSurfaceAccepted (with an error value on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Begins the Loaded -> Idle transition by sending OMX_CommandStateSet(Idle).
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Allocates port buffers for the Idle transition; on failure rolls the
// component back to Loaded and frees any buffers we managed to allocate.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
             "(error 0x%08x)",
             err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port, then the output port.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // defer until the state transition completes
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completes the Loaded->Idle transition on OMX_EventCmdComplete, then
// immediately requests the Idle->Executing transition.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // defer until the state transition completes
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completes the Idle->Executing transition on OMX_EventCmdComplete and
// enters the Executing state (resuming buffer flow).
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

// While executing, buffers on both ports are resubmitted to the component.
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info =
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7220 7221 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 7222 if (mCodec->submitOutputMetadataBuffer() != OK) 7223 break; 7224 } 7225 } 7226 7227 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7228 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7229} 7230 7231void ACodec::ExecutingState::submitRegularOutputBuffers() { 7232 bool failed = false; 7233 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 7234 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 7235 7236 if (mCodec->mNativeWindow != NULL) { 7237 if (info->mStatus != BufferInfo::OWNED_BY_US 7238 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7239 ALOGE("buffers should be owned by us or the surface"); 7240 failed = true; 7241 break; 7242 } 7243 7244 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7245 continue; 7246 } 7247 } else { 7248 if (info->mStatus != BufferInfo::OWNED_BY_US) { 7249 ALOGE("buffers should be owned by us"); 7250 failed = true; 7251 break; 7252 } 7253 } 7254 7255 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 7256 7257 info->checkWriteFence("submitRegularOutputBuffers"); 7258 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 7259 info->mFenceFd = -1; 7260 if (err != OK) { 7261 failed = true; 7262 break; 7263 } 7264 7265 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 7266 } 7267 7268 if (failed) { 7269 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7270 } 7271} 7272 7273void ACodec::ExecutingState::submitOutputBuffers() { 7274 submitRegularOutputBuffers(); 7275 if (mCodec->storingMetadataInDecodedBuffers()) { 7276 submitOutputMetaBuffers(); 7277 } 7278} 7279 7280void ACodec::ExecutingState::resume() { 7281 if (mActive) { 7282 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7283 return; 7284 } 7285 7286 
    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

// Entering Executing: reset render tracking and replay any messages deferred
// during the preceding transition states.
void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Steady-state message handling: shutdown and flush start the respective
// transitions, resume re-activates, and parameter/IDR/EOS requests are
// applied immediately.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                    "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            // Flush both ports at once; FlushingState waits for both
            // completions.
            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the status only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep re-arming the workaround while input has seen EOS but
            // output has not.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Apply runtime-changeable parameters from the client. Each recognized key
// is applied independently; the first hard failure aborts and returns its
// status. Keys: "video-bitrate", "skip-frames-before", "drop-input-frames",
// "request-sync", "operating-rate", "intra-refresh-period".
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_START_TIME,
                    &skipFramesBeforeUs,
                    sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Non-zero means suspend (drop) input.
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_SUSPEND,
                    &suspend,
                    sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
                setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Temporal-layer configuration is best-effort as well.
    status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
    if (err != OK) {
        err = OK; // ignore failure
    }

    return err;
}

// Ask the component to mark the input stream as ended and tell the client
// we did (with the error code attached if the component refused).
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handle port-settings changes on the output port: a full definition change
// requires disabling the port and reallocating its buffers (handled in
// OutputPortSettingsChangedState); crop/intra-refresh-only changes need no
// buffer reconfiguration.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// During reconfiguration the output port's buffers are being torn down
// (FREE_BUFFERS) while the input port keeps feeding the component.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defer all state-affecting requests until the port reconfiguration is done.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Two-phase reconfiguration: once the output port reports disabled, free its
// dealer, re-enable the port and allocate fresh buffers; once it reports
// re-enabled, resubmit buffers (if active) and return to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// While winding down to Idle, flush requests are ignored and a redundant
// shutdown is acknowledged without further action.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Wait for the StateSet(Idle) completion; port-settings/EOS events arriving
// during shutdown are intentionally ignored.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle AND every buffer is back in our hands, request
// Idle->Loaded, free both ports' buffers, and (optionally) push blank frames
// to the surface before moving to IdleToLoadedState.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Buffer returns during shutdown may be the last ones we were waiting for.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Final leg of shutdown: redundant shutdown is acknowledged; a flush here is
// a client error and is logged but otherwise dropped.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completion of StateSet(Loaded) returns us to LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Flushing: neither port has reported flush-complete yet.
void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// A shutdown during a flush is deferred until the flush completes; a second
// flush is simply absorbed.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7910 handled = true; 7911 break; 7912 } 7913 7914 default: 7915 handled = BaseState::onMessageReceived(msg); 7916 break; 7917 } 7918 7919 return handled; 7920} 7921 7922bool ACodec::FlushingState::onOMXEvent( 7923 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7924 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7925 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7926 7927 switch (event) { 7928 case OMX_EventCmdComplete: 7929 { 7930 if (data1 != (OMX_U32)OMX_CommandFlush) { 7931 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7932 asString((OMX_COMMANDTYPE)data1), data1, data2); 7933 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7934 return true; 7935 } 7936 7937 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7938 if (mFlushComplete[data2]) { 7939 ALOGW("Flush already completed for %s port", 7940 data2 == kPortIndexInput ? "input" : "output"); 7941 return true; 7942 } 7943 mFlushComplete[data2] = true; 7944 7945 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7946 changeStateIfWeOwnAllBuffers(); 7947 } 7948 } else if (data2 == OMX_ALL) { 7949 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7950 ALOGW("received flush complete event for OMX_ALL before ports have been" 7951 "flushed (%d/%d)", 7952 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7953 return false; 7954 } 7955 7956 changeStateIfWeOwnAllBuffers(); 7957 } else { 7958 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7959 } 7960 7961 return true; 7962 } 7963 7964 case OMX_EventPortSettingsChanged: 7965 { 7966 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7967 msg->setInt32("type", omx_message::EVENT); 7968 msg->setInt32("node", mCodec->mNode); 7969 msg->setInt32("event", event); 7970 msg->setInt32("data1", data1); 7971 msg->setInt32("data2", data2); 7972 7973 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7974 
                    mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;
}

// Buffer returns during a flush may complete the flush.
void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// When both ports report flushed and every buffer is ours again, reclaim any
// buffers still with the native window, reset EOS/render bookkeeping, notify
// the client that the flush completed, and re-enter ExecutingState.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Probe a component's capabilities (profiles/levels, color formats, adaptive
// or tunneled playback, intra-refresh) by allocating a throwaway node and
// querying it. On success *caps holds the populated capabilities; on any
// failure the node/client are released and the error returned.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        // Encoders describe profiles on their output port, decoders on input.
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            // Avoid enumerating forever on broken components.
            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
8191//static 8192status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8193 switch (numChannels) { 8194 case 1: 8195 map[0] = OMX_AUDIO_ChannelCF; 8196 break; 8197 case 2: 8198 map[0] = OMX_AUDIO_ChannelLF; 8199 map[1] = OMX_AUDIO_ChannelRF; 8200 break; 8201 case 3: 8202 map[0] = OMX_AUDIO_ChannelLF; 8203 map[1] = OMX_AUDIO_ChannelRF; 8204 map[2] = OMX_AUDIO_ChannelCF; 8205 break; 8206 case 4: 8207 map[0] = OMX_AUDIO_ChannelLF; 8208 map[1] = OMX_AUDIO_ChannelRF; 8209 map[2] = OMX_AUDIO_ChannelLR; 8210 map[3] = OMX_AUDIO_ChannelRR; 8211 break; 8212 case 5: 8213 map[0] = OMX_AUDIO_ChannelLF; 8214 map[1] = OMX_AUDIO_ChannelRF; 8215 map[2] = OMX_AUDIO_ChannelCF; 8216 map[3] = OMX_AUDIO_ChannelLR; 8217 map[4] = OMX_AUDIO_ChannelRR; 8218 break; 8219 case 6: 8220 map[0] = OMX_AUDIO_ChannelLF; 8221 map[1] = OMX_AUDIO_ChannelRF; 8222 map[2] = OMX_AUDIO_ChannelCF; 8223 map[3] = OMX_AUDIO_ChannelLFE; 8224 map[4] = OMX_AUDIO_ChannelLR; 8225 map[5] = OMX_AUDIO_ChannelRR; 8226 break; 8227 case 7: 8228 map[0] = OMX_AUDIO_ChannelLF; 8229 map[1] = OMX_AUDIO_ChannelRF; 8230 map[2] = OMX_AUDIO_ChannelCF; 8231 map[3] = OMX_AUDIO_ChannelLFE; 8232 map[4] = OMX_AUDIO_ChannelLR; 8233 map[5] = OMX_AUDIO_ChannelRR; 8234 map[6] = OMX_AUDIO_ChannelCS; 8235 break; 8236 case 8: 8237 map[0] = OMX_AUDIO_ChannelLF; 8238 map[1] = OMX_AUDIO_ChannelRF; 8239 map[2] = OMX_AUDIO_ChannelCF; 8240 map[3] = OMX_AUDIO_ChannelLFE; 8241 map[4] = OMX_AUDIO_ChannelLR; 8242 map[5] = OMX_AUDIO_ChannelRR; 8243 map[6] = OMX_AUDIO_ChannelLS; 8244 map[7] = OMX_AUDIO_ChannelRS; 8245 break; 8246 default: 8247 return -EINVAL; 8248 } 8249 8250 return OK; 8251} 8252 8253} // namespace android 8254